diff --git a/.codecov.yml b/.codecov.yml index a3f9e9e6dd06..b3d0129de992 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -25,12 +25,12 @@ component_management: branches: - "!master" individual_components: - - component_id: api - paths: - - api/** - component_id: pkg_aws_library paths: - packages/aws-library/** + - component_id: pkg_celery_library + paths: + - packages/celery-library/** - component_id: pkg_dask_task_models_library paths: - packages/dask-task-models-library/** @@ -130,6 +130,7 @@ comment: ignore: + - "api/tests" - "test_*.py" - "**/generated_models/*.py" - "**/generated_code/*.py" diff --git a/.env-devel b/.env-devel index 1842a982b854..cd60f2e9d365 100644 --- a/.env-devel +++ b/.env-devel @@ -17,12 +17,13 @@ AGENT_VOLUMES_CLEANUP_S3_ENDPOINT=http://172.17.0.1:9001 AGENT_VOLUMES_CLEANUP_S3_PROVIDER=MINIO AGENT_VOLUMES_CLEANUP_S3_REGION=us-east-1 AGENT_VOLUMES_CLEANUP_S3_SECRET_KEY=12345678 -AGENT_TRACING=null +AGENT_TRACING={} +API_SERVER_CELERY_CONCURRENCY=50 API_SERVER_DEV_FEATURES_ENABLED=0 API_SERVER_LOGLEVEL=INFO API_SERVER_PROFILING=1 -API_SERVER_TRACING=null +API_SERVER_TRACING={} TRAEFIK_API_SERVER_INFLIGHTREQ_AMOUNT=25 AUTOSCALING_DASK=null @@ -35,7 +36,7 @@ AUTOSCALING_LOGLEVEL=INFO AUTOSCALING_NODES_MONITORING=null AUTOSCALING_POLL_INTERVAL="00:00:10" AUTOSCALING_SSM_ACCESS=null -AUTOSCALING_TRACING=null +AUTOSCALING_TRACING={} AWS_S3_CLI_S3=null @@ -47,13 +48,15 @@ CATALOG_PORT=8000 CATALOG_PROFILING=1 CATALOG_SERVICES_DEFAULT_RESOURCES='{"CPU": {"limit": 0.1, "reservation": 0.1}, "RAM": {"limit": 2147483648, "reservation": 2147483648}}' CATALOG_SERVICES_DEFAULT_SPECIFICATIONS='{}' -CATALOG_TRACING=null +CATALOG_TRACING={} CELERY_RESULT_EXPIRES=P7D CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH='{"type":"tls","tls_ca_file":"/home/scu/.dask/dask-crt.pem","tls_client_cert":"/home/scu/.dask/dask-crt.pem","tls_client_key":"/home/scu/.dask/dask-key.pem"}' CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG=master-github-latest +CLUSTERS_KEEPER_DASK_NPROCS=1 CLUSTERS_KEEPER_DASK_NTHREADS=0 +CLUSTERS_KEEPER_DASK_NTHREADS_MULTIPLIER=1 CLUSTERS_KEEPER_DASK_WORKER_SATURATION=inf CLUSTERS_KEEPER_EC2_ACCESS=null CLUSTERS_KEEPER_SSM_ACCESS=null @@ -63,7 +66,7 @@ CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION=5 CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES=null CLUSTERS_KEEPER_TASK_INTERVAL=00:00:30 CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES=null -CLUSTERS_KEEPER_TRACING=null +CLUSTERS_KEEPER_TRACING={} DASK_SCHEDULER_HOST=dask-scheduler DASK_SCHEDULER_PORT=8786 @@ -83,7 +86,7 @@ DIRECTOR_PUBLISHED_HOST_NAME="127.0.0.1:9081" DIRECTOR_REGISTRY_CACHING_TTL=00:15:00 DIRECTOR_REGISTRY_CACHING=True DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS=null -DIRECTOR_TRACING=null +DIRECTOR_TRACING={} DOCKER_API_PROXY_HOST=docker-api-proxy DOCKER_API_PROXY_PASSWORD=admin @@ -98,11 +101,11 @@ EFS_GROUP_NAME=efs-group EFS_DNS_NAME=fs-xxx.efs.us-east-1.amazonaws.com EFS_MOUNTED_PATH=/tmp/efs EFS_PROJECT_SPECIFIC_DATA_DIRECTORY=project-specific-data -EFS_GUARDIAN_TRACING=null +EFS_GUARDIAN_TRACING={} EFS_DEFAULT_USER_SERVICE_SIZE_BYTES=10000 # DATCORE_ADAPTER -DATCORE_ADAPTER_TRACING=null +DATCORE_ADAPTER_TRACING={} # DIRECTOR_V2 ---- COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH='{"type":"tls","tls_ca_file":"/home/scu/.dask/dask-crt.pem","tls_client_cert":"/home/scu/.dask/dask-crt.pem","tls_client_key":"/home/scu/.dask/dask-key.pem"}' @@ -128,25 +131,28 @@ DYNAMIC_SIDECAR_LOG_LEVEL=DEBUG DYNAMIC_SIDECAR_PROMETHEUS_MONITORING_NETWORKS=[] DYNAMIC_SIDECAR_PROMETHEUS_SERVICE_LABELS={} 
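The repeated `*_TRACING=null` → `*_TRACING={}` switch in this .env-devel diff reads like a pydantic-settings pattern where a nested settings field is JSON-decoded from the environment: `{}` yields a sub-settings object with all defaults (feature enabled), while `null` yields `None` (feature disabled). A minimal sketch of that behavior only, not the repo's actual settings classes; the field names below are illustrative assumptions:

```python
from pydantic import BaseModel
from pydantic_settings import BaseSettings


class TracingSettings(BaseModel):
    # illustrative defaults only; the real tracing settings live elsewhere in the repo
    TRACING_COLLECTOR_ENDPOINT: str = "http://opentelemetry-collector"
    TRACING_COLLECTOR_PORT: int = 4318


class ApplicationSettings(BaseSettings):
    # complex fields are JSON-decoded from the environment, so:
    #   AGENT_TRACING={}   -> TracingSettings() with defaults (tracing on)
    #   AGENT_TRACING=null -> None (tracing off)
    AGENT_TRACING: TracingSettings | None = None


if __name__ == "__main__":
    print(ApplicationSettings().AGENT_TRACING)
```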
DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT=01:00:00 -DIRECTOR_V2_TRACING=null +DIRECTOR_V2_TRACING={} +DIRECTOR_V2_DYNAMIC_SCHEDULER_ENABLED=1 # DYNAMIC_SCHEDULER ---- DYNAMIC_SCHEDULER_LOGLEVEL=INFO DYNAMIC_SCHEDULER_PROFILING=1 DYNAMIC_SCHEDULER_USE_INTERNAL_SCHEDULER=0 DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT=01:00:00 -DYNAMIC_SCHEDULER_TRACING=null +DYNAMIC_SCHEDULER_TRACING={} DYNAMIC_SCHEDULER_UI_STORAGE_SECRET=adminadmin FUNCTION_SERVICES_AUTHORS='{"UN": {"name": "Unknown", "email": "unknown@osparc.io", "affiliation": "unknown"}}' WEBSERVER_LICENSES={} +WEBSERVER_FOGBUGZ={} LICENSES_ITIS_VIP_SYNCER_ENABLED=false LICENSES_ITIS_VIP_SYNCER_PERIODICITY=1D00:00:00 LICENSES_ITIS_VIP_API_URL=https://replace-with-itis-api/{category} LICENSES_ITIS_VIP_CATEGORIES='{"HumanWholeBody": "Humans", "HumanBodyRegion": "Humans (Region)", "AnimalWholeBody": "Animal"}' LICENSES_SPEAG_PHANTOMS_API_URL=https://replace-with-speag-api/{category} LICENSES_SPEAG_PHANTOMS_CATEGORIES='{"ComputationalPhantom": "Phantom of the Opera"}' +LONG_RUNNING_TASKS_NAMESPACE_SUFFIX=development # Can use 'docker run -it itisfoundation/invitations:latest simcore-service-invitations generate-dotenv --auto-password' INVITATIONS_DEFAULT_PRODUCT=osparc @@ -158,13 +164,13 @@ INVITATIONS_PORT=8000 INVITATIONS_SECRET_KEY='REPLACE_ME_with_result__Fernet_generate_key=' INVITATIONS_SWAGGER_API_DOC_ENABLED=1 INVITATIONS_USERNAME=admin -INVITATIONS_TRACING=null +INVITATIONS_TRACING={} LOG_FORMAT_LOCAL_DEV_ENABLED=1 -LOG_FILTER_MAPPING='{}' +LOG_FILTER_MAPPING='{"gunicorn.access":[" /v0/ ", " /v0/health "], "uvicorn.access":[" / ", " /v0/ "]}' NOTIFICATIONS_LOGLEVEL=INFO -NOTIFICATIONS_TRACING=null +NOTIFICATIONS_TRACING={} PAYMENTS_ACCESS_TOKEN_EXPIRE_MINUTES=30 PAYMENTS_ACCESS_TOKEN_SECRET_KEY=2c0411810565e063309be1457009fb39ce023946f6a354e6935107b57676 @@ -186,15 +192,17 @@ PAYMENTS_STRIPE_API_SECRET='REPLACE_ME_with_api_secret' PAYMENTS_STRIPE_URL=https://api.stripe.com PAYMENTS_SWAGGER_API_DOC_ENABLED=1 PAYMENTS_USERNAME=admin -PAYMENTS_TRACING=null +PAYMENTS_TRACING={} POSTGRES_DB=simcoredb -POSTGRES_ENDPOINT=postgres:5432 POSTGRES_HOST=postgres POSTGRES_PASSWORD=adminadmin POSTGRES_PORT=5432 POSTGRES_USER=scu - +POSTGRES_MINSIZE=1 +POSTGRES_MAXSIZE=50 +POSTGRES_MAX_POOLSIZE=10 +POSTGRES_MAX_OVERFLOW=20 POSTGRES_READONLY_PASSWORD=readonly POSTGRES_READONLY_USER=postgres_readonly @@ -227,7 +235,7 @@ RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_CHECK_ENABLED=1 RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_COUNTER_FAIL=6 RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_INTERVAL_SEC=300 RESOURCE_USAGE_TRACKER_S3=null -RESOURCE_USAGE_TRACKER_TRACING=null +RESOURCE_USAGE_TRACKER_TRACING={} # NOTE: 172.17.0.1 is the docker0 interface, which redirect from inside a container onto the host network interface. 
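The new `POSTGRES_MINSIZE` / `POSTGRES_MAXSIZE` / `POSTGRES_MAX_POOLSIZE` / `POSTGRES_MAX_OVERFLOW` entries arrive without explanation; the last two look like SQLAlchemy queue-pool parameters. A hedged sketch of how such values are typically wired into an async engine; the settings class and variable names are assumptions, only the credentials, host, and defaults mirror this .env-devel:

```python
from pydantic_settings import BaseSettings
from sqlalchemy.ext.asyncio import create_async_engine


class PostgresPoolSettings(BaseSettings):
    # hypothetical mirror of the new .env-devel entries
    POSTGRES_MAX_POOLSIZE: int = 10
    POSTGRES_MAX_OVERFLOW: int = 20


settings = PostgresPoolSettings()

# pool_size caps steady-state connections; max_overflow allows short bursts beyond it
engine = create_async_engine(
    "postgresql+asyncpg://scu:adminadmin@postgres:5432/simcoredb",
    pool_size=settings.POSTGRES_MAX_POOLSIZE,
    max_overflow=settings.POSTGRES_MAX_OVERFLOW,
)
```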
R_CLONE_OPTION_BUFFER_SIZE=16M @@ -259,7 +267,7 @@ STORAGE_HOST=storage STORAGE_LOGLEVEL=INFO STORAGE_PORT=8080 STORAGE_PROFILING=1 -STORAGE_TRACING=null +STORAGE_TRACING={} # STORAGE ---- SWARM_STACK_NAME=master-simcore @@ -269,11 +277,18 @@ VENDOR_DEV_MANUAL_IMAGE=containous/whoami VENDOR_DEV_MANUAL_REPLICAS=1 VENDOR_DEV_MANUAL_SUBDOMAIN=manual -## VENDOR DEVELOPMENT SERVICES --- +## WEBSERVER SERVICES VARIANTS --- WB_API_WEBSERVER_HOST=wb-api-server WB_API_WEBSERVER_PORT=8080 +WB_AUTH_DIAGNOSTICS={} +WB_AUTH_LOGLEVEL=INFO +WB_AUTH_PROFILING=1 +WB_AUTH_TRACING={} +WB_AUTH_WEBSERVER_HOST=wb-auth +WB_AUTH_WEBSERVER_PORT=8080 + WB_GC_ACTIVITY=null WB_GC_ANNOUNCEMENTS=0 WB_GC_CATALOG=null @@ -300,7 +315,7 @@ WB_GC_SOCKETIO=1 WB_GC_STATICWEB=null WB_GC_STUDIES_DISPATCHER=null WB_GC_TAGS=0 -WB_GC_TRACING=null +WB_GC_TRACING={} WB_GC_USERS={} WB_GC_WALLETS=0 @@ -330,7 +345,7 @@ WB_DB_EL_STATICWEB=null WB_DB_EL_STORAGE=null WB_DB_EL_STUDIES_DISPATCHER=null WB_DB_EL_TAGS=0 -WB_DB_EL_TRACING=null +WB_DB_EL_TRACING={} WB_DB_EL_USERS={} WB_DB_EL_WALLETS=0 @@ -395,11 +410,12 @@ WEBSERVER_PROJECTS={} WEBSERVER_PROMETHEUS_API_VERSION=v1 WEBSERVER_PROMETHEUS_URL=http://prometheus:9090 WEBSERVER_PUBLICATIONS=1 +WEBSERVER_REALTIME_COLLABORATION='{"RTC_MAX_NUMBER_OF_USERS":3}' WEBSERVER_SCICRUNCH={} WEBSERVER_SESSION_SECRET_KEY='REPLACE_ME_with_result__Fernet_generate_key=' WEBSERVER_SOCKETIO=1 WEBSERVER_STATICWEB={} WEBSERVER_STUDIES_DISPATCHER={} WEBSERVER_TAGS=1 -WEBSERVER_TRACING=null +WEBSERVER_TRACING={} WEBSERVER_USERS={} diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index d8684350361c..b0fde245e8a2 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -13,6 +13,7 @@ Makefile @pcrespov @sanderegg /api/ @sanderegg @pcrespov @matusdrobuliak66 /ci/ @sanderegg @pcrespov /docs/ @pcrespov +/packages/celery-library/ @giancarloromeo /packages/common-library/ @giancarloromeo /packages/models-library/ @sanderegg @pcrespov @matusdrobuliak66 @giancarloromeo /packages/postgres-database/ @matusdrobuliak66 diff --git a/.github/ISSUE_TEMPLATE/1_bug_report.yml b/.github/ISSUE_TEMPLATE/1_bug_report.yml index a0af793a09d3..841ae3ec33ac 100644 --- a/.github/ISSUE_TEMPLATE/1_bug_report.yml +++ b/.github/ISSUE_TEMPLATE/1_bug_report.yml @@ -1,7 +1,20 @@ name: 🐛 Bug description: File a bug/issue -labels: ["bug", "t:bug"] -assignees: ["pcrespov"] +title: "[Bug]: " +labels: + - bug + - "t:bug" +assignees: + - bisgaard-itis + - giancarloromeo + - GitHK + - matusdrobuliak66 + - odeimaiz + - pcrespov + - sanderegg +projects: + - "ITISFoundation/projects/15" +type: "Bug" body: - type: checkboxes attributes: @@ -20,7 +33,7 @@ body: - "production aws (e.g. osparc.io)" - "staging aws (e.g. staging.osparc.io)" - "production on-premise (dalco)" - - "stating on-premise (dalco)" + - "staging on-premise (dalco)" - "development (master)" - "other (e.g. 
local)" validations: diff --git a/.github/ISSUE_TEMPLATE/2_feature_request.yml b/.github/ISSUE_TEMPLATE/2_feature_request.yml index 2c392adce857..69e05bc0f83f 100644 --- a/.github/ISSUE_TEMPLATE/2_feature_request.yml +++ b/.github/ISSUE_TEMPLATE/2_feature_request.yml @@ -1,7 +1,17 @@ name: ✨ Feature request -description: Suggest an idea to implement in the simcore plaform -labels: ["t:enhancement"] -assignees: ["pcrespov"] +description: Suggest an idea to implement in the simcore platform +title: "[Feature]: " +labels: + - t:enhancement +assignees: + - bisgaard-itis + - giancarloromeo + - GitHK + - matusdrobuliak66 + - odeimaiz + - pcrespov + - sanderegg +type: "Feature" body: - type: checkboxes attributes: diff --git a/.github/ISSUE_TEMPLATE/3_maintenance_issue.yml b/.github/ISSUE_TEMPLATE/3_maintenance_issue.yml index e4a0060c764c..65e5c262ec2d 100644 --- a/.github/ISSUE_TEMPLATE/3_maintenance_issue.yml +++ b/.github/ISSUE_TEMPLATE/3_maintenance_issue.yml @@ -1,7 +1,19 @@ name: 🏗️ Maintenance description: A change in the code to overcome technical debt -labels: ["t:maintenance"] -assignees: ["pcrespov"] +title: "[Maintenance]: " +labels: + - "t:maintenance" +assignees: + - bisgaard-itis + - giancarloromeo + - GitHK + - matusdrobuliak66 + - odeimaiz + - pcrespov + - sanderegg +projects: + - "ITISFoundation/projects/9" +type: "Task" body: - type: checkboxes attributes: diff --git a/.github/ISSUE_TEMPLATE/4_pre_release.yml b/.github/ISSUE_TEMPLATE/4_pre_release.yml index 1502fd0b34ff..14500b1a36b2 100644 --- a/.github/ISSUE_TEMPLATE/4_pre_release.yml +++ b/.github/ISSUE_TEMPLATE/4_pre_release.yml @@ -1,8 +1,13 @@ name: 🚀 Pre-release to staging (developers-only) description: Issue to plan and log pre-release from master to staging deploy (including staging hotfixes) title: "🚀 Pre-release master -> staging_" -labels: ["t:maintenance", "release"] -assignees: ["matusdrobuliak66"] +labels: + - "t:maintenance" + - release +assignees: + - matusdrobuliak66 + - YuryHrytsuk +type: "Task" body: - type: dropdown id: prerelease_kind @@ -34,8 +39,8 @@ body: attributes: label: Commit SHA description: | - Selets the commit from which the release takes placeholder. Check [commits](https://github.com/ITISFoundation/osparc-simcore/commits/master) - IMPORTANT: make sure t (i.e. tests passed and images were built and pushed) + Selects the commit from which the release takes place. Check [commits](https://github.com/ITISFoundation/osparc-simcore/commits/master) + IMPORTANT: make sure CI (i.e. tests passed and images were built and pushed) placeholder: 7d9dcc313f9ced0bd1e6508363148841683b6d7c validations: required: true @@ -49,7 +54,7 @@ body: required: true - type: checkboxes attributes: - label: Did the commit CI suceeded? + label: Did the commit CI succeed? description: Check the CI of the selected commit in the [repo commits](https://github.com/ITISFoundation/osparc-simcore/commits/master) options: - label: The commit CI succeeded. 
@@ -159,3 +164,4 @@ body: ``` md https://github.com/ITISFoundation/osparc-simcore/releases/tag/staging_ ``` + ``` diff --git a/.github/ISSUE_TEMPLATE/5_release.yml b/.github/ISSUE_TEMPLATE/5_release.yml index bcc15b5be7be..f765fb0cf16c 100644 --- a/.github/ISSUE_TEMPLATE/5_release.yml +++ b/.github/ISSUE_TEMPLATE/5_release.yml @@ -1,8 +1,13 @@ name: 🚀 Release to production (developers-only) description: Creates an issue to plan and log the release from staging to production title: "🚀 Release v" -labels: ["t:maintenance", "release"] -assignees: ["matusdrobuliak66"] +labels: + - "t:maintenance" + - release +assignees: + - matusdrobuliak66 + - YuryHrytsuk +type: "Task" body: - type: input id: version @@ -17,8 +22,8 @@ body: attributes: label: Commit SHA description: | - Selects the commit from which the release takes placeholder. Check [commits](https://github.com/ITISFoundation/osparc-simcore/commits/master) - IMPORTANT: make sure t (i.e. tests passed and images were built and pushed) + Selects the commit from which the release takes place. Check [commits](https://github.com/ITISFoundation/osparc-simcore/commits/master) + IMPORTANT: make sure CI (i.e. tests passed and images were built and pushed) placeholder: 7d9dcc313f9ced0bd1e6508363148841683b6d7c validations: required: true @@ -41,7 +46,7 @@ body: required: true - type: checkboxes attributes: - label: Did the commit CI suceeded? + label: Did the commit CI succeed? description: Check the CI of the selected commit in the [repo commits](https://github.com/ITISFoundation/osparc-simcore/commits/master) to make sure the images are built and ready options: - label: The commit CI succeeded. @@ -162,3 +167,4 @@ body: ``` md :tada: https://github.com/ITISFoundation/osparc-simcore/releases/tag/v ``` + ``` diff --git a/.github/ISSUE_TEMPLATE/6_hotfix.yml b/.github/ISSUE_TEMPLATE/6_hotfix.yml index 49c0a02ed72b..268ad58f6234 100644 --- a/.github/ISSUE_TEMPLATE/6_hotfix.yml +++ b/.github/ISSUE_TEMPLATE/6_hotfix.yml @@ -1,8 +1,13 @@ name: 🚑️ Hotfix (developers-only) description: Critical hotfix to staging or production title: "🚑️ Release hotfix/v (hotfix)" -labels: ["t:maintenance", "release"] -assignees: ["matusdrobuliak66"] +labels: + - "t:maintenance" + - release +assignees: + - matusdrobuliak66 + - YuryHrytsuk +type: "Task" body: - type: input id: version @@ -17,8 +22,8 @@ body: attributes: label: Branching Commit SHA description: | - Select the commit from which the hot-fix branch needs to emerge . Check [commits](https://github.com/ITISFoundation/osparc-simcore/commits/master) - IMPORTANT: make sure t (i.e. tests passed and images were built and pushed) + Select the commit from which the hot-fix branch needs to emerge. Check [commits](https://github.com/ITISFoundation/osparc-simcore/commits/master) + IMPORTANT: make sure CI (i.e. tests passed and images were built and pushed) placeholder: e.g. `8d9a27ebb3e64956e6a41f31839748b3f6a27074` validations: required: true @@ -78,8 +83,8 @@ body: attributes: label: Hotfix Commit SHA description: | - Selects the the head commit of the hotfix branch - IMPORTANT: make sure t (i.e. tests passed and images were built and pushed) + Selects the head commit of the hotfix branch + IMPORTANT: make sure CI (i.e. tests passed and images were built and pushed) placeholder: e.g. 
`d1f2e9ed-3b82-424e-8afa-17940614f042` validations: required: false @@ -108,3 +113,4 @@ body: ``` md :tada: https://github.com/ITISFoundation/osparc-simcore/releases/tag/v ``` + ``` diff --git a/.github/ISSUE_TEMPLATE/7_regular_maintenance.yml b/.github/ISSUE_TEMPLATE/7_regular_maintenance.yml index a6d821cdca1b..a515ba3484b0 100644 --- a/.github/ISSUE_TEMPLATE/7_regular_maintenance.yml +++ b/.github/ISSUE_TEMPLATE/7_regular_maintenance.yml @@ -1,8 +1,19 @@ name: ⬆️ Regular maintenance (developers-only) description: Creates an issue to plan a regular maintenance each sprint -title: " ⬆️ Regular maintenance " -labels: ["t:maintenance"] -assignees: ["matusdrobuliak66"] +title: "⬆️ Regular maintenance " +labels: + - "t:maintenance" +assignees: + - bisgaard-itis + - giancarloromeo + - GitHK + - matusdrobuliak66 + - odeimaiz + - pcrespov + - sanderegg +type: "Task" +projects: + - "ITISFoundation/projects/9" body: - type: input id: sprint @@ -13,13 +24,13 @@ body: required: true - type: textarea attributes: - label: ⬆️ Requirements + label: ⬆️ Requirements description: | Upgrade of Requirements value: | - Update of test & tools dependencies repository-wise - [ ] ``make reqs`` - - Update of most important libraries repository-wise make ``reqs-all upgrade=foo==1.2.3`` + - Update of most important libraries repository-wise make ``reqs-all startswith=pydantic`` - [ ] fastapi - [ ] pydantic - [ ] aio-pika diff --git a/.github/tips.md b/.github/TIPS.md similarity index 60% rename from .github/tips.md rename to .github/TIPS.md index 59e7d490b5e2..8c7c3770c5af 100644 --- a/.github/tips.md +++ b/.github/TIPS.md @@ -7,3 +7,15 @@ WARNING: do not name this file as README.md since it will be rendered in the mai - ``PULL_REQUEST_TEMPLATE.md`` - ``ISSUE_TEMPLATE`` folder: - Add [top-level syntax](https://docs.github.com/en/communities/using-templates-to-encourage-useful-issues-and-pull-requests/syntax-for-issue-forms#top-level-syntax) to ISSUE_TEMPLATE/*.md files to configure them and view them as [template in the github web](https://github.com/ITISFoundation/osparc-simcore/issues/new/choose) + + +--- + +## Copilot Usage Tips + +1. **Be Specific**: Provide clear and detailed prompts to Copilot for better suggestions. +2. **Iterate**: Review and refine Copilot's suggestions to ensure they meet project standards. +3. **Split Tasks**: Break down complex tasks into smaller, manageable parts for better suggestions. +4. **Test Suggestions**: Always test Copilot-generated code to ensure it works as expected. 
+ +- SEE https://code.visualstudio.com/docs/copilot/copilot-customization#_custom-instructions diff --git a/.github/actions/download-load-docker-images/action.yml b/.github/actions/download-load-docker-images/action.yml new file mode 100644 index 000000000000..5f9f03be3c3c --- /dev/null +++ b/.github/actions/download-load-docker-images/action.yml @@ -0,0 +1,31 @@ +name: 'Download and Load Docker Images' +description: 'Downloads and loads Docker images for integration/system tests' + +inputs: + artifact-name-pattern: + description: 'Artifact name pattern for docker images (e.g., backend, or * for all)' + required: false + default: 'backend' + + download-path: + description: 'Path to download artifacts to' + required: false + default: '/${{ runner.temp }}/build' + +runs: + using: 'composite' + steps: + # FIXME: Workaround for https://github.com/actions/download-artifact/issues/249 + - name: download docker images with retry + uses: Wandalen/wretry.action@master + with: + action: actions/download-artifact@v4 + with: | + ${{ inputs.artifact-name-pattern == '*' && 'pattern' || 'name' }}: docker-buildx-images-${{ runner.os }}-${{ github.sha }}-${{ inputs.artifact-name-pattern }} + path: ${{ inputs.download-path }} + attempt_limit: 5 + attempt_delay: 1000 + + - name: load docker images + shell: bash + run: make load-images local-src=${{ inputs.download-path }} diff --git a/.github/actions/setup-simcore-env/action.yml b/.github/actions/setup-simcore-env/action.yml new file mode 100644 index 000000000000..f1cd34a85022 --- /dev/null +++ b/.github/actions/setup-simcore-env/action.yml @@ -0,0 +1,64 @@ +name: 'Setup SimCore Environment' +description: 'Sets up the common environment for SimCore CI jobs' + +inputs: + python-version: + description: 'Python version to use' + required: false + default: '3.11' + + uv-version: + description: 'UV version to use' + required: false + default: '0.8.x' + + cache-dependency-glob: + description: 'Glob pattern for cache dependency files' + required: false + default: '' + + setup-docker: + description: 'Whether to setup Docker BuildX' + required: false + default: 'true' + + show-system-versions: + description: 'Whether to show system versions' + required: false + default: 'true' + + expose-github-runtime: + description: 'Whether to expose GitHub runtime for buildx (needed for some integration tests)' + required: false + default: 'true' + +runs: + using: 'composite' + steps: + - name: Setup Docker BuildX + if: inputs.setup-docker == 'true' + id: buildx + uses: docker/setup-buildx-action@v3 + with: + driver: docker-container + + - name: Expose GitHub runtime for BuildX + if: inputs.expose-github-runtime == 'true' + uses: crazy-max/ghaction-github-runtime@v3 + + - name: Setup Python environment + uses: actions/setup-python@v5 + with: + python-version: ${{ inputs.python-version }} + + - name: Install UV + uses: astral-sh/setup-uv@v6 + with: + version: ${{ inputs.uv-version }} + enable-cache: false + cache-dependency-glob: ${{ inputs.cache-dependency-glob }} + + - name: Show system versions + if: inputs.show-system-versions == 'true' + shell: bash + run: ./ci/helpers/show_system_versions.bash diff --git a/.github/codeql/codeql-config.yml b/.github/codeql/codeql-config.yml index adac3b13795c..739b7c8a5a6c 100644 --- a/.github/codeql/codeql-config.yml +++ b/.github/codeql/codeql-config.yml @@ -4,6 +4,7 @@ disable-default-queries: false paths: - packages/aws-library/src + - packages/celery-library/src - packages/dask-task-models-library/src - 
packages/models-library/src/models_library - packages/postgres-database/src/simcore_postgres_database diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md deleted file mode 100644 index a5bb20c31e42..000000000000 --- a/.github/copilot-instructions.md +++ /dev/null @@ -1,55 +0,0 @@ -# GitHub Copilot Instructions - -This document provides guidelines and best practices for using GitHub Copilot in the `osparc-simcore` repository and other Python and Node.js projects. - -## General Guidelines - -1. **Use Python 3.11**: Ensure that all Python-related suggestions align with Python 3.11 features and syntax. -2. **Node.js Compatibility**: For Node.js projects, ensure compatibility with the version specified in the project (e.g., Node.js 14 or later). -3. **Follow Coding Conventions**: Adhere to the coding conventions outlined in the `docs/coding-conventions.md` file. -4. **Test-Driven Development**: Write unit tests for all new functions and features. Use `pytest` for Python and appropriate testing frameworks for Node.js. -5. **Environment Variables**: Use environment variables as specified in `docs/env-vars.md` for configuration. Avoid hardcoding sensitive information. -6. **Documentation**: Prefer self-explanatory code; add documentation only if explicitly requested by the developer. - -## Python-Specific Instructions - -- Always use type hints and annotations to improve code clarity and compatibility with tools like `mypy`. - - An exception to that rule is in `test_*` functions return type hint must not be added -- Follow the dependency management practices outlined in `requirements/`. -- Use `ruff` for code formatting and for linting. -- Use `black` for code formatting and `pylint` for linting. -- ensure we use `sqlalchemy` >2 compatible code. -- ensure we use `pydantic` >2 compatible code. -- ensure we use `fastapi` >0.100 compatible code -- use f-string formatting -- Only add comments in function if strictly necessary -- use relative imports -- imports should be at top of the file - - -### Json serialization - -- Generally use `json_dumps`/`json_loads` from `common_library.json_serialization` to built-in `json.dumps` / `json.loads`. -- Prefer Pydantic model methods (e.g., `model.model_dump_json()`) for serialization. - - -## Node.js-Specific Instructions - -- Use ES6+ syntax and features. -- Follow the `package.json` configuration for dependencies and scripts. -- Use `eslint` for linting and `prettier` for code formatting. -- Write modular and reusable code, adhering to the project's structure. - -## Copilot Usage Tips - -1. **Be Specific**: Provide clear and detailed prompts to Copilot for better suggestions. -2. **Iterate**: Review and refine Copilot's suggestions to ensure they meet project standards. -3. **Split Tasks**: Break down complex tasks into smaller, manageable parts for better suggestions. -4. **Test Suggestions**: Always test Copilot-generated code to ensure it works as expected. 
- -## Additional Resources - -- [Python Coding Conventions](../docs/coding-conventions.md) -- [Environment Variables Guide](../docs/env-vars.md) -- [Steps to Upgrade Python](../docs/steps-to-upgrade-python.md) -- [Node.js Installation Script](../scripts/install_nodejs_14.bash) diff --git a/.github/instructions/general.instructions.md b/.github/instructions/general.instructions.md new file mode 100644 index 000000000000..481c3462815a --- /dev/null +++ b/.github/instructions/general.instructions.md @@ -0,0 +1,11 @@ +--- +applyTo: '**' +--- +Provide project context and coding guidelines that AI should follow when generating code, answering questions, or reviewing changes. + +## General Guidelines + +1. **Test-Driven Development**: Write unit tests for all new functions and features. Use `pytest` for Python and appropriate testing frameworks for Node.js. +2. **Environment Variables**: Use [Environment Variables Guide](../../docs/env-vars.md) for configuration. Avoid hardcoding sensitive information. +3. **Documentation**: Prefer self-explanatory code; add documentation only if explicitly requested by the developer. Be concise. +4. **Code Reviews**: Participate in code reviews and provide constructive feedback. diff --git a/.github/instructions/node.instructions.md b/.github/instructions/node.instructions.md new file mode 100644 index 000000000000..504e7675be0c --- /dev/null +++ b/.github/instructions/node.instructions.md @@ -0,0 +1,11 @@ +--- +applyTo: '**/*.js' +--- +Provide project context and coding guidelines that AI should follow when generating code, answering questions, or reviewing changes. + +## 🛠️Coding Instructions for Node.js in This Repository + +* Use ES6+ syntax and features. +* Follow the `package.json` configuration for dependencies and scripts. +* Use `eslint` for linting and `prettier` for code formatting. +* Write modular and reusable code, adhering to the project's structure. diff --git a/.github/instructions/python.instructions.md b/.github/instructions/python.instructions.md new file mode 100644 index 000000000000..31dfa431a3e9 --- /dev/null +++ b/.github/instructions/python.instructions.md @@ -0,0 +1,63 @@ +--- +applyTo: '**/*.py' +--- +Provide project context and coding guidelines that AI should follow when generating code, answering questions, or reviewing changes. + +## 🛠️Coding Instructions for Python in This Repository + +Follow these rules **strictly** when generating Python code: + +### 1. Python Version + +* Use Python 3.11: Ensure all code uses features and syntax compatible with Python 3.11. + +### 2. **Type Annotations** + +* Always use full type annotations for all functions and class attributes. +* ❗ **Exception**: Do **not** add return type annotations in `test_*` functions. + +### 3. **Code Style & Formatting** + +* Follow [Python Coding Conventions](../../docs/coding-conventions.md) **strictly**. +* Format code with `black` and `ruff`. +* Lint code with `ruff` and `pylint`. + +### 4. **Library Compatibility** + +Ensure compatibility with the following library versions: + +* `sqlalchemy` ≥ 2.x +* `pydantic` ≥ 2.x +* `fastapi` ≥ 0.100 + + +### 5. **Code Practices** + +* Use `f-string` formatting for all string interpolation except for logging message strings. +* Use **relative imports** within the same package/module. 
+ - For imports within the same repository/project, always use relative imports (e.g., `from ..constants import APP_SETTINGS_KEY` instead of `from simcore_service_webserver.constants import APP_SETTINGS_KEY`) + - Use absolute imports only for external libraries and packages +* Place **all imports at the top** of the file. +* Document functions when the code is not self-explanatory or if asked explicitly. + + +### 6. **JSON Serialization** + +* Prefer `json_dumps` / `json_loads` from `common_library.json_serialization` instead of the built-in `json.dumps` / `json.loads`. +* When using Pydantic models, prefer methods like `model.model_dump_json()` for serialization. + +### 7. **aiohttp Framework** + +* **Application Keys**: Always use `web.AppKey` for type-safe application storage instead of string keys + - Define keys with specific types: `APP_MY_KEY: Final = web.AppKey("APP_MY_KEY", MySpecificType)` + - Use precise types instead of generic `object` when the actual type is known + - Example: `APP_SETTINGS_KEY: Final = web.AppKey("APP_SETTINGS_KEY", ApplicationSettings)` + - Store and retrieve: `app[APP_MY_KEY] = value` and `data = app[APP_MY_KEY]` +* **Request Keys**: Use `web.AppKey` for request storage as well for consistency and type safety +* **Middleware**: Follow the repository's middleware patterns for cross-cutting concerns +* **Error Handling**: Use the established exception handling decorators and patterns +* **Route Definitions**: Use `web.RouteTableDef()` and organize routes logically within modules + +### 8. **Running tests** +* Use `--keep-docker-up` flag when testing to keep docker containers up between sessions. +* Always activate the python virtual environment before running pytest. diff --git a/docs/llm-prompts/pydantic-annotated-fields.md b/.github/prompts/pydantic-annotated-fields.prompt.md similarity index 87% rename from docs/llm-prompts/pydantic-annotated-fields.md rename to .github/prompts/pydantic-annotated-fields.prompt.md index 9b128e7bd72a..631a51a1b74b 100644 --- a/docs/llm-prompts/pydantic-annotated-fields.md +++ b/.github/prompts/pydantic-annotated-fields.prompt.md @@ -1,6 +1,9 @@ -# Prompt +--- +mode: 'edit' +description: 'Convert Pydantic model fields to use Annotated pattern' +--- + -``` Please convert all pydantic model fields that use `Field()` with default values to use the Annotated pattern instead. Follow these guidelines: @@ -10,7 +13,8 @@ Follow these guidelines: 4. Add the import: `from common_library.basic_types import DEFAULT_FACTORY` if it's not already present. 5. If `Field()` has no parameters (empty), don't use Annotated at all. Just use: `field_name: field_type = default_value`. 6. Leave any model validations, `model_config` settings, and `field_validators` untouched. -``` +7. Must keep the original Field descriptions and validation parameters intact (except for the `default` parameter). 
+ ## Examples ### Before: @@ -53,13 +57,11 @@ class ProjectModel(BaseModel): id: str = Field(default_factory=uuid.uuid4, description="Unique project identifier") name: str = Field(default="Untitled Project", min_length=3, max_length=50) created_at: datetime = Field(default_factory=datetime.now) + value: int = Field(..., description="Project value") + str_with_default: str = Field(default="foo") + config: dict = Field(default={"version": "1.0", "theme": "default"}) - @field_validator("name") - def validate_name(cls, v): - if v.isdigit(): - raise ValueError("Name cannot be only digits") - return v ``` ### After: @@ -74,11 +76,9 @@ class ProjectModel(BaseModel): id: Annotated[str, Field(default_factory=uuid.uuid4, description="Unique project identifier")] = DEFAULT_FACTORY name: Annotated[str, Field(min_length=3, max_length=50)] = "Untitled Project" created_at: Annotated[datetime, Field(default_factory=datetime.now)] = DEFAULT_FACTORY + value: Annotated[int, Field(description="Project value")] + str_with_default: str = "foo" + config: dict = {"version": "1.0", "theme": "default"} - @field_validator("name") - def validate_name(cls, v): - if v.isdigit(): - raise ValueError("Name cannot be only digits") - return v ``` diff --git a/.github/prompts/update-user-messages.prompt.md b/.github/prompts/update-user-messages.prompt.md new file mode 100644 index 000000000000..29ababc2ef15 --- /dev/null +++ b/.github/prompts/update-user-messages.prompt.md @@ -0,0 +1,117 @@ +--- +mode: 'edit' +description: 'Update user messages' +model: Claude Sonnet 3.5 +--- + +This prompt guide is for updating user-facing messages in ${file} or ${selection} + +## What is a User Message? + +A user message is any string that will be displayed to end-users of the application. +In our codebase, these messages are marked with the `user_message` function: + +```python +from common_library.user_messages import user_message + +error_msg = user_message("Operation failed. Please try again later.") +``` + +## Guidelines for Updating User Messages + +When modifying user messages, follow **as close as possible** these rules: + +1. **Version Tracking**: Every modification to a user message must include an incremented `_version` parameter: + + ```python + # Before modification + user_message("Error: Unable to connect to the server.") + + # After modification, add _version or increment it if it already exists + user_message("Currently unable to establish connection to the server.", _version=1) + ``` + +2. **F-String Preservation**: When modifying messages that use f-strings, preserve all parameters and their formatting: + + ```python + # Before + user_message(f"Project {project_name} could not be loaded.") + + # After (correct) + user_message(f"Unable to load project {project_name}.", _version=1) + + # After (incorrect - lost the parameter) + user_message("Unable to load project.", _version=1) + ``` + +3. **Message Style**: Follow **STRICTLY ALL 10 GUIDELINES** in `${workspaceFolder}/docs/user-messages-guidelines.md`: + - Be Clear and Concise + - Provide Specific and Actionable Information + - Avoid Technical Jargon + - Use a Polite and Non-Blaming Tone + - Avoid Negative Words and Phrases + - Place Messages Appropriately + - Use Inline Validation When Possible + - Avoid Using All-Caps and Excessive Punctuation + - **Use Humor Sparingly** - Avoid casual phrases like "Oops!", "Whoops!", or overly informal language + - Offer Alternative Solutions or Support + +4. 
**Preserve Context**: Ensure the modified message conveys the same meaning and context as the original. + +5. **Incremental Versioning**: If a message already has a version, increment it by 1: + + ```python + # Before + user_message("Session expired.", _version=2) + + # After + user_message("Your session has expired. Please log in again.", _version=3) + ``` + +6. **Replace 'Study' by 'Project'**: If the message contains the word 'Study', replace it with 'Project' to align with our terminology. + +7. **Professional Tone**: Maintain a professional, helpful tone. Avoid humor, casual expressions, or overly informal language that might not be appropriate for all users or situations. + +## Examples + +### Example 1: Simple Message Update + +```python +# Before +error_dialog(user_message("Failed to save changes in this study.")) + +# After +error_dialog(user_message("Unable to save your changes in this project.", _version=1)) +``` + +### Example 2: F-string Message Update + +```python +# Before +raise ValueError(user_message(f"Invalid input parameter: {param_name}")) + +# After +raise ValueError(user_message(f"The parameter '{param_name}' contains a value that is not allowed.", _version=1)) +``` + +### Example 3: Already Versioned Message + +```python +# Before +return HttpErrorInfo(status.HTTP_404_NOT_FOUND, user_message("User not found.", _version=1)) + +# After +return HttpErrorInfo(status.HTTP_404_NOT_FOUND, user_message("The requested user could not be found.", _version=2)) +``` + +### Example 4: Removing Humor (Guideline 9) + +```python +# Before +user_message("Oops! Something went wrong, but we've noted it down and we'll sort it out ASAP. Thanks for your patience!") + +# After +user_message("Something went wrong on our end. We've been notified and will resolve this issue as soon as possible. Thank you for your patience.", _version=1) +``` + +Remember: The goal is to improve clarity and helpfulness for end-users while maintaining accurate versioning for tracking changes. 
**Always check that your updated messages comply with ALL 10 guidelines, especially avoiding humor and maintaining a professional tone.** diff --git a/.github/workflows/_reusable-build-images.yml b/.github/workflows/_reusable-build-images.yml index f634e1c2d78d..a586e64e3172 100644 --- a/.github/workflows/_reusable-build-images.yml +++ b/.github/workflows/_reusable-build-images.yml @@ -25,16 +25,12 @@ jobs: timeout-minutes: 30 runs-on: ${{ inputs.os }} steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 + - uses: actions/checkout@v5 + - name: Setup SimCore environment + uses: ./.github/actions/setup-simcore-env with: - driver: docker-container - - name: expose github runtime for buildx - uses: crazy-max/ghaction-github-runtime@v3 - - name: show system environs - run: ./ci/helpers/show_system_versions.bash + python-version: ${{ inputs.python-version }} + expose-github-runtime: 'true' - name: build backend images if: ${{ inputs.build-backend }} run: | diff --git a/.github/workflows/ci-arm-build.yml b/.github/workflows/ci-arm-build.yml index 1cc89874b06f..6917251bbc0a 100644 --- a/.github/workflows/ci-arm-build.yml +++ b/.github/workflows/ci-arm-build.yml @@ -25,7 +25,7 @@ jobs: # secrets can be set in settings/secrets on github DOCKER_REGISTRY: ${{ secrets.DOCKER_REGISTRY }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: setup QEMU uses: docker/setup-qemu-action@v3 - name: setup docker buildx @@ -38,7 +38,7 @@ jobs: - name: show system environs run: ./ci/helpers/show_system_versions.bash - name: login to Dockerhub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} diff --git a/.github/workflows/ci-multi-architecture-fusing.yml b/.github/workflows/ci-multi-architecture-fusing.yml index dadeedf10389..7af4d4e16fba 100644 --- a/.github/workflows/ci-multi-architecture-fusing.yml +++ b/.github/workflows/ci-multi-architecture-fusing.yml @@ -26,7 +26,7 @@ jobs: # secrets can be set in settings/secrets on github DOCKER_REGISTRY: ${{ secrets.DOCKER_REGISTRY }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: setup QEMU uses: docker/setup-qemu-action@v3 - name: setup docker buildx @@ -39,7 +39,7 @@ jobs: - name: show system environs run: ./ci/helpers/show_system_versions.bash - name: login to Dockerhub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} diff --git a/.github/workflows/ci-pact-master.yml b/.github/workflows/ci-pact-master.yml index c6cf1d508dfa..620fda0aed5d 100644 --- a/.github/workflows/ci-pact-master.yml +++ b/.github/workflows/ci-pact-master.yml @@ -22,7 +22,7 @@ jobs: PACT_BROKER_PASSWORD: ${{ secrets.PACT_BROKER_PASSWORD }} steps: - name: setup python environment - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.11" - name: install uv @@ -31,7 +31,7 @@ jobs: version: "0.6.x" enable-cache: false - name: checkout source branch - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Run pact tests run: | make devenv diff --git a/.github/workflows/ci-release.yml b/.github/workflows/ci-release.yml index 6963b3a28c11..87ee1120c0fe 100644 --- a/.github/workflows/ci-release.yml +++ b/.github/workflows/ci-release.yml @@ -27,7 +27,7 @@ jobs: env: TO_TAG_PREFIX: release-github steps: - - uses: actions/checkout@v4 + - uses: 
actions/checkout@v5 with: fetch-depth: 0 - name: find branch name diff --git a/.github/workflows/ci-staging.yml b/.github/workflows/ci-staging.yml index dbef04653aae..c900117662f5 100644 --- a/.github/workflows/ci-staging.yml +++ b/.github/workflows/ci-staging.yml @@ -27,7 +27,7 @@ jobs: env: TO_TAG_PREFIX: staging-github steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 - name: find branch name diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index b98de763088f..678ce3905413 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -47,6 +47,7 @@ jobs: # Set job outputs to values from filter step outputs: aws-library: ${{ steps.filter.outputs.aws-library }} + celery-library: ${{ steps.filter.outputs.celery-library }} dask-task-models-library: ${{ steps.filter.outputs.dask-task-models-library }} models-library: ${{ steps.filter.outputs.models-library }} common-library: ${{ steps.filter.outputs.common-library }} @@ -82,7 +83,7 @@ jobs: anything-py: ${{ steps.filter.outputs.anything-py }} anything-js: ${{ steps.filter.outputs.anything-js }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 # For pull requests it's not necessary to checkout the code - uses: dorny/paths-filter@v3 id: filter @@ -94,6 +95,12 @@ jobs: - 'services/docker-compose*' - 'scripts/mypy/*' - 'mypy.ini' + celery-library: + - 'packages/celery-library/**' + - 'packages/pytest-simcore/**' + - 'services/docker-compose*' + - 'scripts/mypy/*' + - 'mypy.ini' dask-task-models-library: - 'packages/dask-task-models-library/**' - 'packages/pytest-simcore/**' @@ -131,6 +138,7 @@ jobs: - 'packages/pytest-simcore/**' - 'packages/service-integration/**' - 'services/docker-compose*' + - 'mypy.ini' service-library: - 'packages/pytest-simcore/**' - 'packages/service-library/**' @@ -141,9 +149,11 @@ jobs: - 'packages/pytest-simcore/**' - 'packages/settings-library/**' - 'services/docker-compose*' + - 'mypy.ini' simcore-sdk: - 'packages/**' - 'services/docker-compose*' + - 'mypy.ini' agent: - 'packages/**' - 'services/agent/**' @@ -158,6 +168,7 @@ jobs: - 'mypy.ini' api: - 'api/**' + - 'mypy.ini' api-server: - 'packages/**' - 'services/api-server/**' @@ -228,6 +239,7 @@ jobs: - 'packages/**' - 'services/migration/**' - 'services/docker-compose*' + - 'mypy.ini' payments: - 'packages/**' - 'services/payments/**' @@ -243,6 +255,7 @@ jobs: docker-api-proxy: - 'packages/**' - 'services/docker-api-proxy/**' + - 'mypy.ini' resource-usage-tracker: - 'packages/**' - 'services/resource-usage-tracker/**' @@ -262,12 +275,15 @@ jobs: - 'packages/**' - 'services/web/**' - 'services/docker-compose*' + - 'mypy.ini' anything: - 'packages/**' - 'services/**' - 'tests/**' + - 'mypy.ini' anything-py: - '**/*.py' + - 'mypy.ini' anything-js: - '**/*.js' build-test-images: @@ -296,36 +312,22 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/web/server/requirements/ci.txt" - - name: show system version - run: 
./ci/helpers/show_system_versions.bash - name: install webserver run: ./ci/github/unit-testing/webserver.bash install - name: typecheck run: ./ci/github/unit-testing/webserver.bash typecheck - name: test isolated - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/webserver.bash test_isolated - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/webserver.bash test_with_db 01 - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -335,7 +337,7 @@ jobs: unit-test-webserver-02: needs: changes if: ${{ needs.changes.outputs.webserver == 'true' || github.event_name == 'push' || github.event.inputs.force_all_builds == 'true' }} - timeout-minutes: 25 # if this timeout gets too small, then split the tests + timeout-minutes: 35 # if this timeout gets too small, then split the tests name: "[unit] webserver 02" runs-on: ${{ matrix.os }} strategy: @@ -344,30 +346,18 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/web/server/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install webserver run: ./ci/github/unit-testing/webserver.bash install - name: test run: ./ci/github/unit-testing/webserver.bash test_with_db 02 - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -386,30 +376,18 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/web/server/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install webserver run: ./ci/github/unit-testing/webserver.bash install - name: test run: ./ci/github/unit-testing/webserver.bash test_with_db 03 - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -428,24 +406,13 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/web/server/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install webserver run: 
./ci/github/unit-testing/webserver.bash install - name: test @@ -455,11 +422,7 @@ jobs: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: flags: unittests #optional - - name: Upload test results to Codecov - if: ${{ !cancelled() }} - uses: codecov/test-results-action@v1 - with: - token: ${{ secrets.CODECOV_TOKEN }} + unit-test-storage: needs: changes @@ -473,33 +436,20 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/storage/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/storage.bash install - name: typecheck run: ./ci/github/unit-testing/storage.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/storage.bash test - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -518,35 +468,22 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} + cache-dependency-glob: "**/agent/requirements/ci.txt" - name: install rclone run: sudo ./ci/github/helpers/install_rclone.bash - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false - cache-dependency-glob: "**/agent/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/agent.bash install - name: typecheck run: ./ci/github/unit-testing/agent.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/agent.bash test - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -564,33 +501,20 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/notifications/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/notifications.bash install - name: typecheck run: ./ci/github/unit-testing/notifications.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/notifications.bash test - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ 
secrets.CODECOV_TOKEN }} with: @@ -608,30 +532,18 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false - cache-dependency-glob: "**/api/tests/requirements.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash + cache-dependency-glob: "**/api/requirements/ci.txt" - name: install api run: ./ci/github/unit-testing/api.bash install - name: test run: ./ci/github/unit-testing/api.bash test - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -650,36 +562,22 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/api-server/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/api-server.bash install - name: typecheck run: ./ci/github/unit-testing/api-server.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/api-server.bash test - name: OAS backwards compatibility check - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/api-server.bash openapi-diff - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -689,7 +587,7 @@ jobs: unit-test-autoscaling: needs: changes if: ${{ needs.changes.outputs.autoscaling == 'true' || github.event_name == 'push' || github.event.inputs.force_all_builds == 'true' }} - timeout-minutes: 22 # temporary: mypy takes a huge amount of time to run here, maybe we should cache it + timeout-minutes: 18 # if this timeout gets too small, then split the tests name: "[unit] autoscaling" runs-on: ${{ matrix.os }} strategy: @@ -698,33 +596,20 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/autoscaling/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/autoscaling.bash install - name: typecheck run: ./ci/github/unit-testing/autoscaling.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/autoscaling.bash test - uses: 
codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -743,30 +628,18 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/catalog/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/catalog.bash install - name: typecheck run: ./ci/github/unit-testing/catalog.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/catalog.bash test - name: upload failed tests logs if: ${{ failure() }} @@ -775,7 +648,6 @@ jobs: name: ${{ github.job }}_docker_logs path: ./services/catalog/test_failures - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -794,24 +666,13 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/clusters-keeper/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: | make devenv @@ -825,13 +686,11 @@ jobs: pushd services/clusters-keeper && \ make mypy - name: test - if: ${{ !cancelled() }} run: | source .venv/bin/activate && \ pushd services/clusters-keeper && \ make test-ci-unit - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -850,30 +709,18 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/datcore-adapter/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/datcore-adapter.bash install - name: typecheck run: ./ci/github/unit-testing/datcore-adapter.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/datcore-adapter.bash test - name: upload failed tests logs if: ${{ failure() }} @@ -882,7 +729,6 @@ jobs: name: ${{ github.job }}_docker_logs path: ./services/datcore-adapter/test_failures - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ 
-901,30 +747,18 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/director/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/director.bash install - name: typecheck run: ./ci/github/unit-testing/director.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/director.bash test - name: upload failed tests logs if: ${{ failure() }} @@ -933,7 +767,6 @@ jobs: name: ${{ github.job }}_docker_logs path: ./services/director/test_failures - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -952,30 +785,18 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/director-v2/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/director-v2.bash install - name: typecheck run: ./ci/github/unit-testing/director-v2.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/director-v2.bash test - name: upload failed tests logs if: ${{ failure() }} @@ -984,7 +805,6 @@ jobs: name: ${{ github.job }}_docker_logs path: ./services/director-v2/test_failures - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -1003,38 +823,66 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/aws-library/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/aws-library.bash install - name: typecheck run: ./ci/github/unit-testing/aws-library.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/aws-library.bash test - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: flags: unittests #optional + unit-test-celery-library: + needs: changes + if: ${{ needs.changes.outputs.celery-library == 'true' || 
github.event_name == 'push' || github.event.inputs.force_all_builds == 'true' }} + timeout-minutes: 18 # if this timeout gets too small, then split the tests + name: "[unit] celery-library" + runs-on: ${{ matrix.os }} + strategy: + matrix: + python: ["3.11"] + os: [ubuntu-24.04] + fail-fast: false + steps: + - uses: actions/checkout@v5 + - name: setup docker buildx + id: buildx + uses: docker/setup-buildx-action@v3 + with: + driver: docker-container + - name: setup python environment + uses: actions/setup-python@v6 + with: + python-version: ${{ matrix.python }} + - name: install uv + uses: astral-sh/setup-uv@v6 + with: + version: "0.6.x" + enable-cache: false + cache-dependency-glob: "**/celery-library/requirements/ci.txt" + - name: show system version + run: ./ci/helpers/show_system_versions.bash + - name: install + run: ./ci/github/unit-testing/celery-library.bash install + - name: typecheck + run: ./ci/github/unit-testing/celery-library.bash typecheck + - name: test + run: ./ci/github/unit-testing/celery-library.bash test + - uses: codecov/codecov-action@v5 + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + with: + flags: unittests #optional unit-test-dask-task-models-library: needs: changes @@ -1048,33 +896,20 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/dask-task-models-library/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/dask-task-models-library.bash install - name: typecheck run: ./ci/github/unit-testing/dask-task-models-library.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/dask-task-models-library.bash test - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -1093,33 +928,20 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/dask-sidecar/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/dask-sidecar.bash install - name: typecheck run: ./ci/github/unit-testing/dask-sidecar.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/dask-sidecar.bash test - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -1138,33 +960,20 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: 
docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/payments/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/payments.bash install - name: typecheck run: ./ci/github/unit-testing/payments.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/payments.bash test - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -1183,33 +992,20 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/dynamic-scheduler/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/dynamic-scheduler.bash install - name: typecheck run: ./ci/github/unit-testing/dynamic-scheduler.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/dynamic-scheduler.bash test - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -1228,24 +1024,13 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/resource-usage-tracker/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: | make devenv @@ -1258,13 +1043,11 @@ jobs: pushd services/resource-usage-tracker && \ make mypy - name: test - if: ${{ !cancelled() }} run: | source .venv/bin/activate && \ pushd services/resource-usage-tracker && \ make test-ci-unit - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -1274,7 +1057,7 @@ jobs: unit-test-dynamic-sidecar: needs: changes if: ${{ needs.changes.outputs.dynamic-sidecar == 'true' || github.event_name == 'push' || github.event.inputs.force_all_builds == 'true' }} - timeout-minutes: 18 # if this timeout gets too small, then split the tests + timeout-minutes: 19 # if this timeout gets too small, then split the tests name: "[unit] dynamic-sidecar" runs-on: ${{ matrix.os }} strategy: @@ -1283,33 +1066,20 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: 
docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/dynamic-sidecar/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/dynamic-sidecar.bash install - name: typecheck run: ./ci/github/unit-testing/dynamic-sidecar.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/dynamic-sidecar.bash test - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -1328,24 +1098,13 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/efs-guardian/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: | make devenv @@ -1359,13 +1118,11 @@ jobs: pushd services/efs-guardian && \ make mypy - name: test - if: ${{ !cancelled() }} run: | source .venv/bin/activate && \ pushd services/efs-guardian && \ make test-ci-unit - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -1384,24 +1141,13 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false - cache-dependency-glob: "**/ci/helpers/requirements.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash + cache-dependency-glob: "**/pylint/requirements/ci.txt" - name: install run: ./ci/github/unit-testing/python-linting.bash install - name: test @@ -1419,33 +1165,20 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/postgres-database/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/postgres-database.bash install - name: 
typecheck run: ./ci/github/unit-testing/postgres-database.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/postgres-database.bash test - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -1464,33 +1197,20 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false - cache-dependency-glob: "**/notifications-library/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash + cache-dependency-glob: "**/invitations/requirements/ci.txt" - name: install run: ./ci/github/unit-testing/invitations.bash install - name: typecheck run: ./ci/github/unit-testing/invitations.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/invitations.bash test - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -1509,33 +1229,20 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/service-integration/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/service-integration.bash install - name: typecheck run: ./ci/github/unit-testing/service-integration.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/service-integration.bash test - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -1545,7 +1252,7 @@ jobs: unit-test-service-library: needs: changes if: ${{ needs.changes.outputs.service-library == 'true' || github.event_name == 'push' || github.event.inputs.force_all_builds == 'true' }} - timeout-minutes: 18 # if this timeout gets too small, then split the tests + timeout-minutes: 20 # if this timeout gets too small, then split the tests name: "[unit] service-library" runs-on: ${{ matrix.os }} strategy: @@ -1554,33 +1261,20 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false - cache-dependency-glob: "**/service-library/requirements/ci*.txt" - - name: show system version 
- run: ./ci/helpers/show_system_versions.bash + cache-dependency-glob: "**/service-library/requirements/ci.txt" - name: install run: ./ci/github/unit-testing/service-library.bash install_all - name: typecheck run: ./ci/github/unit-testing/service-library.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/service-library.bash test_all - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -1599,33 +1293,20 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/settings-library/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/settings-library.bash install - name: typecheck run: ./ci/github/unit-testing/settings-library.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/settings-library.bash test - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -1644,24 +1325,13 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/models-library/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/models-library.bash install - name: typecheck @@ -1669,7 +1339,6 @@ jobs: - name: test run: ./ci/github/unit-testing/models-library.bash test - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -1688,24 +1357,13 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/common-library/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/common-library.bash install - name: typecheck @@ -1713,7 +1371,6 @@ jobs: - name: test run: ./ci/github/unit-testing/common-library.bash test - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -1731,33 
+1388,20 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/notifications-library/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/unit-testing/notifications-library.bash install - name: typecheck run: ./ci/github/unit-testing/notifications-library.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/notifications-library.bash test - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -1776,35 +1420,22 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: install rclone - run: sudo ./ci/github/helpers/install_rclone.bash - - name: setup python environment - uses: actions/setup-python@v5 + - name: Checkout code + uses: actions/checkout@v5 + - name: Setup environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/simcore-sdk/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash + - name: install rclone + run: sudo ./ci/github/helpers/install_rclone.bash - name: install run: ./ci/github/unit-testing/simcore-sdk.bash install - name: typecheck run: ./ci/github/unit-testing/simcore-sdk.bash typecheck - name: test - if: ${{ !cancelled() }} run: ./ci/github/unit-testing/simcore-sdk.bash test - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -1824,6 +1455,7 @@ jobs: unit-test-clusters-keeper, unit-test-dask-sidecar, unit-test-aws-library, + unit-test-celery-library, unit-test-dask-task-models-library, unit-test-datcore-adapter, unit-test-director-v2, @@ -1851,14 +1483,13 @@ jobs: ] runs-on: ubuntu-latest steps: - - name: a previous unit-test job failed - if: ${{ contains(join(needs.*.result, ','), 'failure') }} + - name: a previous unit-test job failed or was cancelled + if: ${{ contains(join(needs.*.result, ','), 'failure') || contains(join(needs.*.result, ','), 'cancelled') }} run: | - echo "::error title=ERROR::one of the unit-tests failed!" - echo "${{ join(needs.*.result, ',') }}" + echo "::error title=ERROR::one of the unit-tests failed or was cancelled!" exit 1 - name: all the previous unit-tests were run successfully or skipped - if: ${{ !contains(join(needs.*.result, ','), 'failure') }} + if: ${{ !contains(join(needs.*.result, ','), 'failure') && !contains(join(needs.*.result, ','), 'cancelled') }} run: echo "::notice All good!" 
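Editor's note: the recurring change across the unit-test jobs above is that the per-job boilerplate (docker buildx setup, setup-python, uv installation and the show-system-versions step) is folded into one local composite action, ./.github/actions/setup-simcore-env, driven by the python-version and cache-dependency-glob inputs (plus a show-system-versions switch that the deploy job later sets to false). The action's definition is not part of this excerpt, so the following is only a sketch of what such a composite action could look like, inferred from the steps the jobs drop and the inputs they pass; all defaults and descriptions are assumptions, not the file from this PR.

# Hypothetical sketch of .github/actions/setup-simcore-env/action.yml (NOT part of this diff)
name: setup-simcore-env
description: shared toolchain setup for simcore CI jobs (assumed layout)
inputs:
  python-version:
    description: Python interpreter to install
    required: true
  cache-dependency-glob:
    description: glob forwarded to setup-uv for dependency caching
    required: false
    default: ""
  show-system-versions:
    description: whether to print system/tool versions
    required: false
    default: "true"
runs:
  using: "composite"
  steps:
    - name: setup docker buildx
      uses: docker/setup-buildx-action@v3
      with:
        driver: docker-container
    - name: setup python environment
      uses: actions/setup-python@v6
      with:
        python-version: ${{ inputs.python-version }}
    - name: install uv
      uses: astral-sh/setup-uv@v6
      with:
        version: "0.6.x"
        enable-cache: false
        cache-dependency-glob: ${{ inputs.cache-dependency-glob }}
    - name: show system version
      if: ${{ inputs.show-system-versions == 'true' }}
      shell: bash
      run: ./ci/helpers/show_system_versions.bash

Whatever its exact content, the design benefit is visible in the diff itself: action and tool versions are bumped in one place instead of in every job.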
integration-test-webserver-01: @@ -1873,115 +1504,27 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - uses: actions/checkout@v5 + - name: Setup SimCore environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - # FIXME: Workaround for https://github.com/actions/download-artifact/issues/249 - - name: download docker images with retry - uses: Wandalen/wretry.action@master - with: - action: actions/download-artifact@v4 - with: | - name: docker-buildx-images-${{ runner.os }}-${{ github.sha }}-backend - path: /${{ runner.temp }}/build - attempt_limit: 5 - attempt_delay: 1000 - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/web/server/requirements/ci.txt" - - name: load docker images - run: make load-images local-src=/${{ runner.temp }}/build - - name: show system version - run: ./ci/helpers/show_system_versions.bash - - name: install - run: ./ci/github/integration-testing/webserver.bash install - - name: test - run: ./ci/github/integration-testing/webserver.bash test 01 - - name: upload failed tests logs - if: ${{ failure() }} - uses: actions/upload-artifact@v4 - with: - name: ${{ github.job }}_docker_logs - path: ./services/web/server/test_failures - - name: cleanup - if: ${{ !cancelled() }} - run: ./ci/github/integration-testing/webserver.bash clean_up - - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} - env: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - with: - flags: integrationtests #optional - - integration-test-webserver-02: - needs: [changes, build-test-images] - if: ${{ needs.changes.outputs.anything-py == 'true' || needs.changes.outputs.webserver == 'true' || github.event_name == 'push' }} - timeout-minutes: 30 # if this timeout gets too small, then split the tests - name: "[int] webserver 02" - runs-on: ${{ matrix.os }} - strategy: - matrix: - python: ["3.11"] - os: [ubuntu-24.04] - fail-fast: false - steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 + - name: Download and load Docker images + uses: ./.github/actions/download-load-docker-images with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python }} - - name: expose github runtime for buildx - uses: crazy-max/ghaction-github-runtime@v3 - # FIXME: Workaround for https://github.com/actions/download-artifact/issues/249 - - name: download docker images with retry - uses: Wandalen/wretry.action@master - with: - action: actions/download-artifact@v4 - with: | - name: docker-buildx-images-${{ runner.os }}-${{ github.sha }}-backend - path: /${{ runner.temp }}/build - attempt_limit: 5 - attempt_delay: 1000 - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false - cache-dependency-glob: "**/web/server/requirements/ci.txt" - - name: load docker images - run: make load-images local-src=/${{ runner.temp }}/build - - name: show system version - run: ./ci/helpers/show_system_versions.bash + artifact-name-pattern: 'backend' - name: install run: ./ci/github/integration-testing/webserver.bash install - name: test - run: ./ci/github/integration-testing/webserver.bash test 02 + run: 
./ci/github/integration-testing/webserver.bash test 01 - name: upload failed tests logs if: ${{ failure() }} uses: actions/upload-artifact@v4 with: name: ${{ github.job }}_docker_logs path: ./services/web/server/test_failures - - name: cleanup - if: ${{ !cancelled() }} - run: ./ci/github/integration-testing/webserver.bash clean_up - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -1999,38 +1542,16 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - uses: actions/checkout@v5 + - name: Setup SimCore environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: expose github runtime for buildx - uses: crazy-max/ghaction-github-runtime@v3 - # FIXME: Workaround for https://github.com/actions/download-artifact/issues/249 - - name: download docker images with retry - uses: Wandalen/wretry.action@master - with: - action: actions/download-artifact@v4 - with: | - name: docker-buildx-images-${{ runner.os }}-${{ github.sha }}-backend - path: /${{ runner.temp }}/build - attempt_limit: 5 - attempt_delay: 1000 - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/director-v2/requirements/ci.txt" - - name: load docker images - run: make load-images local-src=/${{ runner.temp }}/build - - name: show system version - run: ./ci/helpers/show_system_versions.bash + - name: Download and load Docker images + uses: ./.github/actions/download-load-docker-images + with: + artifact-name-pattern: 'backend' - name: install run: ./ci/github/integration-testing/director-v2.bash install - name: test @@ -2041,11 +1562,7 @@ jobs: with: name: ${{ github.job }}_docker_logs path: ./services/director-v2/test_failures - - name: cleanup - if: ${{ !cancelled() }} - run: ./ci/github/integration-testing/director-v2.bash clean_up - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -2066,44 +1583,21 @@ jobs: # NOTE: DIRECTOR_DEFAULT_MAX_* used for integration-tests that include `director` service DIRECTOR_DEFAULT_MAX_MEMORY: 268435456 DIRECTOR_DEFAULT_MAX_NANO_CPUS: 10000000 - DIRECTOR_TRACING: null steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 + - uses: actions/checkout@v5 + - name: Setup SimCore environment + uses: ./.github/actions/setup-simcore-env with: - driver: docker-container + python-version: ${{ matrix.python }} + cache-dependency-glob: "**/director-v2/requirements/ci.txt" - name: setup rclone docker volume plugin run: sudo ./ci/github/helpers/install_rclone_docker_volume_plugin.bash - - name: setup python environment - uses: actions/setup-python@v5 + - name: Download and load Docker images + uses: ./.github/actions/download-load-docker-images with: - python-version: ${{ matrix.python }} - - name: expose github runtime for buildx - uses: crazy-max/ghaction-github-runtime@v3 - # FIXME: Workaround for https://github.com/actions/download-artifact/issues/249 - - name: download docker images with retry - uses: Wandalen/wretry.action@master - with: - action: actions/download-artifact@v4 - with: | - name: docker-buildx-images-${{ runner.os }}-${{ github.sha }}-backend - path: /${{ runner.temp 
}}/build - attempt_limit: 5 - attempt_delay: 1000 + artifact-name-pattern: 'backend' - name: install rclone run: sudo ./ci/github/helpers/install_rclone.bash - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false - cache-dependency-glob: "**/director-v2/requirements/ci.txt" - - name: load docker images - run: make load-images local-src=/${{ runner.temp }}/build - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/integration-testing/director-v2.bash install - name: test @@ -2114,11 +1608,7 @@ jobs: with: name: ${{ github.job }}_docker_logs path: ./services/director-v2/test_failures - - name: cleanup - if: ${{ !cancelled() }} - run: ./ci/github/integration-testing/director-v2.bash clean_up - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -2136,40 +1626,18 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - uses: actions/checkout@v5 + - name: Setup SimCore environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: expose github runtime for buildx - uses: crazy-max/ghaction-github-runtime@v3 - # FIXME: Workaround for https://github.com/actions/download-artifact/issues/249 - - name: download docker images with retry - uses: Wandalen/wretry.action@master - with: - action: actions/download-artifact@v4 - with: | - name: docker-buildx-images-${{ runner.os }}-${{ github.sha }}-backend - path: /${{ runner.temp }}/build - attempt_limit: 5 - attempt_delay: 1000 + cache-dependency-glob: "**/dynamic-sidecar/requirements/ci.txt" + - name: Download and load Docker images + uses: ./.github/actions/download-load-docker-images + with: + artifact-name-pattern: 'backend' - name: install rclone run: sudo ./ci/github/helpers/install_rclone.bash - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false - cache-dependency-glob: "**/dynamic-sidecar/requirements/ci.txt" - - name: load docker images - run: make load-images local-src=/${{ runner.temp }}/build - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/integration-testing/dynamic-sidecar.bash install - name: test @@ -2180,11 +1648,7 @@ jobs: with: name: ${{ github.job }}_docker_logs path: ./services/dynamic-sidecar/test_failures - - name: cleanup - if: ${{ !cancelled() }} - run: ./ci/github/integration-testing/dynamic-sidecar.bash clean_up - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -2203,38 +1667,16 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - uses: actions/checkout@v5 + - name: Setup SimCore environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: expose github runtime for buildx - uses: crazy-max/ghaction-github-runtime@v3 - # FIXME: Workaround for https://github.com/actions/download-artifact/issues/249 - - name: download docker images with retry - uses: Wandalen/wretry.action@master - 
with: - action: actions/download-artifact@v4 - with: | - name: docker-buildx-images-${{ runner.os }}-${{ github.sha }}-backend - path: /${{ runner.temp }}/build - attempt_limit: 5 - attempt_delay: 1000 - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/docker-api-proxy/requirements/ci.txt" - - name: load docker images - run: make load-images local-src=/${{ runner.temp }}/build - - name: show system version - run: ./ci/helpers/show_system_versions.bash + - name: Download and load Docker images + uses: ./.github/actions/download-load-docker-images + with: + artifact-name-pattern: 'backend' - name: install run: ./ci/github/integration-testing/docker-api-proxy.bash install - name: test @@ -2245,11 +1687,7 @@ jobs: with: name: ${{ github.job }}_docker_logs path: ./services/docker-api-proxy/test_failures - - name: cleanup - if: ${{ !cancelled() }} - run: ./ci/github/integration-testing/docker-api-proxy.bash clean_up - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -2267,38 +1705,18 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 + - uses: actions/checkout@v5 + - name: Setup SimCore environment + uses: ./.github/actions/setup-simcore-env with: - driver: docker-container + python-version: ${{ matrix.python }} + cache-dependency-glob: "**/simcore-sdk/requirements/ci.txt" - name: install rclone run: sudo ./ci/github/helpers/install_rclone.bash - - name: setup python environment - uses: actions/setup-python@v5 + - name: Download and load Docker images + uses: ./.github/actions/download-load-docker-images with: - python-version: ${{ matrix.python }} - # FIXME: Workaround for https://github.com/actions/download-artifact/issues/249 - - name: download docker images with retry - uses: Wandalen/wretry.action@master - with: - action: actions/download-artifact@v4 - with: | - name: docker-buildx-images-${{ runner.os }}-${{ github.sha }}-backend - path: /${{ runner.temp }}/build - attempt_limit: 5 - attempt_delay: 1000 - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false - cache-dependency-glob: "**/simcore-sdk/requirements/ci.txt" - - name: load docker images - run: make load-images local-src=/${{ runner.temp }}/build - - name: show system version - run: ./ci/helpers/show_system_versions.bash + artifact-name-pattern: 'backend' - name: install run: ./ci/github/integration-testing/simcore-sdk.bash install - name: test @@ -2309,11 +1727,7 @@ jobs: with: name: ${{ github.job }}_docker_logs path: ./packages/simcore-sdk/test_failures - - name: cleanup - if: ${{ !cancelled() }} - run: ./ci/github/integration-testing/simcore-sdk.bash clean_up - uses: codecov/codecov-action@v5 - if: ${{ !cancelled() }} env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} with: @@ -2330,17 +1744,16 @@ jobs: integration-test-docker-api-proxy, integration-test-simcore-sdk, integration-test-webserver-01, - integration-test-webserver-02, ] runs-on: ubuntu-latest steps: - - name: a previous integration-test job failed - if: ${{ contains(join(needs.*.result, ','), 'failure') }} + - name: a previous integration-test job failed or was cancelled + if: ${{ contains(join(needs.*.result, ','), 'failure') || contains(join(needs.*.result, ','), 'cancelled') }} run: | - echo "::error title=ERROR::one of the integration-tests failed!" 
+ echo "::error title=ERROR::one of the integration-tests failed or was cancelled!" exit 1 - name: all the previous integration-tests were run successfully or skipped - if: ${{ !contains(join(needs.*.result, ','), 'failure') }} + if: ${{ !contains(join(needs.*.result, ','), 'failure') && !contains(join(needs.*.result, ','), 'cancelled') }} run: echo "::notice All good!" system-test-public-api: @@ -2355,38 +1768,16 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - uses: actions/checkout@v5 + - name: Setup SimCore environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: expose github runtime for buildx - uses: crazy-max/ghaction-github-runtime@v3 - # FIXME: Workaround for https://github.com/actions/download-artifact/issues/249 - - name: download docker images with retry - uses: Wandalen/wretry.action@master - with: - action: actions/download-artifact@v4 - with: | - pattern: docker-buildx-images-${{ runner.os }}-${{ github.sha }}-* - path: /${{ runner.temp }}/build - attempt_limit: 5 - attempt_delay: 1000 - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/public-api/requirements/ci.txt" - - name: load docker images - run: make load-images local-src=/${{ runner.temp }}/build - - name: show system version - run: ./ci/helpers/show_system_versions.bash + - name: Download and load Docker images + uses: ./.github/actions/download-load-docker-images + with: + artifact-name-pattern: '*' - name: install env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -2399,9 +1790,6 @@ jobs: with: name: ${{ github.job }}_docker_logs path: ./test_failures - - name: cleanup - if: ${{ !cancelled() }} - run: ./ci/github/system-testing/public-api.bash clean_up system-test-swarm-deploy: needs: [changes, build-test-images] @@ -2415,38 +1803,16 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - uses: actions/checkout@v5 + - name: Setup SimCore environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: expose github runtime for buildx - uses: crazy-max/ghaction-github-runtime@v3 - # FIXME: Workaround for https://github.com/actions/download-artifact/issues/249 - - name: download docker images with retry - uses: Wandalen/wretry.action@master - with: - action: actions/download-artifact@v4 - with: | - pattern: docker-buildx-images-${{ runner.os }}-${{ github.sha }}-* - path: /${{ runner.temp }}/build - attempt_limit: 5 - attempt_delay: 1000 - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/swarm-deploy/requirements/ci.txt" - - name: load docker images - run: make load-images local-src=/${{ runner.temp }}/build - - name: show system version - run: ./ci/helpers/show_system_versions.bash + - name: Download and load Docker images + uses: ./.github/actions/download-load-docker-images + with: + artifact-name-pattern: '*' - name: install run: ./ci/github/system-testing/swarm-deploy.bash install - name: test @@ -2464,9 +1830,7 @@ jobs: with: name: ${{ 
github.job }}_docker_logs path: ./test_failures - - name: cleanup - if: ${{ !cancelled() }} - run: ./ci/github/system-testing/swarm-deploy.bash clean_up + system-test-e2e: needs: [changes, build-test-images] @@ -2481,72 +1845,50 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - uses: actions/checkout@v5 + - name: Setup SimCore environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - uses: actions/setup-node@v4.1.0 + cache-dependency-glob: "**/e2e/requirements/requirements.txt" + - uses: actions/setup-node@v5.0.0 with: node-version: ${{ matrix.node }} cache: "npm" cache-dependency-path: "tests/e2e/package-lock.json" - - name: expose github runtime for buildx - uses: crazy-max/ghaction-github-runtime@v3 - # FIXME: Workaround for https://github.com/actions/download-artifact/issues/249 - - name: download docker images with retry - uses: Wandalen/wretry.action@master - with: - action: actions/download-artifact@v4 - with: | - pattern: docker-buildx-images-${{ runner.os }}-${{ github.sha }}-* - path: /${{ runner.temp }}/build - attempt_limit: 5 - attempt_delay: 1000 - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false - cache-dependency-glob: "**/e2e/requirements/requirements.txt" - - name: load docker images - run: make load-images local-src=/${{ runner.temp }}/build - - name: show system version - run: ./ci/helpers/show_system_versions.bash + - name: Download and load Docker images + uses: ./.github/actions/download-load-docker-images + with: + artifact-name-pattern: '*' - name: setup run: ./ci/github/system-testing/e2e.bash install - name: test run: ./ci/github/system-testing/e2e.bash test + - name: wait before dumping logs + if: ${{ failure() }} + run: sleep 5 - name: dump docker logs + if: ${{ failure() }} id: docker_logs_dump - if: ${{ !cancelled() }} run: ./ci/github/system-testing/e2e.bash dump_docker_logs - name: upload docker logs - if: ${{ !cancelled() }} + if: ${{ failure() }} uses: actions/upload-artifact@v4 with: name: ${{ github.job }}_docker_logs path: ./tests/e2e/test_failures - name: upload screenshots - if: ${{ !cancelled() }} + if: ${{ failure() }} uses: actions/upload-artifact@v4 with: name: ${{ github.job }}_screenshots path: tests/e2e/screenshots - name: upload e2e logs - if: ${{ !cancelled() }} + if: ${{ failure() }} uses: actions/upload-artifact@v4 with: name: ${{ github.job }}_logs path: tests/e2e/logs - - name: cleanup - if: ${{ !cancelled() }} - run: ./ci/github/system-testing/e2e.bash clean_up system-test-e2e-playwright: needs: [changes, build-test-images] @@ -2563,53 +1905,39 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - uses: actions/checkout@v5 + - name: Setup SimCore environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/e2e-playwright/requirements/ci.txt" - - name: expose github runtime for buildx - uses: crazy-max/ghaction-github-runtime@v3 - - name: download docker 
images - uses: actions/download-artifact@v4 - with: - pattern: docker-buildx-images-${{ runner.os }}-${{ github.sha }}-* - path: /${{ runner.temp }}/build - - name: load docker images - run: make load-images local-src=/${{ runner.temp }}/build + - name: Download and load Docker images + uses: ./.github/actions/download-load-docker-images + with: + artifact-name-pattern: '*' - name: prepare devenv run: make devenv - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: setup run: | ./ci/github/system-testing/e2e-playwright.bash install - name: test run: | ./ci/github/system-testing/e2e-playwright.bash test + - name: wait before dumping logs + if: ${{ failure() }} + run: sleep 5 - name: dump docker logs + if: ${{ failure() }} id: docker_logs_dump - if: ${{ !cancelled() }} run: ./ci/github/system-testing/e2e-playwright.bash dump_docker_logs - name: upload docker logs - if: ${{ !cancelled() }} + if: ${{ failure() }} uses: actions/upload-artifact@v4 with: name: ${{ github.job }}_docker_logs path: ./tests/e2e-playwright/test_failures - name: upload tracing if failed - if: ${{ !cancelled() }} + if: ${{ failure() }} uses: actions/upload-artifact@v4 with: name: ${{ github.job }}_tracing @@ -2625,31 +1953,16 @@ jobs: os: [ubuntu-24.04] fail-fast: false steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: setup python environment - uses: actions/setup-python@v5 + - uses: actions/checkout@v5 + - name: Setup SimCore environment + uses: ./.github/actions/setup-simcore-env with: python-version: ${{ matrix.python }} - - name: install uv - uses: astral-sh/setup-uv@v6 - with: - version: "0.6.x" - enable-cache: false cache-dependency-glob: "**/environment-setup/requirements/ci.txt" - - name: show system version - run: ./ci/helpers/show_system_versions.bash - name: install run: ./ci/github/system-testing/environment-setup.bash install - name: test run: ./ci/github/system-testing/environment-setup.bash test - - name: cleanup - if: ${{ !cancelled() }} - run: ./ci/github/system-testing/environment-setup.bash clean_up system-tests: # NOTE: this is a github required status check! @@ -2664,13 +1977,13 @@ jobs: ] runs-on: ubuntu-latest steps: - - name: a previous system-test job failed - if: ${{ contains(join(needs.*.result, ','), 'failure') }} + - name: a previous system-test job failed or was cancelled + if: ${{ contains(join(needs.*.result, ','), 'failure') || contains(join(needs.*.result, ','), 'cancelled') }} run: | - echo "::error title=ERROR::one of the system-tests failed!" + echo "::error title=ERROR::one of the system-tests failed or was cancelled!" exit 1 - name: all the previous system-tests were run successfully or skipped - if: ${{ !contains(join(needs.*.result, ','), 'failure') }} + if: ${{ !contains(join(needs.*.result, ','), 'failure') && !contains(join(needs.*.result, ','), 'cancelled') }} run: echo "::notice All good!" 
deploy: @@ -2689,31 +2002,17 @@ jobs: DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} steps: - - uses: actions/checkout@v4 - - name: setup docker buildx - id: buildx - uses: docker/setup-buildx-action@v3 - with: - driver: docker-container - - name: install uv - uses: astral-sh/setup-uv@v6 + - uses: actions/checkout@v5 + - name: Setup SimCore environment + uses: ./.github/actions/setup-simcore-env with: - version: "0.6.x" - enable-cache: false + python-version: ${{ matrix.python }} cache-dependency-glob: "**/e2e-playwright/requirements/ci.txt" - # FIXME: Workaround for https://github.com/actions/download-artifact/issues/249 - - name: download docker images with retry - uses: Wandalen/wretry.action@master - with: - action: actions/download-artifact@v4 - with: | - pattern: docker-buildx-images-${{ runner.os }}-${{ github.sha }}-* - path: /${{ runner.temp }}/build - attempt_limit: 5 - attempt_delay: 1000 - - name: load docker images - run: | - make load-images local-src=/${{ runner.temp }}/build + show-system-versions: false + - name: Download and load Docker images + uses: ./.github/actions/download-load-docker-images + with: + artifact-name-pattern: '*' - name: set owner variable run: echo "OWNER=${GITHUB_REPOSITORY%/*}" >> $GITHUB_ENV - if: github.ref == 'refs/heads/master' diff --git a/.github/workflows/ci-testing-pull-request.yml b/.github/workflows/ci-testing-pull-request.yml index 6b06cb58c9e4..7725a75fe618 100644 --- a/.github/workflows/ci-testing-pull-request.yml +++ b/.github/workflows/ci-testing-pull-request.yml @@ -35,7 +35,7 @@ jobs: runs-on: ubuntu-latest steps: - name: setup python environment - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.11" - name: install uv @@ -44,7 +44,7 @@ jobs: version: "0.6.x" enable-cache: false - name: checkout source branch - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Generate openapi specs run: | make devenv @@ -65,11 +65,11 @@ jobs: runs-on: ubuntu-latest steps: - name: setup python environment - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.11" - name: checkout - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Set environment variables based on event type run: | if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then @@ -93,11 +93,11 @@ jobs: runs-on: ubuntu-latest steps: - name: setup python environment - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.11" - name: checkout - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Set environment variables based on event type run: | if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then diff --git a/.github/workflows/cleanup-caches-by-branches.yml b/.github/workflows/cleanup-caches-by-branches.yml index bd3f5e3290b2..bcce070aba64 100644 --- a/.github/workflows/cleanup-caches-by-branches.yml +++ b/.github/workflows/cleanup-caches-by-branches.yml @@ -8,23 +8,23 @@ on: jobs: cleanup: runs-on: ubuntu-latest + permissions: + actions: write steps: - name: Cleanup run: | - gh extension install actions/gh-actions-cache - - echo "Fetching list of cache key" - cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 ) + echo "Fetching list of cache keys" + cacheKeysForPR=$(gh cache list --ref $BRANCH --limit 100 --json id --jq '.[].id') ## Setting this to not fail the workflow while deleting cache keys. set +e echo "Deleting caches..." 
for cacheKey in $cacheKeysForPR do - gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm + gh cache delete $cacheKey done echo "Done" env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - REPO: ${{ github.repository }} + GH_TOKEN: ${{ github.token }} + GH_REPO: ${{ github.repository }} BRANCH: refs/pull/${{ github.event.pull_request.number }}/merge diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 1bd3c6b40c1a..288a86a6584f 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -26,7 +26,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Initialize CodeQL tools for scanning uses: github/codeql-action/init@v3 diff --git a/.gitignore b/.gitignore index 964ac9035e6a..4c2bf1934a9c 100644 --- a/.gitignore +++ b/.gitignore @@ -55,7 +55,6 @@ locust_report/ nosetests.xml test_failures/ - # Translations *.mo *.pot @@ -182,3 +181,6 @@ tests/public-api/osparc_python_wheels/* # osparc-config repo files repo.config + +# repomix +.repomix/* diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a3178f8c69a3..6a3d9e0dee7a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: - id: no-commit-to-branch # NOTE: Keep order as pyupgrade (will update code) then pycln (remove unused imports), then isort (sort them) and black (final formatting) - repo: https://github.com/asottile/pyupgrade - rev: v3.19.1 + rev: v3.20.0 hooks: - id: pyupgrade args: @@ -36,7 +36,7 @@ repos: args: [--all, --expand-stars] name: prune imports - repo: https://github.com/PyCQA/isort - rev: 6.0.0 + rev: 6.0.1 hooks: - id: isort args: ["--profile", "black"] diff --git a/.vscode/launch.template.json b/.vscode/launch.template.json index dc3fd1cd481e..ea3d423bcb84 100644 --- a/.vscode/launch.template.json +++ b/.vscode/launch.template.json @@ -44,6 +44,28 @@ "justMyCode": false }, { + // This test adds --external-envfile and expects a file named ".secrets" in the workspace root. + "name": "Python: Test w/ repo.config", + "type": "debugpy", + "request": "launch", + "module": "pytest", + "args": [ + "--ff", + "--log-cli-level=INFO", + "--external-envfile=${workspaceFolder}/.secrets", + "--pdb", + "--setup-show", + "--durations=5", + "-s", + "-vv", + "${file}" + ], + "cwd": "${workspaceFolder}", + "console": "integratedTerminal", + "justMyCode": false + }, + { + // This tests enables the httpx spy and dumps captures in a json. Mainly for api-server "name": "Python: Test-Httpx-Spy", "type": "debugpy", "request": "launch", diff --git a/.vscode/settings.template.json b/.vscode/settings.template.json index 4ebda8488453..feec2786703f 100644 --- a/.vscode/settings.template.json +++ b/.vscode/settings.template.json @@ -1,7 +1,6 @@ // This is a template. 
Clone and replace extension ".template.json" by ".json" { "autoDocstring.docstringFormat": "pep257", - "editor.tabSize": 2, "editor.insertSpaces": true, "editor.detectIndentation": false, @@ -34,6 +33,9 @@ "python.analysis.typeCheckingMode": "basic", "python.analysis.extraPaths": [ "./packages/aws-library/src", + "./packages/celery-library/src", + "./packages/common-library/src", + "./packages/dask-task-models-library/src", "./packages/models-library/src", "./packages/postgres-database/src", "./packages/postgres-database/tests", diff --git a/Makefile b/Makefile index 61cae56cc0dd..61d42b4ff96e 100644 --- a/Makefile +++ b/Makefile @@ -89,7 +89,31 @@ export DOCKER_REGISTRY ?= itisfoundation MAKEFILES_WITH_OPENAPI_SPECS := $(shell find . -mindepth 2 -type f -name 'Makefile' -not -path '*/.*' -exec grep -l '^openapi-specs:' {} \; | xargs realpath) +# WSL 2 tricks +define _check_wsl_mirroring +$(shell \ + if [ "$(IS_WSL2)" = "WSL2" ]; then \ + win_user=$$(powershell.exe '$$env:UserName' | tr -d '\r' | tail -n 1 | xargs); \ + config_path="/mnt/c/Users/$$win_user/.wslconfig"; \ + if [ -f "$$config_path" ] && grep -q "networkingMode.*=.*mirrored" "$$config_path" 2>/dev/null; then \ + echo "true"; \ + else \ + echo "false"; \ + fi; \ + else \ + echo "false"; \ + fi \ +) +endef + +WSL_MIRRORED := $(_check_wsl_mirroring) + + +ifeq ($(WSL_MIRRORED),true) +get_my_ip := 127.0.0.1 +else get_my_ip := $(shell (hostname --all-ip-addresses || hostname -i) 2>/dev/null | cut --delimiter=" " --fields=1) +endif # NOTE: this is only for WSL2 as the WSL2 subsystem IP is changing on each reboot ifeq ($(IS_WSL2),WSL2) @@ -378,8 +402,8 @@ up-devel-frontend: .stack-simcore-development-frontend.yml .init-swarm ## Every @$(MAKE_C) services/dask-sidecar certificates # Deploy stack $(SWARM_STACK_NAME) [back-end] @docker stack deploy --detach=true --with-registry-auth -c $< $(SWARM_STACK_NAME) - @$(MAKE) .deploy-vendors @$(MAKE) .deploy-ops + @$(MAKE) .deploy-vendors @$(_show_endpoints) @$(MAKE_C) services/static-webserver/client follow-dev-logs @@ -389,8 +413,8 @@ ifeq ($(target),) @$(MAKE_C) services/dask-sidecar certificates # Deploy stack $(SWARM_STACK_NAME) @docker stack deploy --detach=true --with-registry-auth -c $< $(SWARM_STACK_NAME) - @$(MAKE) .deploy-vendors @$(MAKE) .deploy-ops + @$(MAKE) .deploy-vendors else # deploys ONLY $(target) service @docker compose --file $< up --detach $(target) @@ -502,11 +526,6 @@ push-version: tag-version .venv: .check-uv-installed @uv venv $@ @echo "# upgrading tools to latest version in" && $@/bin/python --version - @uv pip --quiet install --upgrade \ - pip~=24.0 \ - wheel \ - setuptools \ - uv @uv pip list devenv: .venv test_python_version .vscode/settings.json .vscode/launch.json ## create a development environment (configs, virtual-env, hooks, ...) @@ -676,6 +695,7 @@ local-registry: .env ## creates a local docker registry and configure simcore to sudo mv /tmp/daemon.json /etc/docker/daemon.json &&\ echo restarting engine... &&\ sudo service docker restart &&\ + sleep 5 &&\ echo done) @$(if $(shell docker ps --format="{{.Names}}" | grep registry),,\ @@ -687,7 +707,7 @@ local-registry: .env ## creates a local docker registry and configure simcore to --publish 5000:5000 \ --volume $(LOCAL_REGISTRY_VOLUME):/var/lib/registry \ --name $(LOCAL_REGISTRY_HOSTNAME) \ - registry:2) + registry:3) # WARNING: environment file .env is now setup to use local registry on port 5000 without any security (take care!)... 
@echo REGISTRY_AUTH=False >> .env diff --git a/api/specs/web-server/_auth.py b/api/specs/web-server/_auth.py index 7860ef98f03e..cc427a189cdc 100644 --- a/api/specs/web-server/_auth.py +++ b/api/specs/web-server/_auth.py @@ -4,7 +4,7 @@ # pylint: disable=too-many-arguments -from typing import Any +from typing import Annotated, Any from fastapi import APIRouter, status from models_library.api_schemas_webserver.auth import ( @@ -15,7 +15,7 @@ from models_library.rest_error import EnvelopedError, Log from pydantic import BaseModel, Field, confloat from simcore_service_webserver._meta import API_VTAG -from simcore_service_webserver.login._controller.rest.auth import ( +from simcore_service_webserver.login._controller.rest.auth_schemas import ( LoginBody, LoginNextPage, LoginTwoFactorAuthBody, @@ -30,7 +30,7 @@ PhoneConfirmationBody, ResetPasswordConfirmation, ) -from simcore_service_webserver.login._controller.rest.registration import ( +from simcore_service_webserver.login._controller.rest.registration_schemas import ( InvitationCheck, InvitationInfo, RegisterBody, @@ -153,17 +153,15 @@ async def logout(_body: LogoutBody): @router.get( "/auth:check", - operation_id="check_authentication", status_code=status.HTTP_204_NO_CONTENT, responses={ status.HTTP_401_UNAUTHORIZED: { "model": EnvelopedError, - "description": "unauthorized reset due to invalid token code", } }, ) async def check_auth(): - """checks if user is authenticated in the platform""" + """checks whether user request is authenticated""" @router.post( @@ -211,13 +209,16 @@ async def change_email(_body: ChangeEmailBody): class PasswordCheckSchema(BaseModel): - strength: confloat(ge=0.0, le=1.0) = Field( # type: ignore - ..., - description="The strength of the password ranges from 0 (extremely weak) and 1 (extremely strong)", - ) - rating: str | None = Field( - None, description="Human readable rating from infinitely weak to very strong" - ) + strength: Annotated[ + confloat(ge=0.0, le=1.0), + Field( + description="The strength of the password ranges from 0 (extremely weak) and 1 (extremely strong)", + ), + ] + rating: Annotated[ + str | None, + Field(description="Human readable rating from infinitely weak to very strong"), + ] = None improvements: Any | None = None diff --git a/api/specs/web-server/_auth_api_keys.py b/api/specs/web-server/_auth_api_keys.py index bcebb0423767..8b160f9df212 100644 --- a/api/specs/web-server/_auth_api_keys.py +++ b/api/specs/web-server/_auth_api_keys.py @@ -1,10 +1,12 @@ from typing import Annotated +from _common import as_query from fastapi import APIRouter, Depends, status from models_library.api_schemas_webserver.auth import ( ApiKeyCreateRequest, ApiKeyCreateResponse, ApiKeyGet, + ApiKeyListQueryParams, ) from models_library.generics import Envelope from models_library.rest_error import EnvelopedError @@ -39,7 +41,9 @@ async def create_api_key(_body: ApiKeyCreateRequest): response_model=Envelope[list[ApiKeyGet]], status_code=status.HTTP_200_OK, ) -async def list_api_keys(): +async def list_api_keys( + _query: Annotated[as_query(ApiKeyListQueryParams), Depends()], +): """lists API keys by this user""" diff --git a/api/specs/web-server/_computations.py b/api/specs/web-server/_computations.py index fb60fce41758..6264b578ee85 100644 --- a/api/specs/web-server/_computations.py +++ b/api/specs/web-server/_computations.py @@ -4,6 +4,11 @@ from fastapi import APIRouter, Depends, status from fastapi_pagination import Page from models_library.api_schemas_webserver.computations import ( + 
ComputationCollectionRunListQueryParams, + ComputationCollectionRunPathParams, + ComputationCollectionRunRestGet, + ComputationCollectionRunTaskListQueryParams, + ComputationCollectionRunTaskRestGet, ComputationGet, ComputationPathParams, ComputationRunIterationsLatestListQueryParams, @@ -95,3 +100,22 @@ async def list_computations_latest_iteration_tasks( _query: Annotated[as_query(ComputationTaskListQueryParams), Depends()], _path: Annotated[ComputationTaskPathParams, Depends()], ): ... + + +@router.get( + "/computation-collection-runs", + response_model=Page[ComputationCollectionRunRestGet], +) +async def list_computation_collection_runs( + _query: Annotated[as_query(ComputationCollectionRunListQueryParams), Depends()], +): ... + + +@router.get( + "/computation-collection-runs/{collection_run_id}/tasks", + response_model=Page[ComputationCollectionRunTaskRestGet], +) +async def list_computation_collection_run_tasks( + _query: Annotated[as_query(ComputationCollectionRunTaskListQueryParams), Depends()], + _path: Annotated[ComputationCollectionRunPathParams, Depends()], +): ... diff --git a/api/specs/web-server/_conversations.py b/api/specs/web-server/_conversations.py new file mode 100644 index 000000000000..96c04b9e3227 --- /dev/null +++ b/api/specs/web-server/_conversations.py @@ -0,0 +1,149 @@ +"""Helper script to automatically generate OAS + +This OAS are the source of truth +""" + +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + + +from typing import Annotated + +from _common import as_query +from fastapi import APIRouter, Depends, status +from models_library.api_schemas_webserver.conversations import ( + ConversationMessagePatch, + ConversationMessageRestGet, + ConversationPatch, + ConversationRestGet, +) +from models_library.generics import Envelope +from models_library.rest_pagination import Page +from simcore_service_webserver._meta import API_VTAG +from simcore_service_webserver.conversations._controller._common import ( + ConversationPathParams, +) +from simcore_service_webserver.conversations._controller._conversations_messages_rest import ( + _ConversationMessageCreateBodyParams, + _ConversationMessagePathParams, + _ListConversationMessageQueryParams, +) +from simcore_service_webserver.conversations._controller._conversations_rest import ( + _ConversationsCreateBodyParams, + _ListConversationsQueryParams, +) + +router = APIRouter( + prefix=f"/{API_VTAG}", + tags=[ + "conversations", + ], +) + + +# +# API entrypoints CONVERSATIONS +# + + +@router.post( + "/conversations", + response_model=Envelope[ConversationRestGet], + status_code=status.HTTP_201_CREATED, +) +async def create_conversation( + _body: _ConversationsCreateBodyParams, +): ... + + +@router.get( + "/conversations", + response_model=Page[ConversationRestGet], +) +async def list_conversations( + _query: Annotated[_ListConversationsQueryParams, Depends()], +): ... + + +@router.patch( + "/conversations/{conversation_id}", + response_model=Envelope[ConversationRestGet], +) +async def update_conversation( + _params: Annotated[ConversationPathParams, Depends()], + _body: ConversationPatch, +): ... + + +@router.delete( + "/conversations/{conversation_id}", + status_code=status.HTTP_204_NO_CONTENT, +) +async def delete_conversation( + _params: Annotated[ConversationPathParams, Depends()], +): ... 
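
For orientation: the `): ...` stubs in this new spec module are never executed at runtime; as the module docstring says, they only exist so the OpenAPI document can be generated from the routers' type annotations (the repo wires these spec modules together in `api/specs/web-server/openapi.py`, which is extended later in this diff). A minimal, assumption-level sketch of how such a router can be rendered to JSON with plain FastAPI (not the project's actual tooling) could look like this:

```python
# Illustrative sketch only: render the conversations router above into an OpenAPI file.
# Assumes FastAPI is installed and that this module's imports resolve in the environment.
import json

from fastapi import FastAPI

app = FastAPI(title="web-server API (conversations subset)")
app.include_router(router)  # `router` as defined in this spec module

with open("openapi-conversations.json", "w") as fh:
    json.dump(app.openapi(), fh, indent=1)
```
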
+ + +@router.get( + "/conversations/{conversation_id}", + response_model=Envelope[ConversationRestGet], +) +async def get_conversation( + _params: Annotated[ConversationPathParams, Depends()], +): ... + + +# +# API entrypoints CONVERSATION MESSAGES +# + + +@router.post( + "/conversations/{conversation_id}/messages", + response_model=Envelope[ConversationMessageRestGet], + status_code=status.HTTP_201_CREATED, +) +async def create_conversation_message( + _params: Annotated[ConversationPathParams, Depends()], + _body: _ConversationMessageCreateBodyParams, +): ... + + +@router.get( + "/conversations/{conversation_id}/messages", + response_model=Page[ConversationMessageRestGet], +) +async def list_conversation_messages( + _params: Annotated[ConversationPathParams, Depends()], + _query: Annotated[as_query(_ListConversationMessageQueryParams), Depends()], +): ... + + +@router.put( + "/conversations/{conversation_id}/messages/{message_id}", + response_model=Envelope[ConversationMessageRestGet], +) +async def update_conversation_message( + _params: Annotated[_ConversationMessagePathParams, Depends()], + _body: ConversationMessagePatch, +): ... + + +@router.delete( + "/conversations/{conversation_id}/messages/{message_id}", + status_code=status.HTTP_204_NO_CONTENT, +) +async def delete_conversation_message( + _params: Annotated[_ConversationMessagePathParams, Depends()], +): ... + + +@router.get( + "/conversations/{conversation_id}/messages/{message_id}", + response_model=Envelope[ConversationMessageRestGet], +) +async def get_conversation_message( + _params: Annotated[_ConversationMessagePathParams, Depends()], +): ... diff --git a/api/specs/web-server/_functions.py b/api/specs/web-server/_functions.py index 72f4c8af48ed..a58450703ffc 100644 --- a/api/specs/web-server/_functions.py +++ b/api/specs/web-server/_functions.py @@ -7,15 +7,26 @@ from typing import Annotated +from _common import as_query from fastapi import APIRouter, Depends, status from models_library.api_schemas_webserver.functions import ( + FunctionGroupAccessRightsGet, + FunctionGroupAccessRightsUpdate, FunctionToRegister, RegisteredFunctionGet, + RegisteredFunctionUpdate, ) from models_library.generics import Envelope +from models_library.groups import GroupID from simcore_service_webserver._meta import API_VTAG +from simcore_service_webserver.functions._controller._functions_rest import ( + FunctionGroupPathParams, +) from simcore_service_webserver.functions._controller._functions_rest_schemas import ( + FunctionDeleteQueryParams, + FunctionGetQueryParams, FunctionPathParams, + FunctionsListQueryParams, ) router = APIRouter( @@ -35,12 +46,32 @@ async def register_function( ) -> Envelope[RegisteredFunctionGet]: ... +@router.get( + "/functions", + response_model=Envelope[list[RegisteredFunctionGet]], +) +async def list_functions( + _query: Annotated[as_query(FunctionsListQueryParams), Depends()], +): ... + + @router.get( "/functions/{function_id}", response_model=Envelope[RegisteredFunctionGet], ) async def get_function( _path: Annotated[FunctionPathParams, Depends()], + _query: Annotated[as_query(FunctionGetQueryParams), Depends()], +): ... + + +@router.patch( + "/functions/{function_id}", + response_model=Envelope[RegisteredFunctionGet], +) +async def update_function( + _body: RegisteredFunctionUpdate, + _path: Annotated[FunctionPathParams, Depends()], ): ... 
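
The hunk above adds listing, detail (now with query parameters) and a new `PATCH` endpoint for registered functions. As a rough illustration of how these routes could be exercised from a script or test, one might write the following; the base URL, the placeholder function id and the JSON payload fields are assumptions, only the paths, verbs and response envelopes come from the spec:

```python
# Illustrative sketch only; authentication (login cookie) is omitted for brevity.
import httpx

FUNCTION_ID = "00000000-0000-0000-0000-000000000000"  # hypothetical id

# trailing slash matters for httpx base_url joining; "v0" assumes API_VTAG == "v0"
with httpx.Client(base_url="http://127.0.0.1:9081/v0/") as client:
    # GET /functions -> Envelope[list[RegisteredFunctionGet]]
    client.get("functions").raise_for_status()

    # GET /functions/{function_id}; FunctionGetQueryParams travel as the query string
    client.get(f"functions/{FUNCTION_ID}")

    # PATCH /functions/{function_id} with a RegisteredFunctionUpdate body
    # (the field name "title" is a placeholder, not taken from the model)
    client.patch(f"functions/{FUNCTION_ID}", json={"title": "renamed"})
```
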
@@ -50,4 +81,35 @@ async def get_function( ) async def delete_function( _path: Annotated[FunctionPathParams, Depends()], + _query: Annotated[as_query(FunctionDeleteQueryParams), Depends()], +): ... + + +@router.get( + "/functions/{function_id}/groups", + response_model=Envelope[dict[GroupID, FunctionGroupAccessRightsGet]], +) +async def get_function_groups( + _path: Annotated[FunctionPathParams, Depends()], +): ... + + +@router.put( + "/functions/{function_id}/groups/{group_id}", + summary="Create or update a Function Group", + response_model=Envelope[FunctionGroupAccessRightsGet], +) +async def create_or_update_function_group( + _path: Annotated[FunctionGroupPathParams, Depends()], + _body: FunctionGroupAccessRightsUpdate, +): ... + + +@router.delete( + "/functions/{function_id}/groups/{group_id}", + summary="Delete a Function Group", + status_code=status.HTTP_204_NO_CONTENT, +) +async def delete_function_group( + _path: Annotated[FunctionGroupPathParams, Depends()], ): ... diff --git a/api/specs/web-server/_long_running_tasks.py b/api/specs/web-server/_long_running_tasks.py index f204c1de5b42..77b979b2e3e8 100644 --- a/api/specs/web-server/_long_running_tasks.py +++ b/api/specs/web-server/_long_running_tasks.py @@ -10,7 +10,7 @@ from models_library.generics import Envelope from models_library.rest_error import EnvelopedError from servicelib.aiohttp.long_running_tasks._routes import _PathParam -from servicelib.long_running_tasks._models import TaskGet, TaskStatus +from servicelib.long_running_tasks.models import TaskGet, TaskStatus from simcore_service_webserver._meta import API_VTAG from simcore_service_webserver.tasks._exception_handlers import ( _TO_HTTP_ERROR_MAP as export_data_http_error_map, @@ -32,43 +32,40 @@ @router.get( "/tasks", response_model=Envelope[list[TaskGet]], - name="list_tasks", - description="Lists all long running tasks", responses=_export_data_responses, ) -def get_async_jobs(): ... +def get_async_jobs(): + """Lists all long running tasks""" @router.get( "/tasks/{task_id}", response_model=Envelope[TaskStatus], - name="get_task_status", - description="Retrieves the status of a task", responses=_export_data_responses, ) def get_async_job_status( _path_params: Annotated[_PathParam, Depends()], -): ... +): + """Retrieves the status of a task""" @router.delete( "/tasks/{task_id}", - name="cancel_and_delete_task", - description="Cancels and deletes a task", responses=_export_data_responses, status_code=status.HTTP_204_NO_CONTENT, ) def cancel_async_job( _path_params: Annotated[_PathParam, Depends()], -): ... +): + """Cancels and removes a task""" @router.get( "/tasks/{task_id}/result", - name="get_task_result", - description="Retrieves the result of a task", + response_model=Any, responses=_export_data_responses, ) def get_async_job_result( _path_params: Annotated[_PathParam, Depends()], -): ... 
+): + """Retrieves the result of a task""" diff --git a/api/specs/web-server/_long_running_tasks_legacy.py b/api/specs/web-server/_long_running_tasks_legacy.py index fcbe3508c4dc..59bc8881b0c3 100644 --- a/api/specs/web-server/_long_running_tasks_legacy.py +++ b/api/specs/web-server/_long_running_tasks_legacy.py @@ -4,12 +4,12 @@ # pylint: disable=too-many-arguments -from typing import Annotated +from typing import Annotated, Any from fastapi import APIRouter, Depends, status from models_library.generics import Envelope from servicelib.aiohttp.long_running_tasks._routes import _PathParam -from servicelib.long_running_tasks._models import TaskGet, TaskStatus +from servicelib.long_running_tasks.models import TaskGet, TaskStatus from simcore_service_webserver._meta import API_VTAG router = APIRouter( @@ -26,7 +26,7 @@ name="list_tasks", description="Lists all long running tasks", ) -def list_tasks(): ... +async def list_tasks(): ... @router.get( @@ -35,18 +35,18 @@ def list_tasks(): ... name="get_task_status", description="Retrieves the status of a task", ) -def get_task_status( +async def get_task_status( _path_params: Annotated[_PathParam, Depends()], ): ... @router.delete( "/{task_id}", - name="cancel_and_delete_task", - description="Cancels and deletes a task", + name="remove_task", + description="Cancels and removes a task", status_code=status.HTTP_204_NO_CONTENT, ) -def cancel_and_delete_task( +async def remove_task( _path_params: Annotated[_PathParam, Depends()], ): ... @@ -54,8 +54,9 @@ def cancel_and_delete_task( @router.get( "/{task_id}/result", name="get_task_result", + response_model=Any, description="Retrieves the result of a task", ) -def get_task_result( +async def get_task_result( _path_params: Annotated[_PathParam, Depends()], ): ... diff --git a/api/specs/web-server/_nih_sparc.py b/api/specs/web-server/_nih_sparc.py index 7d457be55786..8c3fdb4a91b5 100644 --- a/api/specs/web-server/_nih_sparc.py +++ b/api/specs/web-server/_nih_sparc.py @@ -8,7 +8,7 @@ from fastapi import APIRouter from models_library.generics import Envelope from simcore_service_webserver._meta import API_VTAG -from simcore_service_webserver.studies_dispatcher._rest_handlers import ( +from simcore_service_webserver.studies_dispatcher._controller.rest.nih_schemas import ( ServiceGet, Viewer, ) diff --git a/api/specs/web-server/_nih_sparc_redirections.py b/api/specs/web-server/_nih_sparc_redirections.py index df1693e28773..f13a7b99ce5d 100644 --- a/api/specs/web-server/_nih_sparc_redirections.py +++ b/api/specs/web-server/_nih_sparc_redirections.py @@ -1,5 +1,4 @@ -""" Helper script to generate OAS automatically NIH-sparc portal API section -""" +"""Helper script to generate OAS automatically NIH-sparc portal API section""" # pylint: disable=protected-access # pylint: disable=redefined-outer-name @@ -11,7 +10,7 @@ from fastapi import APIRouter, status from fastapi.responses import RedirectResponse from models_library.projects import ProjectID -from models_library.services import ServiceKey, ServiceKeyVersion +from models_library.services_types import ServiceKey, ServiceVersion from pydantic import HttpUrl, PositiveInt router = APIRouter( @@ -31,7 +30,7 @@ async def get_redirection_to_viewer( file_type: str, viewer_key: ServiceKey, - viewer_version: ServiceKeyVersion, + viewer_version: ServiceVersion, file_size: PositiveInt, download_link: HttpUrl, file_name: str | None = "unknown", diff --git a/api/specs/web-server/_projects_conversations.py b/api/specs/web-server/_projects_conversations.py index 
044800772fd9..483298d298f9 100644 --- a/api/specs/web-server/_projects_conversations.py +++ b/api/specs/web-server/_projects_conversations.py @@ -12,7 +12,7 @@ from typing import Annotated from fastapi import APIRouter, Depends, status -from models_library.api_schemas_webserver.projects_conversations import ( +from models_library.api_schemas_webserver.conversations import ( ConversationMessageRestGet, ConversationRestGet, ) diff --git a/api/specs/web-server/_projects_states.py b/api/specs/web-server/_projects_states.py index 007b50c7d3f2..f6c0443f0b3b 100644 --- a/api/specs/web-server/_projects_states.py +++ b/api/specs/web-server/_projects_states.py @@ -7,9 +7,11 @@ from typing import Annotated from fastapi import APIRouter, Body, Depends -from models_library.api_schemas_webserver.projects import ProjectGet +from models_library.api_schemas_webserver.projects import ( + ProjectGet, + ProjectStateOutputSchema, +) from models_library.generics import Envelope -from models_library.projects_state import ProjectState from pydantic import ValidationError from servicelib.aiohttp import status from simcore_service_webserver._meta import API_VTAG @@ -80,7 +82,9 @@ def close_project( ): ... -@router.get("/projects/{project_id}/state", response_model=Envelope[ProjectState]) +@router.get( + "/projects/{project_id}/state", response_model=Envelope[ProjectStateOutputSchema] +) def get_project_state( _path_params: Annotated[ProjectPathParams, Depends()], ): ... diff --git a/api/specs/web-server/_users.py b/api/specs/web-server/_users.py index bd957858c493..09d56fd19829 100644 --- a/api/specs/web-server/_users.py +++ b/api/specs/web-server/_users.py @@ -4,45 +4,43 @@ # pylint: disable=too-many-arguments -from enum import Enum from typing import Annotated -from _common import as_query from fastapi import APIRouter, Depends, status from models_library.api_schemas_webserver.users import ( + MyFunctionPermissionsGet, MyPermissionGet, - MyProfileGet, - MyProfilePatch, + MyProfileRestGet, + MyProfileRestPatch, MyTokenCreate, MyTokenGet, - UserAccountApprove, - UserAccountGet, - UserAccountReject, - UserAccountSearchQueryParams, + TokenPathParams, UserGet, - UsersAccountListQueryParams, UsersSearch, ) from models_library.api_schemas_webserver.users_preferences import PatchRequestBody from models_library.generics import Envelope -from models_library.rest_pagination import Page from models_library.user_preferences import PreferenceIdentifier from simcore_service_webserver._meta import API_VTAG -from simcore_service_webserver.users._common.schemas import PreRegisteredUserGet -from simcore_service_webserver.users._notifications import ( +from simcore_service_webserver.user_notifications._controller.rest.user_notification_rest import ( + NotificationPathParams, +) +from simcore_service_webserver.user_notifications._models import ( UserNotification, UserNotificationCreate, UserNotificationPatch, ) -from simcore_service_webserver.users._notifications_rest import _NotificationPathParams -from simcore_service_webserver.users._tokens_rest import _TokenPathParams +from simcore_service_webserver.users._controller.rest._rest_schemas import ( + MyPhoneConfirm, + MyPhoneRegister, +) router = APIRouter(prefix=f"/{API_VTAG}", tags=["users"]) @router.get( "/me", - response_model=Envelope[MyProfileGet], + response_model=Envelope[MyProfileRestGet], ) async def get_my_profile(): ... @@ -51,7 +49,58 @@ async def get_my_profile(): ... 
"/me", status_code=status.HTTP_204_NO_CONTENT, ) -async def update_my_profile(_body: MyProfilePatch): ... +async def update_my_profile(_body: MyProfileRestPatch): ... + + +@router.post( + "/me/phone:register", + description="Starts the phone registration process", + status_code=status.HTTP_202_ACCEPTED, + responses={ + status.HTTP_202_ACCEPTED: {"description": "Phone registration initiated"}, + status.HTTP_401_UNAUTHORIZED: {"description": "Authentication required"}, + status.HTTP_403_FORBIDDEN: {"description": "Insufficient permissions"}, + status.HTTP_422_UNPROCESSABLE_ENTITY: { + "description": "Invalid phone number format" + }, + }, +) +async def my_phone_register(_body: MyPhoneRegister): ... + + +@router.post( + "/me/phone:resend", + description="Resends the phone registration code", + status_code=status.HTTP_202_ACCEPTED, + responses={ + status.HTTP_202_ACCEPTED: {"description": "Phone code resent"}, + status.HTTP_400_BAD_REQUEST: { + "description": "No pending phone registration found" + }, + status.HTTP_401_UNAUTHORIZED: {"description": "Authentication required"}, + status.HTTP_403_FORBIDDEN: {"description": "Insufficient permissions"}, + }, +) +async def my_phone_resend(): ... + + +@router.post( + "/me/phone:confirm", + description="Confirms the phone registration", + status_code=status.HTTP_204_NO_CONTENT, + responses={ + status.HTTP_204_NO_CONTENT: {"description": "Phone registration confirmed"}, + status.HTTP_400_BAD_REQUEST: { + "description": "No pending registration or invalid code" + }, + status.HTTP_401_UNAUTHORIZED: {"description": "Authentication required"}, + status.HTTP_403_FORBIDDEN: {"description": "Insufficient permissions"}, + status.HTTP_422_UNPROCESSABLE_ENTITY: { + "description": "Invalid confirmation code format" + }, + }, +) +async def my_phone_confirm(_body: MyPhoneConfirm): ... @router.patch( @@ -84,7 +133,7 @@ async def create_token(_body: MyTokenCreate): ... response_model=Envelope[MyTokenGet], ) async def get_token( - _path: Annotated[_TokenPathParams, Depends()], + _path: Annotated[TokenPathParams, Depends()], ): ... @@ -92,7 +141,7 @@ async def get_token( "/me/tokens/{service}", status_code=status.HTTP_204_NO_CONTENT, ) -async def delete_token(_path: Annotated[_TokenPathParams, Depends()]): ... +async def delete_token(_path: Annotated[TokenPathParams, Depends()]): ... @router.get( @@ -116,7 +165,7 @@ async def create_user_notification( status_code=status.HTTP_204_NO_CONTENT, ) async def mark_notification_as_read( - _path: Annotated[_NotificationPathParams, Depends()], + _path: Annotated[NotificationPathParams, Depends()], _body: UserNotificationPatch, ): ... @@ -128,6 +177,13 @@ async def mark_notification_as_read( async def list_user_permissions(): ... +@router.get( + "/me/function-permissions", + response_model=Envelope[MyFunctionPermissionsGet], +) +async def list_user_functions_permissions(): ... + + # # USERS public # @@ -139,56 +195,3 @@ async def list_user_permissions(): ... description="Search among users who are publicly visible to the caller (i.e., me) based on their privacy settings.", ) async def search_users(_body: UsersSearch): ... - - -# -# USERS admin -# - -_extra_tags: list[str | Enum] = ["admin"] - - -@router.get( - "/admin/user-accounts", - response_model=Page[UserAccountGet], - tags=_extra_tags, -) -async def list_users_accounts( - _query: Annotated[as_query(UsersAccountListQueryParams), Depends()], -): ... 
- - -@router.post( - "/admin/user-accounts:approve", - status_code=status.HTTP_204_NO_CONTENT, - tags=_extra_tags, -) -async def approve_user_account(_body: UserAccountApprove): ... - - -@router.post( - "/admin/user-accounts:reject", - status_code=status.HTTP_204_NO_CONTENT, - tags=_extra_tags, -) -async def reject_user_account(_body: UserAccountReject): ... - - -@router.get( - "/admin/user-accounts:search", - response_model=Envelope[list[UserAccountGet]], - tags=_extra_tags, -) -async def search_user_accounts( - _query: Annotated[UserAccountSearchQueryParams, Depends()], -): - # NOTE: see `Search` in `Common Custom Methods` in https://cloud.google.com/apis/design/custom_methods - ... - - -@router.post( - "/admin/user-accounts:pre-register", - response_model=Envelope[UserAccountGet], - tags=_extra_tags, -) -async def pre_register_user_account(_body: PreRegisteredUserGet): ... diff --git a/api/specs/web-server/_users_admin.py b/api/specs/web-server/_users_admin.py new file mode 100644 index 000000000000..2e4ff647c3cc --- /dev/null +++ b/api/specs/web-server/_users_admin.py @@ -0,0 +1,72 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + + +from enum import Enum +from typing import Annotated + +from _common import as_query +from fastapi import APIRouter, Depends, status +from models_library.api_schemas_webserver.users import ( + UserAccountApprove, + UserAccountGet, + UserAccountReject, + UserAccountSearchQueryParams, + UsersAccountListQueryParams, +) +from models_library.generics import Envelope +from models_library.rest_pagination import Page +from simcore_service_webserver._meta import API_VTAG +from simcore_service_webserver.users.schemas import UserAccountRestPreRegister + +router = APIRouter(prefix=f"/{API_VTAG}", tags=["users"]) + +_extra_tags: list[str | Enum] = ["admin"] + + +@router.get( + "/admin/user-accounts", + response_model=Page[UserAccountGet], + tags=_extra_tags, +) +async def list_users_accounts( + _query: Annotated[as_query(UsersAccountListQueryParams), Depends()], +): ... + + +@router.post( + "/admin/user-accounts:approve", + status_code=status.HTTP_204_NO_CONTENT, + tags=_extra_tags, +) +async def approve_user_account(_body: UserAccountApprove): ... + + +@router.post( + "/admin/user-accounts:reject", + status_code=status.HTTP_204_NO_CONTENT, + tags=_extra_tags, +) +async def reject_user_account(_body: UserAccountReject): ... + + +@router.get( + "/admin/user-accounts:search", + response_model=Envelope[list[UserAccountGet]], + tags=_extra_tags, +) +async def search_user_accounts( + _query: Annotated[UserAccountSearchQueryParams, Depends()], +): + # NOTE: see `Search` in `Common Custom Methods` in https://cloud.google.com/apis/design/custom_methods + ... + + +@router.post( + "/admin/user-accounts:pre-register", + response_model=Envelope[UserAccountGet], + tags=_extra_tags, +) +async def pre_register_user_account(_body: UserAccountRestPreRegister): ... 
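
The admin endpoints above, moved out of `_users.py` into the new `_users_admin.py`, keep the `collection:verb` URL style of Google's "custom methods", as the NOTE in `search_user_accounts` points out. A rough sketch of how an admin session could call them follows; the base URL and the request payload and query fields are assumptions, only the paths, verbs and status codes come from the spec:

```python
# Illustrative sketch only; the admin login/session handling is omitted.
import httpx

with httpx.Client(base_url="http://127.0.0.1:9081/v0/") as client:
    # paginated listing: GET /admin/user-accounts -> Page[UserAccountGet]
    client.get("admin/user-accounts", params={"offset": 0, "limit": 50})

    # custom methods append ":<verb>" to the collection path
    # (the "email" field is a guess at the UserAccountApprove/Reject payloads)
    client.post("admin/user-accounts:approve", json={"email": "new.user@example.com"})
    client.post("admin/user-accounts:reject", json={"email": "spam.bot@example.com"})

    # GET /admin/user-accounts:search -> Envelope[list[UserAccountGet]]
    client.get("admin/user-accounts:search", params={"email": "new.user@example.com"})
```
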
diff --git a/api/specs/web-server/openapi.py b/api/specs/web-server/openapi.py index 700bdc7d63c0..2ecc07d19999 100644 --- a/api/specs/web-server/openapi.py +++ b/api/specs/web-server/openapi.py @@ -21,11 +21,13 @@ # core --- "_auth", "_auth_api_keys", + "_conversations", "_groups", "_tags", "_tags_groups", # after _tags "_products", "_users", + "_users_admin", # after _users "_wallets", # add-ons --- "_activity", diff --git a/api/tests/requirements.txt b/api/tests/requirements.txt index 15baef4e7825..cb1405f4c0e8 100644 --- a/api/tests/requirements.txt +++ b/api/tests/requirements.txt @@ -1,27 +1,27 @@ aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.14 # via # -c ../../requirements/constraints.txt # -r requirements.in -aiosignal==1.3.2 +aiosignal==1.4.0 # via aiohttp -attrs==25.1.0 +attrs==25.3.0 # via # aiohttp # jsonschema # referencing -certifi==2025.1.31 +certifi==2025.7.9 # via # -c ../../requirements/constraints.txt # requests -charset-normalizer==3.4.1 +charset-normalizer==3.4.2 # via requests -coverage==7.6.12 +coverage==7.9.2 # via # -r requirements.in # pytest-cov -frozenlist==1.5.0 +frozenlist==1.7.0 # via # aiohttp # aiosignal @@ -29,11 +29,11 @@ idna==3.10 # via # requests # yarl -iniconfig==2.0.0 +iniconfig==2.1.0 # via pytest isodate==0.7.2 # via openapi-core -jsonschema==4.23.0 +jsonschema==4.24.0 # via # openapi-core # openapi-schema-validator @@ -42,29 +42,29 @@ jsonschema-path==0.3.4 # via # openapi-core # openapi-spec-validator -jsonschema-specifications==2024.10.1 +jsonschema-specifications==2025.4.1 # via # jsonschema # openapi-schema-validator -lazy-object-proxy==1.10.0 +lazy-object-proxy==1.11.0 # via openapi-spec-validator markupsafe==3.0.2 # via werkzeug -more-itertools==10.6.0 +more-itertools==10.7.0 # via openapi-core -multidict==6.1.0 +multidict==6.6.3 # via # aiohttp # yarl -openapi-core==0.19.4 +openapi-core==0.19.5 # via -r requirements.in openapi-schema-validator==0.6.3 # via # openapi-core # openapi-spec-validator -openapi-spec-validator==0.7.1 +openapi-spec-validator==0.7.2 # via openapi-core -packaging==24.2 +packaging==25.0 # via # pytest # pytest-sugar @@ -72,22 +72,26 @@ parse==1.20.2 # via openapi-core pathable==0.4.4 # via jsonschema-path -pluggy==1.5.0 - # via pytest -propcache==0.3.0 +pluggy==1.6.0 + # via + # pytest + # pytest-cov +propcache==0.3.2 # via # aiohttp # yarl -pytest==8.3.5 +pygments==2.19.2 + # via pytest +pytest==8.4.1 # via # -r requirements.in # pytest-asyncio # pytest-cov # pytest-instafail # pytest-sugar -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements.in pytest-instafail==0.5.0 # via -r requirements.in @@ -103,23 +107,27 @@ referencing==0.35.1 # jsonschema # jsonschema-path # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via jsonschema-path rfc3339-validator==0.1.4 # via openapi-schema-validator -rpds-py==0.23.1 +rpds-py==0.26.0 # via # jsonschema # referencing six==1.17.0 # via rfc3339-validator -termcolor==2.5.0 +termcolor==3.1.0 # via pytest-sugar -urllib3==2.3.0 +typing-extensions==4.14.1 + # via + # aiosignal + # openapi-core +urllib3==2.5.0 # via # -c ../../requirements/constraints.txt # requests -werkzeug==3.1.3 +werkzeug==3.1.1 # via openapi-core -yarl==1.18.3 +yarl==1.20.1 # via aiohttp diff --git a/ci/github/system-testing/e2e-playwright.bash b/ci/github/system-testing/e2e-playwright.bash index b0a6e498f66c..f01e78530eb1 100755 --- a/ci/github/system-testing/e2e-playwright.bash +++ 
b/ci/github/system-testing/e2e-playwright.bash @@ -37,7 +37,7 @@ dump_docker_logs() { for service_id in $(docker service ls -q); do service_name=$(docker service inspect "$service_id" --format="{{.Spec.Name}}") echo "Dumping logs for $service_name" - (timeout 30 docker service logs --timestamps --tail=400 --details "$service_id" >"$out_dir/$service_name.log" 2>&1) || true + (timeout 30 docker service logs --timestamps --tail=500 --details "$service_id" >"$out_dir/$service_name.log" 2>&1) || true done } diff --git a/ci/github/unit-testing/celery-library.bash b/ci/github/unit-testing/celery-library.bash new file mode 100755 index 000000000000..a05004c677f1 --- /dev/null +++ b/ci/github/unit-testing/celery-library.bash @@ -0,0 +1,43 @@ +#!/bin/bash +# http://redsymbol.net/articles/unofficial-bash-strict-mode/ +set -o errexit # abort on nonzero exitstatus +set -o nounset # abort on unbound variable +set -o pipefail # don't hide errors within pipes +IFS=$'\n\t' + +install() { + make devenv + # shellcheck source=/dev/null + source .venv/bin/activate + pushd packages/celery-library + make install-ci + popd + uv pip list +} + +test() { + # shellcheck source=/dev/null + source .venv/bin/activate + pushd packages/celery-library + make tests-ci + popd +} + +typecheck() { + # shellcheck source=/dev/null + source .venv/bin/activate + uv pip install mypy + pushd packages/celery-library + make mypy + popd +} + +# Check if the function exists (bash specific) +if declare -f "$1" >/dev/null; then + # call arguments verbatim + "$@" +else + # Show a helpful error + echo "'$1' is not a known function name" >&2 + exit 1 +fi diff --git a/ci/helpers/ensure_python_pip.bash b/ci/helpers/ensure_python_pip.bash deleted file mode 100755 index e18b9839347b..000000000000 --- a/ci/helpers/ensure_python_pip.bash +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash -# -# Bootstrapping the pip installer -# -# SEE https://docs.python.org/3/library/ensurepip.html -# -# http://redsymbol.net/articles/unofficial-bash-strict-mode/ -set -o errexit # abort on nonzero exitstatus -set -o nounset # abort on unbound variable -set -o pipefail # don't hide errors within pipes -IFS=$'\n\t' - -# Pin pip version to a compatible release https://www.python.org/dev/peps/pep-0440/#compatible-release -PIP_VERSION=24.0 - -echo "INFO:" "$(python --version)" "@" "$(command -v python)" - -# installs pip if not in place -python -m ensurepip - -echo "INFO:" "$(pip --version)" "@" "$(command -v pip)" - -pip install --upgrade \ - pip~=$PIP_VERSION \ - wheel \ - setuptools diff --git a/ci/helpers/requirements/requirements.txt b/ci/helpers/requirements/requirements.txt index 872f14cc7e14..c90157d098d1 100644 --- a/ci/helpers/requirements/requirements.txt +++ b/ci/helpers/requirements/requirements.txt @@ -1,6 +1,6 @@ aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/requirements.in @@ -20,7 +20,7 @@ charset-normalizer==3.4.1 # via requests docker==7.1.0 # via -r requirements/requirements.in -fastapi==0.115.12 +fastapi==0.116.1 # via -r requirements/requirements.in frozenlist==1.4.1 # via @@ -39,26 +39,30 @@ propcache==0.3.1 # via # aiohttp # yarl -pydantic==2.10.5 +pydantic==2.11.7 # via # -c requirements/../../../requirements/constraints.txt # fastapi -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via pydantic -requests==2.32.3 +requests==2.32.4 # via docker sniffio==1.3.1 # via anyio -starlette==0.46.2 +starlette==0.47.2 # via # -c 
requirements/../../../requirements/constraints.txt # fastapi -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # fastapi # pydantic # pydantic-core -urllib3==2.3.0 + # starlette + # typing-inspection +typing-inspection==0.4.1 + # via pydantic +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # docker diff --git a/docs/messages-guidelines.md b/docs/user-messages-guidelines.md similarity index 96% rename from docs/messages-guidelines.md rename to docs/user-messages-guidelines.md index cc07d2c2d1fd..dbd87e8cd607 100644 --- a/docs/messages-guidelines.md +++ b/docs/user-messages-guidelines.md @@ -1,6 +1,6 @@ -# Error and Warning Message Guidelines +# User Message Guidelines -These guidelines ensure that messages are user-friendly, clear, and helpful while maintaining a professional tone. 🚀 +These guidelines ensure that error and warnings user-facing messages are user-friendly, clear, and helpful while maintaining a professional tone. 🚀 Some details: diff --git a/mypy.ini b/mypy.ini index 9df50ed1d52f..d5b305e1b563 100644 --- a/mypy.ini +++ b/mypy.ini @@ -21,6 +21,7 @@ warn_return_any = True warn_unused_configs = True warn_unused_ignores = True + # SEE https://docs.pydantic.dev/mypy_plugin/#plugin-settings # SEE https://docs.pydantic.dev/1.10/mypy_plugin/#plugin-settings [pydantic-mypy] diff --git a/packages/aws-library/requirements/_base.txt b/packages/aws-library/requirements/_base.txt index b7fdc9536a8c..3b92a1e848b8 100644 --- a/packages/aws-library/requirements/_base.txt +++ b/packages/aws-library/requirements/_base.txt @@ -18,7 +18,7 @@ aiofiles==24.1.0 # aioboto3 aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -83,14 +83,8 @@ certifi==2025.1.31 # requests charset-normalizer==3.4.1 # via requests -click==8.1.8 +click==8.2.1 # via typer -deprecated==1.2.18 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions dnspython==2.7.0 # via email-validator email-validator==2.2.0 @@ -105,7 +99,7 @@ frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.68.0 +googleapis-common-protos==1.70.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -124,6 +118,10 @@ jmespath==1.0.1 # aiobotocore # boto3 # botocore +jsonref==1.1.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.23.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in @@ -139,13 +137,14 @@ multidict==6.1.0 # aiobotocore # aiohttp # yarl -opentelemetry-api==1.30.0 +opentelemetry-api==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-botocore # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis @@ -153,53 +152,57 @@ opentelemetry-api==1.30.0 # opentelemetry-propagator-aws-xray # 
opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.30.0 +opentelemetry-exporter-otlp==1.34.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.30.0 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.30.0 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.30.0 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.51b0 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-botocore # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.51b0 +opentelemetry-instrumentation-aio-pika==0.55b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-asyncpg==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-botocore==0.51b0 +opentelemetry-instrumentation-botocore==0.55b1 # via -r requirements/_base.in -opentelemetry-instrumentation-logging==0.51b0 +opentelemetry-instrumentation-logging==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.51b0 +opentelemetry-instrumentation-redis==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.51b0 +opentelemetry-instrumentation-requests==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in opentelemetry-propagator-aws-xray==1.0.2 # via opentelemetry-instrumentation-botocore -opentelemetry-proto==1.30.0 +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.30.0 +opentelemetry-sdk==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.51b0 +opentelemetry-semantic-conventions==0.55b1 # via # opentelemetry-instrumentation + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-botocore # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.51b0 +opentelemetry-util-http==0.55b1 # via opentelemetry-instrumentation-requests orjson==3.10.15 # via @@ -231,7 +234,7 @@ propcache==0.3.0 # via # aiohttp # yarl -protobuf==5.29.3 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto @@ -239,7 +242,7 @@ psutil==7.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in pycryptodome==3.21.0 # via stream-zip -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -268,9 +271,9 @@ pydantic==2.10.6 # fast-depends # pydantic-extra-types # pydantic-settings 
-pydantic-core==2.27.2 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.2 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -355,9 +358,9 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via opentelemetry-exporter-otlp-proto-http -rich==13.9.4 +rich==14.1.0 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -384,7 +387,7 @@ toolz==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.15.2 +typer==0.16.1 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -400,12 +403,16 @@ types-awscrt==0.23.10 # via botocore-stubs types-python-dateutil==2.9.0.20241206 # via arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # aiodebug # anyio # faststream + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pydantic # pydantic-core # pydantic-extra-types @@ -414,7 +421,10 @@ typing-extensions==4.12.2 # types-aiobotocore-ec2 # types-aiobotocore-s3 # types-aiobotocore-ssm -urllib3==2.3.0 + # typing-inspection +typing-inspection==0.4.1 + # via pydantic +urllib3==2.5.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -433,7 +443,6 @@ urllib3==2.3.0 wrapt==1.17.2 # via # aiobotocore - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-redis diff --git a/packages/aws-library/requirements/_test.txt b/packages/aws-library/requirements/_test.txt index 03936ad2eb04..7d53ea3e329c 100644 --- a/packages/aws-library/requirements/_test.txt +++ b/packages/aws-library/requirements/_test.txt @@ -53,7 +53,7 @@ charset-normalizer==3.4.1 # via # -c requirements/_base.txt # requests -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # flask @@ -70,13 +70,13 @@ docker==7.1.0 # via moto faker==36.1.1 # via -r requirements/_test.in -fastapi==0.115.12 +fastapi==0.116.1 # via -r requirements/_test.in flask==3.1.0 # via # flask-cors # moto -flask-cors==5.0.1 +flask-cors==6.0.1 # via moto flexcache==0.3 # via pint @@ -84,9 +84,9 @@ flexparser==0.4 # via pint graphql-core==3.2.6 # via moto -h11==0.14.0 +h11==0.16.0 # via httpcore -httpcore==1.0.7 +httpcore==1.0.9 # via httpx httpx==0.28.1 # via @@ -104,7 +104,7 @@ iniconfig==2.0.0 # via pytest itsdangerous==2.2.0 # via flask -jinja2==3.1.5 +jinja2==3.1.6 # via # -c requirements/../../../requirements/constraints.txt # flask @@ -163,7 +163,9 @@ pint==0.24.4 platformdirs==4.3.6 # via pint pluggy==1.5.0 - # via pytest + # via + # pytest + # pytest-cov ply==3.11 # via jsonpath-ng pprintpp==0.4.0 @@ -174,19 +176,23 @@ py-partiql-parser==0.6.1 # 
via moto pycparser==2.22 # via cffi -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator # fastapi -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via # -c requirements/_base.txt # pydantic +pygments==2.19.1 + # via + # -c requirements/_base.txt + # pytest pyparsing==3.2.1 # via moto -pytest==8.3.5 +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio @@ -196,17 +202,17 @@ pytest==8.3.5 # pytest-instafail # pytest-mock # pytest-sugar -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in pytest-benchmark==5.1.0 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in pytest-icdiff==0.9 # via -r requirements/_test.in pytest-instafail==0.5.0 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -239,7 +245,7 @@ referencing==0.35.1 # jsonschema-specifications regex==2024.11.6 # via cfn-lint -requests==2.32.3 +requests==2.32.4 # via # -c requirements/_base.txt # docker @@ -259,7 +265,7 @@ s3transfer==0.11.3 # via # -c requirements/_base.txt # boto3 -setuptools==75.8.2 +setuptools==80.9.0 # via moto six==1.17.0 # via @@ -270,7 +276,7 @@ sniffio==1.3.1 # via # -c requirements/_base.txt # anyio -starlette==0.46.0 +starlette==0.47.2 # via # -c requirements/../../../requirements/constraints.txt # fastapi @@ -294,7 +300,7 @@ types-s3transfer==0.11.3 # via # types-aioboto3 # types-boto3 -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # anyio @@ -306,12 +312,18 @@ typing-extensions==4.12.2 # pint # pydantic # pydantic-core + # starlette # types-aioboto3 # types-aiobotocore # types-boto3 + # typing-inspection +typing-inspection==0.4.1 + # via + # -c requirements/_base.txt + # pydantic tzdata==2025.1 # via faker -urllib3==2.3.0 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/packages/aws-library/requirements/_tools.txt b/packages/aws-library/requirements/_tools.txt index 51d5e1879cef..f694a66f3506 100644 --- a/packages/aws-library/requirements/_tools.txt +++ b/packages/aws-library/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -28,9 +28,9 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # black # mypy @@ -43,7 +43,9 @@ packaging==24.2 # black # build pathspec==0.12.1 - # via black + # via + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -70,13 +72,13 @@ pyyaml==6.0.2 # pre-commit ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.2 +setuptools==80.9.0 # via # -c requirements/_test.txt # pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/packages/aws-library/src/aws_library/ec2/__init__.py b/packages/aws-library/src/aws_library/ec2/__init__.py index 112c70861b29..0acff01ff0d6 100644 --- a/packages/aws-library/src/aws_library/ec2/__init__.py +++ b/packages/aws-library/src/aws_library/ec2/__init__.py @@ -1,5 +1,10 @@ from ._client import 
SimcoreEC2API -from ._errors import EC2AccessError, EC2NotConnectedError, EC2RuntimeError +from ._errors import ( + EC2AccessError, + EC2InsufficientCapacityError, + EC2NotConnectedError, + EC2RuntimeError, +) from ._models import ( AWS_TAG_KEY_MAX_LENGTH, AWS_TAG_KEY_MIN_LENGTH, @@ -16,22 +21,22 @@ ) __all__: tuple[str, ...] = ( - "AWSTagKey", - "AWSTagValue", - "AWS_TAG_KEY_MIN_LENGTH", "AWS_TAG_KEY_MAX_LENGTH", - "AWS_TAG_VALUE_MIN_LENGTH", + "AWS_TAG_KEY_MIN_LENGTH", "AWS_TAG_VALUE_MAX_LENGTH", + "AWS_TAG_VALUE_MIN_LENGTH", + "AWSTagKey", + "AWSTagValue", "EC2AccessError", "EC2InstanceBootSpecific", "EC2InstanceConfig", "EC2InstanceData", "EC2InstanceType", + "EC2InsufficientCapacityError", "EC2NotConnectedError", "EC2RuntimeError", "EC2Tags", "Resources", "SimcoreEC2API", ) - # nopycln: file diff --git a/packages/aws-library/src/aws_library/ec2/_client.py b/packages/aws-library/src/aws_library/ec2/_client.py index 970d6130e69a..911d04067ac4 100644 --- a/packages/aws-library/src/aws_library/ec2/_client.py +++ b/packages/aws-library/src/aws_library/ec2/_client.py @@ -13,10 +13,17 @@ from settings_library.ec2 import EC2Settings from types_aiobotocore_ec2 import EC2Client from types_aiobotocore_ec2.literals import InstanceStateNameType, InstanceTypeType -from types_aiobotocore_ec2.type_defs import FilterTypeDef, TagTypeDef +from types_aiobotocore_ec2.type_defs import ( + FilterTypeDef, + TagTypeDef, +) from ._error_handler import ec2_exception_handler -from ._errors import EC2InstanceNotFoundError, EC2TooManyInstancesError +from ._errors import ( + EC2InstanceNotFoundError, + EC2InsufficientCapacityError, + EC2SubnetsNotEnoughIPsError, +) from ._models import ( AWSTagKey, EC2InstanceConfig, @@ -25,7 +32,13 @@ EC2Tags, Resources, ) -from ._utils import compose_user_data, ec2_instance_data_from_aws_instance +from ._utils import ( + check_max_number_of_instances_not_exceeded, + compose_user_data, + ec2_instance_data_from_aws_instance, + get_subnet_azs, + get_subnet_capacity, +) _logger = logging.getLogger(__name__) @@ -92,6 +105,11 @@ async def get_ec2_instance_capabilities( list_instances: list[EC2InstanceType] = [] for instance in instance_types.get("InstanceTypes", []): with contextlib.suppress(KeyError): + assert "InstanceType" in instance # nosec + assert "VCpuInfo" in instance # nosec + assert "DefaultVCpus" in instance["VCpuInfo"] # nosec + assert "MemoryInfo" in instance # nosec + assert "SizeInMiB" in instance["MemoryInfo"] # nosec list_instances.append( EC2InstanceType( name=instance["InstanceType"], @@ -118,94 +136,145 @@ async def launch_instances( Arguments: instance_config -- The EC2 instance configuration - min_number_of_instances -- the minimal number of instances needed (fails if this amount cannot be reached) + min_number_of_instances -- the minimal number of instances required (fails if this amount cannot be reached) number_of_instances -- the ideal number of instances needed (it it cannot be reached AWS will return a number >=min_number_of_instances) - - Keyword Arguments: - max_total_number_of_instances -- The total maximum allowed number of instances for this given instance_config (default: {10}) + max_total_number_of_instances -- The total maximum allowed number of instances for this given instance_config Raises: - EC2TooManyInstancesError: + EC2TooManyInstancesError: max_total_number_of_instances would be exceeded + EC2SubnetsNotEnoughIPsError: not enough IPs in the subnets + EC2InsufficientCapacityError: not enough capacity in the subnets + Returns: The created 
instance data infos """ + with log_context( _logger, logging.INFO, - msg=f"launch {number_of_instances} AWS instance(s) {instance_config.type.name} with {instance_config.tags=}", + msg=f"launch {number_of_instances} AWS instance(s) {instance_config.type.name}" + f" with {instance_config.tags=} in {instance_config.subnet_ids=}", ): # first check the max amount is not already reached - current_instances = await self.get_instances( - key_names=[instance_config.key_name], tags=instance_config.tags + await check_max_number_of_instances_not_exceeded( + self, + instance_config, + required_number_instances=number_of_instances, + max_total_number_of_instances=max_total_number_of_instances, ) - if ( - len(current_instances) + number_of_instances - > max_total_number_of_instances - ): - raise EC2TooManyInstancesError( - num_instances=max_total_number_of_instances + + # NOTE: checking subnets capacity is not strictly needed as AWS will do it for us + # but it gives us a chance to give early feedback to the user + # and avoid trying to launch instances in subnets that are already full + # and also allows to circumvent a moto bug that does not raise + # InsufficientInstanceCapacity when a subnet is full + subnet_id_to_available_ips = await get_subnet_capacity( + self.client, subnet_ids=instance_config.subnet_ids + ) + + total_available_ips = sum(subnet_id_to_available_ips.values()) + if total_available_ips < min_number_of_instances: + raise EC2SubnetsNotEnoughIPsError( + subnet_ids=instance_config.subnet_ids, + instance_type=instance_config.type.name, + available_ips=total_available_ips, ) + # now let's not try to run instances in subnets that have not enough IPs + subnet_ids_with_capacity = [ + subnet_id + for subnet_id, capacity in subnet_id_to_available_ips.items() + if capacity >= min_number_of_instances + ] + resource_tags: list[TagTypeDef] = [ {"Key": tag_key, "Value": tag_value} for tag_key, tag_value in instance_config.tags.items() ] - instances = await self.client.run_instances( - ImageId=instance_config.ami_id, - MinCount=min_number_of_instances, - MaxCount=number_of_instances, - IamInstanceProfile=( - {"Arn": instance_config.iam_instance_profile} - if instance_config.iam_instance_profile - else {} - ), - InstanceType=instance_config.type.name, - InstanceInitiatedShutdownBehavior="terminate", - KeyName=instance_config.key_name, - TagSpecifications=[ - {"ResourceType": "instance", "Tags": resource_tags}, - {"ResourceType": "volume", "Tags": resource_tags}, - {"ResourceType": "network-interface", "Tags": resource_tags}, - ], - UserData=compose_user_data(instance_config.startup_script), - NetworkInterfaces=[ - { - "AssociatePublicIpAddress": True, - "DeviceIndex": 0, - "SubnetId": instance_config.subnet_id, - "Groups": instance_config.security_group_ids, - } - ], - ) - instance_ids = [i["InstanceId"] for i in instances["Instances"]] - _logger.info( - "%s New instances launched: %s, waiting for them to start now...", - len(instance_ids), - instance_ids, - ) + # Try each subnet in order until one succeeds + for subnet_id in subnet_ids_with_capacity: + try: + _logger.debug( + "Attempting to launch instances in subnet %s", subnet_id + ) - # wait for the instance to be in a pending state - # NOTE: reference to EC2 states https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-lifecycle.html - waiter = self.client.get_waiter("instance_exists") - await waiter.wait(InstanceIds=instance_ids) - _logger.debug("instances %s exists now.", instance_ids) + instances = await self.client.run_instances( + 
ImageId=instance_config.ami_id, + MinCount=min_number_of_instances, + MaxCount=number_of_instances, + IamInstanceProfile=( + {"Arn": instance_config.iam_instance_profile} + if instance_config.iam_instance_profile + else {} + ), + InstanceType=instance_config.type.name, + InstanceInitiatedShutdownBehavior="terminate", + KeyName=instance_config.key_name, + TagSpecifications=[ + {"ResourceType": "instance", "Tags": resource_tags}, + {"ResourceType": "volume", "Tags": resource_tags}, + { + "ResourceType": "network-interface", + "Tags": resource_tags, + }, + ], + UserData=compose_user_data(instance_config.startup_script), + NetworkInterfaces=[ + { + "AssociatePublicIpAddress": True, + "DeviceIndex": 0, + "SubnetId": subnet_id, + "Groups": instance_config.security_group_ids, + } + ], + ) + # If we get here, the launch succeeded + break + except botocore.exceptions.ClientError as exc: + error_code = exc.response.get("Error", {}).get("Code") + if error_code == "InsufficientInstanceCapacity": + _logger.warning( + "Insufficient capacity in subnet %s for instance type %s, trying next subnet", + subnet_id, + instance_config.type.name, + ) + continue + # For any other ClientError, re-raise to let the decorator handle it + raise + + else: + subnet_zones = await get_subnet_azs( + self.client, subnet_ids=subnet_ids_with_capacity + ) + raise EC2InsufficientCapacityError( + availability_zones=subnet_zones, + instance_type=instance_config.type.name, + ) + instance_ids = [ + i["InstanceId"] # pyright: ignore[reportTypedDictNotRequiredAccess] + for i in instances["Instances"] + ] + with log_context( + _logger, + logging.INFO, + msg=f"{len(instance_ids)} instances: {instance_ids=} launched. Wait to reach pending state", + ): + # wait for the instance to be in a pending state + # NOTE: reference to EC2 states https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-lifecycle.html + waiter = self.client.get_waiter("instance_exists") + await waiter.wait(InstanceIds=instance_ids) - # NOTE: waiting for pending ensure we get all the IPs back + # NOTE: waiting for pending ensures we get all the IPs back described_instances = await self.client.describe_instances( InstanceIds=instance_ids ) assert "Instances" in described_instances["Reservations"][0] # nosec - instance_datas = [ + return [ await ec2_instance_data_from_aws_instance(self, i) for i in described_instances["Reservations"][0]["Instances"] ] - _logger.info( - "%s are pending now", - f"{instance_ids=}", - ) - return instance_datas @ec2_exception_handler(_logger) async def get_instances( diff --git a/packages/aws-library/src/aws_library/ec2/_error_handler.py b/packages/aws-library/src/aws_library/ec2/_error_handler.py index 8984cf6a0a36..a8ed79717aa7 100644 --- a/packages/aws-library/src/aws_library/ec2/_error_handler.py +++ b/packages/aws-library/src/aws_library/ec2/_error_handler.py @@ -1,7 +1,16 @@ import functools import logging +import re from collections.abc import Callable, Coroutine -from typing import TYPE_CHECKING, Any, Concatenate, ParamSpec, TypeVar +from typing import ( + TYPE_CHECKING, + Any, + Concatenate, + Final, + ParamSpec, + TypeVar, + cast, +) from botocore import exceptions as botocore_exc @@ -9,6 +18,7 @@ EC2AccessError, EC2InstanceNotFoundError, EC2InstanceTypeInvalidError, + EC2InsufficientCapacityError, EC2NotConnectedError, EC2RuntimeError, EC2TimeoutError, @@ -26,30 +36,46 @@ Self = TypeVar("Self", bound="SimcoreEC2API") +_INSUFFICIENT_CAPACITY_ERROR_MSG_PATTERN: Final[re.Pattern] = re.compile( + r"sufficient (?P\S+) capacity 
in the Availability Zone you requested " + r"\((?P\S+)\)" +) + + def _map_botocore_client_exception( botocore_error: botocore_exc.ClientError, *args, # pylint: disable=unused-argument # noqa: ARG001 **kwargs, # pylint: disable=unused-argument # noqa: ARG001 ) -> EC2AccessError: - status_code = int( - botocore_error.response.get("ResponseMetadata", {}).get("HTTPStatusCode") - or botocore_error.response.get("Error", {}).get("Code", -1) + # see https://boto3.amazonaws.com/v1/documentation/api/latest/guide/error-handling.html#parsing-error-responses-and-catching-exceptions-from-aws-services + status_code = cast( + int, + botocore_error.response.get("ResponseMetadata", {}).get("HTTPStatusCode", "-1"), ) + error_code = botocore_error.response.get("Error", {}).get("Code", "Unknown") + error_msg = botocore_error.response.get("Error", {}).get("Message", "Unknown") operation_name = botocore_error.operation_name - match status_code, operation_name: - case 400, "StartInstances": + match error_code: + case "InvalidInstanceID.NotFound": return EC2InstanceNotFoundError() - case 400, "StopInstances": - return EC2InstanceNotFoundError() - case 400, "TerminateInstances": - return EC2InstanceNotFoundError() - case 400, "DescribeInstanceTypes": + case "InvalidInstanceType": return EC2InstanceTypeInvalidError() + case "InsufficientInstanceCapacity": + availability_zone = "unknown" + instance_type = "unknown" + if match := re.search(_INSUFFICIENT_CAPACITY_ERROR_MSG_PATTERN, error_msg): + instance_type = match.group("instance_type") + availability_zone = match.group("failed_az") + + raise EC2InsufficientCapacityError( + availability_zones=availability_zone, instance_type=instance_type + ) case _: return EC2AccessError( + status_code=status_code, operation_name=operation_name, - code=status_code, - error=f"{botocore_error}", + code=error_code, + error=error_msg, ) diff --git a/packages/aws-library/src/aws_library/ec2/_errors.py b/packages/aws-library/src/aws_library/ec2/_errors.py index 4fb0e611ed2b..81a0d0c1d695 100644 --- a/packages/aws-library/src/aws_library/ec2/_errors.py +++ b/packages/aws-library/src/aws_library/ec2/_errors.py @@ -16,7 +16,7 @@ class EC2NotConnectedError(EC2RuntimeError): class EC2AccessError(EC2RuntimeError): msg_template: str = ( - "Unexpected error while accessing EC2 backend: {operation_name}:{code}:{error}" + "Unexpected error while accessing EC2 backend responded with {status_code}: {operation_name}:{code}:{error}" ) @@ -36,3 +36,16 @@ class EC2TooManyInstancesError(EC2AccessError): msg_template: str = ( "The maximum amount of instances {num_instances} is already reached!" ) + + +class EC2InsufficientCapacityError(EC2AccessError): + msg_template: str = ( + "Insufficient capacity in {availability_zones} for {instance_type}" + ) + + +class EC2SubnetsNotEnoughIPsError(EC2AccessError): + msg_template: str = ( + "Not enough free IPs in subnet(s) {subnet_ids} for {num_instances} instances" + ". Only {available_ips} IPs available." 
+ ) diff --git a/packages/aws-library/src/aws_library/ec2/_models.py b/packages/aws-library/src/aws_library/ec2/_models.py index 621adc0f4eed..b136fb503007 100644 --- a/packages/aws-library/src/aws_library/ec2/_models.py +++ b/packages/aws-library/src/aws_library/ec2/_models.py @@ -5,6 +5,7 @@ from typing import Annotated, Final, TypeAlias import sh # type: ignore[import-untyped] +from common_library.basic_types import DEFAULT_FACTORY from models_library.docker import DockerGenericTag from pydantic import ( BaseModel, @@ -16,6 +17,7 @@ StringConstraints, field_validator, ) +from pydantic.config import JsonDict from types_aiobotocore_ec2.literals import InstanceStateNameType, InstanceTypeType @@ -133,7 +135,7 @@ class EC2InstanceConfig: ami_id: str key_name: str security_group_ids: list[str] - subnet_id: str + subnet_ids: list[str] iam_instance_profile: str @@ -143,23 +145,32 @@ class EC2InstanceConfig: class EC2InstanceBootSpecific(BaseModel): ami_id: AMIIdStr - custom_boot_scripts: list[CommandStr] = Field( - default_factory=list, - description="script(s) to run on EC2 instance startup (be careful!), " - "each entry is run one after the other using '&&' operator", - ) - pre_pull_images: list[DockerGenericTag] = Field( - default_factory=list, - description="a list of docker image/tags to pull on instance cold start", - ) - pre_pull_images_cron_interval: datetime.timedelta = Field( - default=datetime.timedelta(minutes=30), - description="time interval between pulls of images (minimum is 1 minute) " - "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)", - ) - buffer_count: NonNegativeInt = Field( - default=0, description="number of buffer EC2s to keep (defaults to 0)" - ) + custom_boot_scripts: Annotated[ + list[CommandStr], + Field( + default_factory=list, + description="script(s) to run on EC2 instance startup (be careful!), " + "each entry is run one after the other using '&&' operator", + ), + ] = DEFAULT_FACTORY + pre_pull_images: Annotated[ + list[DockerGenericTag], + Field( + default_factory=list, + description="a list of docker image/tags to pull on instance cold start", + ), + ] = DEFAULT_FACTORY + pre_pull_images_cron_interval: Annotated[ + datetime.timedelta, + Field( + description="time interval between pulls of images (minimum is 1 minute) " + "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)", + ), + ] = datetime.timedelta(minutes=30) + buffer_count: Annotated[ + NonNegativeInt, + Field(description="number of buffer EC2s to keep (defaults to 0)"), + ] = 0 @field_validator("custom_boot_scripts") @classmethod @@ -169,68 +180,74 @@ def validate_bash_calls(cls, v): temp_file.writelines(v) temp_file.flush() # NOTE: this will not capture runtime errors, but at least some syntax errors such as invalid quotes - sh.bash("-n", temp_file.name) + sh.bash("-n", temp_file.name) # pyright: ignore[reportCallIssue] # sh is untyped, but this call is safe for bash syntax checking except sh.ErrorReturnCode as exc: msg = f"Invalid bash call in custom_boot_scripts: {v}, Error: {exc.stderr}" raise ValueError(msg) from exc return v + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + # just AMI + "ami_id": "ami-123456789abcdef", + }, + { + # AMI + scripts + "ami_id": "ami-123456789abcdef", + "custom_boot_scripts": ["ls -tlah", "echo blahblah"], + }, + { + # AMI + scripts + pre-pull + "ami_id": 
"ami-123456789abcdef", + "custom_boot_scripts": ["ls -tlah", "echo blahblah"], + "pre_pull_images": [ + "nginx:latest", + "itisfoundation/my-very-nice-service:latest", + "simcore/services/dynamic/another-nice-one:2.4.5", + "asd", + ], + }, + { + # AMI + pre-pull + "ami_id": "ami-123456789abcdef", + "pre_pull_images": [ + "nginx:latest", + "itisfoundation/my-very-nice-service:latest", + "simcore/services/dynamic/another-nice-one:2.4.5", + "asd", + ], + }, + { + # AMI + pre-pull + cron + "ami_id": "ami-123456789abcdef", + "pre_pull_images": [ + "nginx:latest", + "itisfoundation/my-very-nice-service:latest", + "simcore/services/dynamic/another-nice-one:2.4.5", + "asd", + ], + "pre_pull_images_cron_interval": "01:00:00", + }, + { + # AMI + pre-pull + buffer count + "ami_id": "ami-123456789abcdef", + "pre_pull_images": [ + "nginx:latest", + "itisfoundation/my-very-nice-service:latest", + "simcore/services/dynamic/another-nice-one:2.4.5", + "asd", + ], + "buffer_count": 10, + }, + ] + } + ) + model_config = ConfigDict( - json_schema_extra={ - "examples": [ - { - # just AMI - "ami_id": "ami-123456789abcdef", - }, - { - # AMI + scripts - "ami_id": "ami-123456789abcdef", - "custom_boot_scripts": ["ls -tlah", "echo blahblah"], - }, - { - # AMI + scripts + pre-pull - "ami_id": "ami-123456789abcdef", - "custom_boot_scripts": ["ls -tlah", "echo blahblah"], - "pre_pull_images": [ - "nginx:latest", - "itisfoundation/my-very-nice-service:latest", - "simcore/services/dynamic/another-nice-one:2.4.5", - "asd", - ], - }, - { - # AMI + pre-pull - "ami_id": "ami-123456789abcdef", - "pre_pull_images": [ - "nginx:latest", - "itisfoundation/my-very-nice-service:latest", - "simcore/services/dynamic/another-nice-one:2.4.5", - "asd", - ], - }, - { - # AMI + pre-pull + cron - "ami_id": "ami-123456789abcdef", - "pre_pull_images": [ - "nginx:latest", - "itisfoundation/my-very-nice-service:latest", - "simcore/services/dynamic/another-nice-one:2.4.5", - "asd", - ], - "pre_pull_images_cron_interval": "01:00:00", - }, - { - # AMI + pre-pull + buffer count - "ami_id": "ami-123456789abcdef", - "pre_pull_images": [ - "nginx:latest", - "itisfoundation/my-very-nice-service:latest", - "simcore/services/dynamic/another-nice-one:2.4.5", - "asd", - ], - "buffer_count": 10, - }, - ] - } + json_schema_extra=_update_json_schema_extra, ) diff --git a/packages/aws-library/src/aws_library/ec2/_utils.py b/packages/aws-library/src/aws_library/ec2/_utils.py index d16be2cf9ead..33e31c5356b4 100644 --- a/packages/aws-library/src/aws_library/ec2/_utils.py +++ b/packages/aws-library/src/aws_library/ec2/_utils.py @@ -1,9 +1,14 @@ from textwrap import dedent from typing import TYPE_CHECKING, cast -from types_aiobotocore_ec2.type_defs import InstanceTypeDef +from types_aiobotocore_ec2 import EC2Client +from types_aiobotocore_ec2.type_defs import ( + InstanceTypeDef, + SubnetTypeDef, +) -from ._models import EC2InstanceData, EC2Tags +from ._errors import EC2TooManyInstancesError +from ._models import EC2InstanceConfig, EC2InstanceData, EC2Tags if TYPE_CHECKING: from ._client import SimcoreEC2API @@ -43,3 +48,60 @@ async def ec2_instance_data_from_aws_instance( resources=ec2_instance_types[0].resources, tags=cast(EC2Tags, {tag["Key"]: tag["Value"] for tag in instance["Tags"]}), ) + + +async def check_max_number_of_instances_not_exceeded( + ec2_client: "SimcoreEC2API", + instance_config: EC2InstanceConfig, + *, + required_number_instances: int, + max_total_number_of_instances: int, +) -> None: + current_instances = await ec2_client.get_instances( + 
key_names=[instance_config.key_name], tags=instance_config.tags + ) + if ( + len(current_instances) + required_number_instances + > max_total_number_of_instances + ): + raise EC2TooManyInstancesError(num_instances=max_total_number_of_instances) + + +async def get_subnet_capacity( + aioboto_ec2_client: EC2Client, *, subnet_ids: list[str] +) -> dict[str, int]: + subnets = await aioboto_ec2_client.describe_subnets(SubnetIds=subnet_ids) + assert "Subnets" in subnets # nosec + subnet_id_to_subnet_map: dict[str, SubnetTypeDef] = { + subnet["SubnetId"]: subnet # pyright: ignore[reportTypedDictNotRequiredAccess] + for subnet in subnets["Subnets"] + } + # preserve the order of instance_config.subnet_ids + + subnet_id_to_available_ips: dict[str, int] = { + subnet_id: subnet_id_to_subnet_map[subnet_id][ + "AvailableIpAddressCount" + ] # pyright: ignore[reportTypedDictNotRequiredAccess] + for subnet_id in subnet_ids + } + return subnet_id_to_available_ips + + +async def get_subnet_azs( + aioboto_ec2_client: EC2Client, *, subnet_ids: list[str] +) -> list[str]: + subnets = await aioboto_ec2_client.describe_subnets(SubnetIds=subnet_ids) + assert "Subnets" in subnets # nosec + subnet_id_to_subnet_map: dict[str, SubnetTypeDef] = { + subnet["SubnetId"]: subnet # pyright: ignore[reportTypedDictNotRequiredAccess] + for subnet in subnets["Subnets"] + } + # preserve the order of instance_config.subnet_ids + + subnet_azs: list[str] = [ + subnet_id_to_subnet_map[subnet_id][ + "AvailabilityZone" + ] # pyright: ignore[reportTypedDictNotRequiredAccess] + for subnet_id in subnet_ids + ] + return subnet_azs diff --git a/packages/aws-library/src/aws_library/s3/__init__.py b/packages/aws-library/src/aws_library/s3/__init__.py index ea8f6264d604..8a9a85f1279e 100644 --- a/packages/aws-library/src/aws_library/s3/__init__.py +++ b/packages/aws-library/src/aws_library/s3/__init__.py @@ -22,10 +22,10 @@ ) __all__: tuple[str, ...] 
= ( - "CopiedBytesTransferredCallback", - "MultiPartUploadLinks", "PRESIGNED_LINK_MAX_SIZE", "S3_MAX_FILE_SIZE", + "CopiedBytesTransferredCallback", + "MultiPartUploadLinks", "S3AccessError", "S3BucketInvalidError", "S3DestinationNotEmptyError", @@ -37,8 +37,8 @@ "S3RuntimeError", "S3UploadNotFoundError", "SimcoreS3API", - "UploadedBytesTransferredCallback", "UploadID", + "UploadedBytesTransferredCallback", ) # nopycln: file diff --git a/packages/aws-library/src/aws_library/s3/_models.py b/packages/aws-library/src/aws_library/s3/_models.py index 4d722386526a..e20ef13d0e3c 100644 --- a/packages/aws-library/src/aws_library/s3/_models.py +++ b/packages/aws-library/src/aws_library/s3/_models.py @@ -1,6 +1,6 @@ import datetime from pathlib import Path -from typing import TypeAlias, cast +from typing import Annotated, TypeAlias, cast from models_library.api_schemas_storage.storage_schemas import ETag from models_library.basic_types import SHA256Str @@ -54,9 +54,10 @@ def as_path(self) -> Path: class S3DirectoryMetaData(BaseModel, frozen=True): prefix: S3ObjectPrefix - size: ByteSize | None = Field( - ..., description="Size of the directory if computed, None if unknown" - ) + size: Annotated[ + ByteSize | None, + Field(description="Size of the directory if computed, None if unknown"), + ] def as_path(self) -> Path: return self.prefix diff --git a/packages/aws-library/tests/conftest.py b/packages/aws-library/tests/conftest.py index 47fcdd327e39..0300b9de0758 100644 --- a/packages/aws-library/tests/conftest.py +++ b/packages/aws-library/tests/conftest.py @@ -14,6 +14,7 @@ "pytest_simcore.aws_ssm_service", "pytest_simcore.environment_configs", "pytest_simcore.file_extra", + "pytest_simcore.logging", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.repository_paths", diff --git a/packages/aws-library/tests/test_ec2_client.py b/packages/aws-library/tests/test_ec2_client.py index a1cbdf55c570..bfa54403248c 100644 --- a/packages/aws-library/tests/test_ec2_client.py +++ b/packages/aws-library/tests/test_ec2_client.py @@ -4,9 +4,9 @@ import random -from collections.abc import AsyncIterator, Callable +from collections.abc import AsyncIterator, Awaitable, Callable from dataclasses import fields -from typing import cast, get_args +from typing import Any, Final, cast, get_args import botocore.exceptions import pytest @@ -14,6 +14,8 @@ from aws_library.ec2._errors import ( EC2InstanceNotFoundError, EC2InstanceTypeInvalidError, + EC2InsufficientCapacityError, + EC2SubnetsNotEnoughIPsError, EC2TooManyInstancesError, ) from aws_library.ec2._models import ( @@ -25,13 +27,15 @@ ) from faker import Faker from moto.server import ThreadedMotoServer +from pydantic import TypeAdapter +from pytest_mock import MockerFixture from settings_library.ec2 import EC2Settings from types_aiobotocore_ec2 import EC2Client from types_aiobotocore_ec2.literals import InstanceStateNameType, InstanceTypeType def _ec2_allowed_types() -> list[InstanceTypeType]: - return ["t2.nano", "m5.12xlarge", "g4dn.4xlarge"] + return ["m5.12xlarge"] @pytest.fixture(scope="session") @@ -97,7 +101,7 @@ def ec2_instance_config( ami_id=aws_ami_id, key_name=faker.pystr(), security_group_ids=[aws_security_group_id], - subnet_id=aws_subnet_id, + subnet_ids=[aws_subnet_id], iam_instance_profile="", ) @@ -415,6 +419,63 @@ async def test_stop_start_instances( assert getattr(s, f.name) == getattr(c, f.name) +async def test_start_instances_with_insufficient_instance_capacity( + simcore_ec2_api: SimcoreEC2API, + ec2_client: 
EC2Client, + faker: Faker, + ec2_instance_config: EC2InstanceConfig, + mocker: MockerFixture, +): + # we have nothing running now in ec2 + await _assert_no_instances_in_ec2(ec2_client) + # create some instance + _NUM_INSTANCES = 10 + num_instances = faker.pyint(min_value=1, max_value=_NUM_INSTANCES) + created_instances = await simcore_ec2_api.launch_instances( + ec2_instance_config, + min_number_of_instances=num_instances, + number_of_instances=num_instances, + ) + await _assert_instances_in_ec2( + ec2_client, + expected_num_reservations=1, + expected_num_instances=num_instances, + expected_instance_type=ec2_instance_config.type, + expected_tags=ec2_instance_config.tags, + expected_state="running", + ) + # stop the instances + await simcore_ec2_api.stop_instances(created_instances) + await _assert_instances_in_ec2( + ec2_client, + expected_num_reservations=1, + expected_num_instances=num_instances, + expected_instance_type=ec2_instance_config.type, + expected_tags=ec2_instance_config.tags, + expected_state="stopped", + ) + + # Mock the EC2 client to simulate InsufficientInstanceCapacity on first subnet + async def mock_start_instances(*args, **kwargs) -> Any: + # no more machines, simulate insufficient capacity + error_response: dict[str, Any] = { + "Error": { + "Code": "InsufficientInstanceCapacity", + "Message": "An error occurred (InsufficientInstanceCapacity) when calling the StartInstances operation (reached max retries: 4): Insufficient capacity.", + }, + } + raise botocore.exceptions.ClientError(error_response, "StartInstances") # type: ignore + + # Apply the mock + mocker.patch.object( + simcore_ec2_api.client, "start_instances", side_effect=mock_start_instances + ) + + # start the instances now + with pytest.raises(EC2InsufficientCapacityError): + await simcore_ec2_api.start_instances(created_instances) + + async def test_terminate_instance( simcore_ec2_api: SimcoreEC2API, ec2_client: EC2Client, @@ -529,7 +590,8 @@ async def test_set_instance_tags( # now remove some, this should do nothing await simcore_ec2_api.remove_instances_tags( - created_instances, tag_keys=[AWSTagKey("whatever_i_dont_exist")] + created_instances, + tag_keys=[TypeAdapter(AWSTagKey).validate_python("whatever_i_dont_exist")], ) await _assert_instances_in_ec2( ec2_client, @@ -575,3 +637,426 @@ async def test_remove_instance_tags_not_existing_raises( await simcore_ec2_api.remove_instances_tags( [fake_ec2_instance_data()], tag_keys=[] ) + + +async def test_launch_instances_insufficient_capacity_fallback( + simcore_ec2_api: SimcoreEC2API, + ec2_client: EC2Client, + fake_ec2_instance_type: EC2InstanceType, + faker: Faker, + aws_subnet_id: str, + create_aws_subnet_id: Callable[[], Awaitable[str]], + aws_security_group_id: str, + aws_ami_id: str, + mocker: MockerFixture, +): + await _assert_no_instances_in_ec2(ec2_client) + + # Create additional valid subnets for testing + subnet1_id = aws_subnet_id + subnet2_id = await create_aws_subnet_id() + + # Create a config with multiple valid subnet IDs + ec2_instance_config = EC2InstanceConfig( + type=fake_ec2_instance_type, + tags=faker.pydict(allowed_types=(str,)), + startup_script=faker.pystr(), + ami_id=aws_ami_id, + key_name=faker.pystr(), + security_group_ids=[aws_security_group_id], + subnet_ids=[subnet1_id, subnet2_id], + iam_instance_profile="", + ) + + # Mock the EC2 client to simulate InsufficientInstanceCapacity on first subnet + original_run_instances = simcore_ec2_api.client.run_instances + call_count = 0 + + async def mock_run_instances(*args, **kwargs) -> 
Any: + nonlocal call_count + call_count += 1 + if call_count == 1: + assert kwargs["NetworkInterfaces"][0]["SubnetId"] == subnet1_id + # First call (first subnet) - simulate insufficient capacity + error_response: dict[str, Any] = { + "Error": { + "Code": "InsufficientInstanceCapacity", + "Message": "An error occurred (InsufficientInstanceCapacity) when calling the RunInstances operation (reached max retries: 4): We currently do not have sufficient g4dn.4xlarge capacity in the Availability Zone you requested (us-east-1a). Our system will be working on provisioning additional capacity. You can currently get g4dn.4xlarge capacity by not specifying an Availability Zone in your request or choosing us-east-1b, us-east-1c, us-east-1d, us-east-1f", + }, + } + raise botocore.exceptions.ClientError(error_response, "RunInstances") # type: ignore + # Second call (second subnet) - succeed normally + assert kwargs["NetworkInterfaces"][0]["SubnetId"] == subnet2_id + return await original_run_instances(*args, **kwargs) + + # Apply the mock + mocker.patch.object( + simcore_ec2_api.client, "run_instances", side_effect=mock_run_instances + ) + instances = await simcore_ec2_api.launch_instances( + ec2_instance_config, + min_number_of_instances=1, + number_of_instances=1, + ) + + # Verify that run_instances was called twice (once for each subnet) + assert call_count == 2 + + # Verify that the instance was created (in the second subnet) + await _assert_instances_in_ec2( + ec2_client, + expected_num_reservations=1, + expected_num_instances=1, + expected_instance_type=ec2_instance_config.type, + expected_tags=ec2_instance_config.tags, + expected_state="running", + ) + + # Verify the instance was created in the second subnet (since first failed) + instance_details = await ec2_client.describe_instances( + InstanceIds=[instances[0].id] + ) + assert "Reservations" in instance_details + assert len(instance_details["Reservations"]) >= 1 + assert "Instances" in instance_details["Reservations"][0] + assert len(instance_details["Reservations"][0]["Instances"]) >= 1 + instance = instance_details["Reservations"][0]["Instances"][0] + assert "SubnetId" in instance + assert instance["SubnetId"] == subnet2_id + + +async def test_launch_instances_all_subnets_insufficient_capacity_raises_error( + simcore_ec2_api: SimcoreEC2API, + ec2_client: EC2Client, + fake_ec2_instance_type: EC2InstanceType, + faker: Faker, + aws_subnet_id: str, + create_aws_subnet_id: Callable[[], Awaitable[str]], + aws_security_group_id: str, + aws_ami_id: str, + mocker: MockerFixture, +): + await _assert_no_instances_in_ec2(ec2_client) + + # Create additional valid subnets for testing + subnet1_id = aws_subnet_id + subnet2_id = await create_aws_subnet_id() + subnet3_id = await create_aws_subnet_id() + + # Create a config with multiple valid subnet IDs + ec2_instance_config = EC2InstanceConfig( + type=fake_ec2_instance_type, + tags=faker.pydict(allowed_types=(str,)), + startup_script=faker.pystr(), + ami_id=aws_ami_id, + key_name=faker.pystr(), + security_group_ids=[aws_security_group_id], + subnet_ids=[subnet1_id, subnet2_id, subnet3_id], + iam_instance_profile="", + ) + + # Mock the EC2 client to simulate InsufficientInstanceCapacity on ALL subnets + call_count = 0 + + async def mock_run_instances(*args, **kwargs) -> Any: + nonlocal call_count + call_count += 1 + # Always simulate insufficient capacity + error_response = { + "Error": { + "Code": "InsufficientInstanceCapacity", + "Message": "An error occurred (InsufficientInstanceCapacity) when calling the 
RunInstances operation (reached max retries: 4): We currently do not have sufficient g4dn.4xlarge capacity in the Availability Zone you requested (us-east-1a). Our system will be working on provisioning additional capacity. You can currently get g4dn.4xlarge capacity by not specifying an Availability Zone in your request or choosing us-east-1b, us-east-1c, us-east-1d, us-east-1f", + }, + } + raise botocore.exceptions.ClientError(error_response, "RunInstances") # type: ignore + + # Apply the mock and expect EC2InsufficientCapacityError + mocker.patch.object( + simcore_ec2_api.client, "run_instances", side_effect=mock_run_instances + ) + with pytest.raises( + EC2InsufficientCapacityError, match=fake_ec2_instance_type.name + ) as exc_info: + await simcore_ec2_api.launch_instances( + ec2_instance_config, + min_number_of_instances=1, + number_of_instances=1, + ) + + # Verify that run_instances was called for both subnets + assert call_count == 3 + + # Verify the error contains the expected information + assert hasattr(exc_info.value, "instance_type") + assert exc_info.value.instance_type == fake_ec2_instance_type.name # type: ignore + + # Verify no instances were created + await _assert_no_instances_in_ec2(ec2_client) + + +async def test_launch_instances_partial_capacity_then_insufficient_capacity( + simcore_ec2_api: SimcoreEC2API, + ec2_client: EC2Client, + fake_ec2_instance_type: EC2InstanceType, + faker: Faker, + aws_subnet_id: str, + aws_security_group_id: str, + aws_ami_id: str, + mocker: MockerFixture, +): + """Test that launch_instances handles partial capacity correctly. + + First call: ask for 3 instances (min 1) -> should get 2, no error + Second call: ask for 3 instances (min 1) -> should raise EC2InsufficientCapacityError + """ + await _assert_no_instances_in_ec2(ec2_client) + + # Create a config with a single subnet (as requested) + ec2_instance_config = EC2InstanceConfig( + type=fake_ec2_instance_type, + tags=faker.pydict(allowed_types=(str,)), + startup_script=faker.pystr(), + ami_id=aws_ami_id, + key_name=faker.pystr(), + security_group_ids=[aws_security_group_id], + subnet_ids=[aws_subnet_id], # Single subnet only + iam_instance_profile="", + ) + + # Mock the EC2 client to simulate partial capacity behavior + original_run_instances = simcore_ec2_api.client.run_instances + call_count = 0 + + async def mock_run_instances(*args, **kwargs): + nonlocal call_count + call_count += 1 + + if call_count == 1: + # First call: return only 2 instances when 3 were requested + # Simulate that the subnet has capacity for only 2 machines + required_instances = kwargs["MaxCount"] + kwargs_copy = kwargs.copy() + kwargs_copy["MinCount"] = required_instances - 1 + kwargs_copy["MaxCount"] = required_instances - 1 + return await original_run_instances(*args, **kwargs_copy) + + # Second call: simulate insufficient capacity (subnet is full) + error_response = { + "Error": { + "Code": "InsufficientInstanceCapacity", + "Message": "An error occurred (InsufficientInstanceCapacity) when calling the RunInstances operation (reached max retries: 4): We currently do not have sufficient g4dn.4xlarge capacity in the Availability Zone you requested (us-east-1a). Our system will be working on provisioning additional capacity. 
You can currently get g4dn.4xlarge capacity by not specifying an Availability Zone in your request or choosing us-east-1b, us-east-1c, us-east-1d, us-east-1f", + }, + } + raise botocore.exceptions.ClientError(error_response, "RunInstances") # type: ignore + + # Apply the mock for the first call + mocker.patch.object( + simcore_ec2_api.client, "run_instances", side_effect=mock_run_instances + ) + # First call: ask for 3 instances (min 1) -> should get 2, no error + instances = await simcore_ec2_api.launch_instances( + ec2_instance_config, + min_number_of_instances=1, + number_of_instances=3, + ) + + # Verify we got 2 instances (partial capacity) + assert len(instances) == 2 + assert call_count == 1 + + # Verify instances were created + await _assert_instances_in_ec2( + ec2_client, + expected_num_reservations=1, + expected_num_instances=2, + expected_instance_type=ec2_instance_config.type, + expected_tags=ec2_instance_config.tags, + expected_state="running", + ) + + # Second call: ask for 3 instances (min 1) -> should raise EC2InsufficientCapacityError + with pytest.raises(EC2InsufficientCapacityError) as exc_info: + await simcore_ec2_api.launch_instances( + ec2_instance_config, + min_number_of_instances=1, + number_of_instances=3, + ) + + # Verify that run_instances was called twice total + assert call_count == 2 + + # Verify the error contains the expected information + subnet_desc = await ec2_client.describe_subnets(SubnetIds=[aws_subnet_id]) + assert hasattr(exc_info.value, "instance_type") + assert exc_info.value.instance_type == fake_ec2_instance_type.name # type: ignore + assert exc_info.value.availability_zones == [ # type: ignore + subnet_desc["Subnets"][0]["AvailabilityZone"] # type: ignore + ] + + # Verify still only 2 instances exist (no new ones were created) + await _assert_instances_in_ec2( + ec2_client, + expected_num_reservations=1, + expected_num_instances=2, + expected_instance_type=ec2_instance_config.type, + expected_tags=ec2_instance_config.tags, + expected_state="running", + ) + + +_RESERVED_IPS: Final[int] = 5 # AWS reserves 5 IPs in each subnet + + +@pytest.fixture +async def with_small_subnet( + create_aws_subnet_id: Callable[..., Awaitable[str]], +) -> tuple[str, int]: + """Creates a subnet with a single IP address to simulate InsufficientInstanceCapacity""" + single_ip_cidr = ( + "10.0.11.0/29" # /29 is the minimum allowed by AWS, gives 8 addresses + ) + return ( + await create_aws_subnet_id(single_ip_cidr), + 8 - _RESERVED_IPS, + ) # 5 are reserved by AWS + + +async def test_launch_instances_with_small_subnet( + simcore_ec2_api: SimcoreEC2API, + ec2_client: EC2Client, + fake_ec2_instance_type: EC2InstanceType, + faker: Faker, + with_small_subnet: tuple[str, int], + aws_subnet_id: str, + aws_security_group_id: str, + aws_ami_id: str, + mocker: MockerFixture, +): + await _assert_no_instances_in_ec2(ec2_client) + small_subnet_id, capacity = with_small_subnet + # Create a config with a single subnet (as requested) + ec2_instance_config = EC2InstanceConfig( + type=fake_ec2_instance_type, + tags=faker.pydict(allowed_types=(str,)), + startup_script=faker.pystr(), + ami_id=aws_ami_id, + key_name=faker.pystr(), + security_group_ids=[aws_security_group_id], + subnet_ids=[small_subnet_id, aws_subnet_id], + iam_instance_profile="", + ) + + # first call shall work in the first subnet + instances = await simcore_ec2_api.launch_instances( + ec2_instance_config, + min_number_of_instances=capacity, + number_of_instances=capacity, + ) + + # Verify we got 2 instances (partial 
capacity) + assert len(instances) == capacity + + # Verify instances were created + await _assert_instances_in_ec2( + ec2_client, + expected_num_reservations=1, + expected_num_instances=capacity, + expected_instance_type=ec2_instance_config.type, + expected_tags=ec2_instance_config.tags, + expected_state="running", + ) + + instances = await simcore_ec2_api.launch_instances( + ec2_instance_config, + min_number_of_instances=1, + number_of_instances=1, + ) + + +async def test_launch_instances_raises_ec2_subnets_not_enough_ips_error( + simcore_ec2_api: SimcoreEC2API, + ec2_client: EC2Client, + fake_ec2_instance_type: EC2InstanceType, + faker: Faker, + create_aws_subnet_id: Callable[..., Awaitable[str]], + aws_security_group_id: str, + aws_ami_id: str, + mocker: MockerFixture, +) -> None: + """Test that EC2SubnetsNotEnoughIPsError is raised when subnets don't have enough IPs.""" + await _assert_no_instances_in_ec2(ec2_client) + + # Create additional small subnets + subnet1_id = await create_aws_subnet_id("10.0.200.0/29") # 3 usable IPs + subnet2_id = await create_aws_subnet_id("10.0.201.0/29") # 3 usable IPs + + ec2_instance_config = EC2InstanceConfig( + type=fake_ec2_instance_type, + tags=faker.pydict(allowed_types=(str,)), + startup_script=faker.pystr(), + ami_id=aws_ami_id, + key_name=faker.pystr(), + security_group_ids=[aws_security_group_id], + subnet_ids=[subnet1_id, subnet2_id], + iam_instance_profile="", + ) + + with pytest.raises(EC2SubnetsNotEnoughIPsError) as exc_info: + await simcore_ec2_api.launch_instances( + ec2_instance_config, + min_number_of_instances=7, + number_of_instances=7, + ) + + error = exc_info.value + assert error.subnet_ids == [subnet1_id, subnet2_id] # type: ignore + assert error.instance_type == fake_ec2_instance_type.name # type: ignore + assert error.available_ips == 6 # type: ignore + + +@pytest.mark.xfail( + reason="if the user asks for a minimum number of instances that cannot fit a subnet, then it currently raises! 
" + "it is currently not required that the instances are distributed among subnets" +) +async def test_launch_instances_distributes_instances_among_subnets( + simcore_ec2_api: SimcoreEC2API, + ec2_client: EC2Client, + fake_ec2_instance_type: EC2InstanceType, + faker: Faker, + create_aws_subnet_id: Callable[..., Awaitable[str]], + aws_security_group_id: str, + aws_ami_id: str, + mocker: MockerFixture, +) -> None: + """Test that EC2SubnetsNotEnoughIPsError is raised when subnets don't have enough IPs.""" + await _assert_no_instances_in_ec2(ec2_client) + + # Create additional small subnets + subnet1_id = await create_aws_subnet_id("10.0.200.0/29") # 3 usable IPs + subnet2_id = await create_aws_subnet_id("10.0.201.0/29") # 3 usable IPs + + ec2_instance_config = EC2InstanceConfig( + type=fake_ec2_instance_type, + tags=faker.pydict(allowed_types=(str,)), + startup_script=faker.pystr(), + ami_id=aws_ami_id, + key_name=faker.pystr(), + security_group_ids=[aws_security_group_id], + subnet_ids=[subnet1_id, subnet2_id], + iam_instance_profile="", + ) + + await simcore_ec2_api.launch_instances( + ec2_instance_config, + min_number_of_instances=5, + number_of_instances=5, + ) + + await _assert_instances_in_ec2( + ec2_client, + expected_num_reservations=1, + expected_num_instances=5, + expected_instance_type=ec2_instance_config.type, + expected_tags=ec2_instance_config.tags, + expected_state="running", + ) diff --git a/packages/celery-library/Makefile b/packages/celery-library/Makefile new file mode 100644 index 000000000000..ccce149f8530 --- /dev/null +++ b/packages/celery-library/Makefile @@ -0,0 +1,52 @@ +# +# Targets for DEVELOPMENT of Celery Library +# +include ../../scripts/common.Makefile +include ../../scripts/common-package.Makefile + +.PHONY: requirements +requirements: ## compiles pip requirements (.in -> .txt) + @$(MAKE_C) requirements reqs + + +.PHONY: install-dev install-prod install-ci +install-dev install-prod install-ci: _check_venv_active ## install app in development/production or CI mode + # installing in $(subst install-,,$@) mode + @uv pip sync requirements/$(subst install-,,$@).txt + + +.PHONY: tests tests-ci +tests: ## runs unit tests + # running unit tests + @pytest \ + --asyncio-mode=auto \ + --color=yes \ + --cov-config=../../.coveragerc \ + --cov-report=term-missing \ + --cov=celery_library \ + --durations=10 \ + --exitfirst \ + --failed-first \ + --keep-docker-up \ + --pdb \ + -vv \ + $(CURDIR)/tests + +tests-ci: ## runs unit tests + # running unit tests + @pytest \ + --asyncio-mode=auto \ + --color=yes \ + --cov-append \ + --cov-config=../../.coveragerc \ + --cov-report=term-missing \ + --cov-report=xml \ + --junitxml=junit.xml -o junit_family=legacy \ + --keep-docker-up \ + --cov=celery_library \ + --durations=10 \ + --log-date-format="%Y-%m-%d %H:%M:%S" \ + --log-format="%(asctime)s %(levelname)s %(message)s" \ + --verbose \ + -m "not heavy_load" \ + $(CURDIR)/tests diff --git a/packages/celery-library/README.md b/packages/celery-library/README.md new file mode 100644 index 000000000000..b64223cfcc6f --- /dev/null +++ b/packages/celery-library/README.md @@ -0,0 +1,20 @@ +# simcore Celery library + +Provides a wrapper around Celery library [1]. 
+ +## Installation + +```console +make help +make install-dev +``` + +## Test + +```console +make help +make test-dev +``` + + +[1] https://github.com/celery/celery diff --git a/packages/celery-library/VERSION b/packages/celery-library/VERSION new file mode 100644 index 000000000000..6e8bf73aa550 --- /dev/null +++ b/packages/celery-library/VERSION @@ -0,0 +1 @@ +0.1.0 diff --git a/packages/celery-library/requirements/Makefile b/packages/celery-library/requirements/Makefile new file mode 100644 index 000000000000..3f25442b790e --- /dev/null +++ b/packages/celery-library/requirements/Makefile @@ -0,0 +1,6 @@ +# +# Targets to pip-compile requirements +# +include ../../../requirements/base.Makefile + +# Add here any extra explicit dependency: e.g. _migration.txt: _base.txt diff --git a/packages/celery-library/requirements/_base.in b/packages/celery-library/requirements/_base.in new file mode 100644 index 000000000000..3fcf6a9e24c4 --- /dev/null +++ b/packages/celery-library/requirements/_base.in @@ -0,0 +1,10 @@ +# +# Specifies third-party dependencies for 'celery-library' +# +--constraint ../../../requirements/constraints.txt +--requirement ../../../packages/common-library/requirements/_base.in +--requirement ../../../packages/models-library/requirements/_base.in +--requirement ../../../packages/service-library/requirements/_base.in +--requirement ../../../packages/settings-library/requirements/_base.in + +celery[redis] diff --git a/packages/celery-library/requirements/_base.txt b/packages/celery-library/requirements/_base.txt new file mode 100644 index 000000000000..9837fc16e80e --- /dev/null +++ b/packages/celery-library/requirements/_base.txt @@ -0,0 +1,433 @@ +aio-pika==9.5.5 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiocache==0.12.3 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiodebug==2.3.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiodocker==0.24.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiofiles==24.1.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +aiohappyeyeballs==2.6.1 + # via aiohttp +aiohttp==3.11.18 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # aiodocker +aiormq==6.8.1 + # via aio-pika +aiosignal==1.3.2 + # via aiohttp +amqp==5.3.1 + # via kombu +annotated-types==0.7.0 + # via pydantic +anyio==4.9.0 + # via + # fast-depends + # faststream +arrow==1.3.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in +attrs==25.3.0 + # via + # aiohttp + # jsonschema + # referencing +billiard==4.2.1 + # via celery +celery==5.5.2 + # via -r requirements/_base.in +certifi==2025.4.26 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # requests +charset-normalizer==3.4.2 + # via requests +click==8.2.1 + # via + # celery + # click-didyoumean + # click-plugins + # click-repl + # typer +click-didyoumean==0.3.1 + # via celery +click-plugins==1.1.1 + # via celery +click-repl==0.3.0 + # via celery +dnspython==2.7.0 + # via email-validator +email-validator==2.2.0 + # via pydantic +exceptiongroup==1.3.0 + # via aio-pika +fast-depends==2.4.12 + # via faststream +faststream==0.5.41 + # via -r requirements/../../../packages/service-library/requirements/_base.in +frozenlist==1.6.0 + # via + # aiohttp + # aiosignal +googleapis-common-protos==1.70.0 + # via + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +grpcio==1.71.0 + # via opentelemetry-exporter-otlp-proto-grpc +idna==3.10 + # via + # anyio + # email-validator + # requests + # 
yarl +importlib-metadata==8.6.1 + # via opentelemetry-api +jsonref==1.1.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +jsonschema==4.23.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +jsonschema-specifications==2025.4.1 + # via jsonschema +kombu==5.5.3 + # via celery +markdown-it-py==3.0.0 + # via rich +mdurl==0.1.2 + # via markdown-it-py +multidict==6.4.3 + # via + # aiohttp + # yarl +opentelemetry-api==1.34.1 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http + # opentelemetry-instrumentation + # opentelemetry-instrumentation-aio-pika + # opentelemetry-instrumentation-asyncpg + # opentelemetry-instrumentation-logging + # opentelemetry-instrumentation-redis + # opentelemetry-instrumentation-requests + # opentelemetry-sdk + # opentelemetry-semantic-conventions +opentelemetry-exporter-otlp==1.34.1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-exporter-otlp-proto-common==1.34.1 + # via + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +opentelemetry-exporter-otlp-proto-grpc==1.34.1 + # via opentelemetry-exporter-otlp +opentelemetry-exporter-otlp-proto-http==1.34.1 + # via opentelemetry-exporter-otlp +opentelemetry-instrumentation==0.55b1 + # via + # opentelemetry-instrumentation-aio-pika + # opentelemetry-instrumentation-asyncpg + # opentelemetry-instrumentation-logging + # opentelemetry-instrumentation-redis + # opentelemetry-instrumentation-requests +opentelemetry-instrumentation-aio-pika==0.55b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-asyncpg==0.55b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-logging==0.55b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-redis==0.55b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-requests==0.55b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-proto==1.34.1 + # via + # opentelemetry-exporter-otlp-proto-common + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +opentelemetry-sdk==1.34.1 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http +opentelemetry-semantic-conventions==0.55b1 + # via + # opentelemetry-instrumentation + # opentelemetry-instrumentation-asyncpg + # opentelemetry-instrumentation-redis + # opentelemetry-instrumentation-requests + # opentelemetry-sdk +opentelemetry-util-http==0.55b1 + # via opentelemetry-instrumentation-requests +orjson==3.10.18 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in +packaging==25.0 + # via opentelemetry-instrumentation +pamqp==3.3.0 + # via aiormq +prompt-toolkit==3.0.51 + # via click-repl +propcache==0.3.1 + # via + # aiohttp + # yarl +protobuf==5.29.5 + # via + # googleapis-common-protos + # opentelemetry-proto +psutil==7.0.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +pycryptodome==3.23.0 + # via stream-zip +pydantic==2.11.7 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # fast-depends + # pydantic-extra-types + # pydantic-settings +pydantic-core==2.33.2 + # via pydantic +pydantic-extra-types==2.10.5 + # via + # -r requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in +pydantic-settings==2.7.0 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in +pygments==2.19.1 + # via rich +pyinstrument==5.0.1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +pyjwt==2.9.0 + # via redis +python-dateutil==2.9.0.post0 + # via + # arrow + # celery +python-dotenv==1.1.0 + # via pydantic-settings +pyyaml==6.0.2 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_base.in +redis==5.3.0 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/service-library/requirements/_base.in + # celery +referencing==0.35.1 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # jsonschema + # jsonschema-specifications +requests==2.32.4 + # via opentelemetry-exporter-otlp-proto-http +rich==14.1.0 + # via + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in + # typer +rpds-py==0.25.0 + # via + # jsonschema + # referencing +shellingham==1.5.4 + # via typer +six==1.17.0 + # via python-dateutil +sniffio==1.3.1 + # via anyio +stream-zip==0.0.83 + # via -r requirements/../../../packages/service-library/requirements/_base.in +tenacity==9.1.2 + # via -r requirements/../../../packages/service-library/requirements/_base.in +toolz==1.0.0 + # via -r requirements/../../../packages/service-library/requirements/_base.in +tqdm==4.67.1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +typer==0.16.1 + # via + # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/settings-library/requirements/_base.in +types-python-dateutil==2.9.0.20250516 + # via arrow +typing-extensions==4.14.1 + # via + # aiodebug + # anyio + # exceptiongroup + # faststream + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http + # opentelemetry-sdk + # opentelemetry-semantic-conventions + # pydantic + # pydantic-core + # pydantic-extra-types + # typer + # typing-inspection +typing-inspection==0.4.0 + # via pydantic +tzdata==2025.2 + # via kombu +urllib3==2.5.0 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # 
requests +vine==5.1.0 + # via + # amqp + # celery + # kombu +wcwidth==0.2.13 + # via prompt-toolkit +wrapt==1.17.2 + # via + # opentelemetry-instrumentation + # opentelemetry-instrumentation-aio-pika + # opentelemetry-instrumentation-redis +yarl==1.20.0 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # aio-pika + # aiohttp + # aiormq +zipp==3.21.0 + # via importlib-metadata diff --git a/packages/celery-library/requirements/_test.in b/packages/celery-library/requirements/_test.in new file mode 100644 index 000000000000..e6d3bd92107c --- /dev/null +++ b/packages/celery-library/requirements/_test.in @@ -0,0 +1,28 @@ +# +# Specifies dependencies required to run 'celery-library' +# +--constraint ../../../requirements/constraints.txt + +# Adds base AS CONSTRAINT specs, not requirement. +# - Resulting _text.txt is a frozen list of EXTRA packages for testing, besides _base.txt +# +--constraint _base.txt + +# testing +coverage +faker +fakeredis[lua] +httpx +pint +pytest +pytest-asyncio +pytest-benchmark +pytest-celery +pytest-cov +pytest-icdiff +pytest-instafail +pytest-mock +pytest-runner +pytest-sugar +python-dotenv +pyyaml diff --git a/packages/celery-library/requirements/_test.txt b/packages/celery-library/requirements/_test.txt new file mode 100644 index 000000000000..eaca25adec0f --- /dev/null +++ b/packages/celery-library/requirements/_test.txt @@ -0,0 +1,221 @@ +amqp==5.3.1 + # via + # -c requirements/_base.txt + # kombu +anyio==4.9.0 + # via + # -c requirements/_base.txt + # httpx +billiard==4.2.1 + # via + # -c requirements/_base.txt + # celery +celery==5.5.2 + # via + # -c requirements/_base.txt + # pytest-celery +certifi==2025.4.26 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # httpcore + # httpx + # requests +charset-normalizer==3.4.2 + # via + # -c requirements/_base.txt + # requests +click==8.2.1 + # via + # -c requirements/_base.txt + # celery + # click-didyoumean + # click-plugins + # click-repl +click-didyoumean==0.3.1 + # via + # -c requirements/_base.txt + # celery +click-plugins==1.1.1 + # via + # -c requirements/_base.txt + # celery +click-repl==0.3.0 + # via + # -c requirements/_base.txt + # celery +coverage==7.8.0 + # via + # -r requirements/_test.in + # pytest-cov +debugpy==1.8.14 + # via pytest-celery +docker==7.1.0 + # via + # pytest-celery + # pytest-docker-tools +faker==37.3.0 + # via -r requirements/_test.in +fakeredis==2.30.3 + # via -r requirements/_test.in +flexcache==0.3 + # via pint +flexparser==0.4 + # via pint +h11==0.16.0 + # via httpcore +httpcore==1.0.9 + # via httpx +httpx==0.28.1 + # via + # -c requirements/../../../requirements/constraints.txt + # -r requirements/_test.in +icdiff==2.0.7 + # via pytest-icdiff +idna==3.10 + # via + # -c requirements/_base.txt + # anyio + # httpx + # requests +iniconfig==2.1.0 + # via pytest +kombu==5.5.3 + # via + # -c requirements/_base.txt + # celery + # pytest-celery +lupa==2.5 + # via fakeredis +packaging==25.0 + # via + # -c requirements/_base.txt + # pytest + # pytest-sugar +pint==0.24.4 + # via -r requirements/_test.in +platformdirs==4.3.8 + # via pint +pluggy==1.6.0 + # via + # pytest + # pytest-cov +pprintpp==0.4.0 + # via pytest-icdiff +prompt-toolkit==3.0.51 + # via + # -c requirements/_base.txt + # click-repl +psutil==7.0.0 + # via + # -c requirements/_base.txt + # pytest-celery +py-cpuinfo==9.0.0 + # via pytest-benchmark +pygments==2.19.1 + # via + # -c requirements/_base.txt + # pytest +pyjwt==2.9.0 + # via + # -c 
requirements/_base.txt + # redis +pytest==8.4.1 + # via + # -r requirements/_test.in + # pytest-asyncio + # pytest-benchmark + # pytest-cov + # pytest-docker-tools + # pytest-icdiff + # pytest-instafail + # pytest-mock + # pytest-sugar +pytest-asyncio==1.0.0 + # via -r requirements/_test.in +pytest-benchmark==5.1.0 + # via -r requirements/_test.in +pytest-celery==1.2.0 + # via -r requirements/_test.in +pytest-cov==6.2.1 + # via -r requirements/_test.in +pytest-docker-tools==3.1.9 + # via pytest-celery +pytest-icdiff==0.9 + # via -r requirements/_test.in +pytest-instafail==0.5.0 + # via -r requirements/_test.in +pytest-mock==3.14.1 + # via -r requirements/_test.in +pytest-runner==6.0.1 + # via -r requirements/_test.in +pytest-sugar==1.0.0 + # via -r requirements/_test.in +python-dateutil==2.9.0.post0 + # via + # -c requirements/_base.txt + # celery +python-dotenv==1.1.0 + # via + # -c requirements/_base.txt + # -r requirements/_test.in +pyyaml==6.0.2 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # -r requirements/_test.in +redis==5.3.0 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # fakeredis +requests==2.32.4 + # via + # -c requirements/_base.txt + # docker +setuptools==80.9.0 + # via pytest-celery +six==1.17.0 + # via + # -c requirements/_base.txt + # python-dateutil +sniffio==1.3.1 + # via + # -c requirements/_base.txt + # anyio +sortedcontainers==2.4.0 + # via fakeredis +tenacity==9.1.2 + # via + # -c requirements/_base.txt + # pytest-celery +termcolor==3.1.0 + # via pytest-sugar +typing-extensions==4.14.1 + # via + # -c requirements/_base.txt + # anyio + # flexcache + # flexparser + # pint +tzdata==2025.2 + # via + # -c requirements/_base.txt + # faker + # kombu +urllib3==2.5.0 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # docker + # requests +vine==5.1.0 + # via + # -c requirements/_base.txt + # amqp + # celery + # kombu +wcwidth==0.2.13 + # via + # -c requirements/_base.txt + # prompt-toolkit diff --git a/packages/celery-library/requirements/_tools.in b/packages/celery-library/requirements/_tools.in new file mode 100644 index 000000000000..1def82c12a30 --- /dev/null +++ b/packages/celery-library/requirements/_tools.in @@ -0,0 +1,5 @@ +--constraint ../../../requirements/constraints.txt +--constraint _base.txt +--constraint _test.txt + +--requirement ../../../requirements/devenv.txt diff --git a/packages/celery-library/requirements/_tools.txt b/packages/celery-library/requirements/_tools.txt new file mode 100644 index 000000000000..24a39a78e9b6 --- /dev/null +++ b/packages/celery-library/requirements/_tools.txt @@ -0,0 +1,89 @@ +astroid==3.3.10 + # via pylint +black==25.1.0 + # via -r requirements/../../../requirements/devenv.txt +build==1.2.2.post1 + # via pip-tools +bump2version==1.0.1 + # via -r requirements/../../../requirements/devenv.txt +cfgv==3.4.0 + # via pre-commit +click==8.2.1 + # via + # -c requirements/_base.txt + # -c requirements/_test.txt + # black + # pip-tools +dill==0.4.0 + # via pylint +distlib==0.3.9 + # via virtualenv +filelock==3.18.0 + # via virtualenv +identify==2.6.10 + # via pre-commit +isort==6.0.1 + # via + # -r requirements/../../../requirements/devenv.txt + # pylint +mccabe==0.7.0 + # via pylint +mypy==1.16.1 + # via -r requirements/../../../requirements/devenv.txt +mypy-extensions==1.1.0 + # via + # black + # mypy +nodeenv==1.9.1 + # via pre-commit +packaging==25.0 + # via + # -c 
requirements/_base.txt + # -c requirements/_test.txt + # black + # build +pathspec==0.12.1 + # via + # black + # mypy +pip==25.1.1 + # via pip-tools +pip-tools==7.4.1 + # via -r requirements/../../../requirements/devenv.txt +platformdirs==4.3.8 + # via + # -c requirements/_test.txt + # black + # pylint + # virtualenv +pre-commit==4.2.0 + # via -r requirements/../../../requirements/devenv.txt +pylint==3.3.7 + # via -r requirements/../../../requirements/devenv.txt +pyproject-hooks==1.2.0 + # via + # build + # pip-tools +pyyaml==6.0.2 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # -c requirements/_test.txt + # pre-commit +ruff==0.11.10 + # via -r requirements/../../../requirements/devenv.txt +setuptools==80.9.0 + # via + # -c requirements/_test.txt + # pip-tools +tomlkit==0.13.2 + # via pylint +typing-extensions==4.14.1 + # via + # -c requirements/_base.txt + # -c requirements/_test.txt + # mypy +virtualenv==20.31.2 + # via pre-commit +wheel==0.45.1 + # via pip-tools diff --git a/packages/celery-library/requirements/ci.txt b/packages/celery-library/requirements/ci.txt new file mode 100644 index 000000000000..ccbecf3196f8 --- /dev/null +++ b/packages/celery-library/requirements/ci.txt @@ -0,0 +1,22 @@ +# Shortcut to install all packages for the continuous integration (CI) of 'celery-library' +# +# - As ci.txt but w/ tests +# +# Usage: +# pip install -r requirements/ci.txt +# + +# installs base + tests requirements
--requirement _base.txt +--requirement _test.txt +--requirement _tools.txt + +# installs this repo's packages +simcore-common-library @ ../common-library +simcore-models-library @ ../models-library/ +pytest-simcore @ ../pytest-simcore +simcore-service-library @ ../service-library/ +simcore-settings-library @ ../settings-library/ + +# current module +simcore-celery-library @ . diff --git a/packages/celery-library/requirements/dev.txt b/packages/celery-library/requirements/dev.txt new file mode 100644 index 000000000000..115e5cfb20d2 --- /dev/null +++ b/packages/celery-library/requirements/dev.txt @@ -0,0 +1,22 @@ +# Shortcut to install all packages needed to develop 'celery-library' +# +# - As ci.txt but with current and repo packages in develop (edit) mode +# +# Usage: +# pip install -r requirements/dev.txt +# + +# installs base + tests requirements +--requirement _base.txt +--requirement _test.txt +--requirement _tools.txt + +# installs this repo's packages +--editable ../common-library/ +--editable ../models-library/ +--editable ../pytest-simcore/ +--editable ../service-library/ +--editable ../settings-library/ + +# current module +--editable .
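
For orientation before the package sources below: a minimal usage sketch of the new celery-library, assembled only from modules and test fixtures introduced in this PR. The task body, its parameter names and the surrounding service bootstrapping are illustrative assumptions, not part of the change.

from celery import Task  # type: ignore[import-untyped]
from celery_library.common import create_app
from celery_library.task import register_task
from servicelib.celery.models import ExecutionMetadata, OwnerMetadata, TaskID
from servicelib.celery.task_manager import TaskManager
from settings_library.celery import CelerySettings


async def export_data(task: Task, task_id: TaskID, files: list[str]) -> str:
    # async task callables receive the Celery task and their TaskID explicitly
    return "archive.zip"


# worker side: build the Celery app from settings and register the task
celery_settings = CelerySettings.create_from_envs()
celery_app = create_app(celery_settings)
register_task(celery_app, export_data)


# client side (e.g. inside an RPC handler): submit and track the task via the TaskManager
async def start_export(task_manager: TaskManager, owner_metadata: OwnerMetadata):
    task_uuid = await task_manager.submit_task(
        ExecutionMetadata(name=export_data.__name__),
        owner_metadata=owner_metadata,
        files=["a/file.txt"],  # forwarded to export_data as keyword arguments
    )
    return await task_manager.get_task_status(
        owner_metadata=owner_metadata, task_uuid=task_uuid
    )
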
diff --git a/packages/celery-library/setup.cfg b/packages/celery-library/setup.cfg new file mode 100644 index 000000000000..21600ef13506 --- /dev/null +++ b/packages/celery-library/setup.cfg @@ -0,0 +1,22 @@ +[bumpversion] +current_version = 0.1.0 +commit = True +message = packages/celery-library version: {current_version} → {new_version} +tag = False +commit_args = --no-verify + +[bumpversion:file:VERSION] + +[bdist_wheel] +universal = 1 + +[aliases] +test = pytest + +[tool:pytest] +asyncio_mode = auto +asyncio_default_fixture_loop_scope = function + +[mypy] +plugins = + pydantic.mypy diff --git a/packages/celery-library/setup.py b/packages/celery-library/setup.py new file mode 100644 index 000000000000..226e43f0bf80 --- /dev/null +++ b/packages/celery-library/setup.py @@ -0,0 +1,59 @@ +import re +import sys +from pathlib import Path + +from setuptools import find_packages, setup + + +def read_reqs(reqs_path: Path) -> set[str]: + return { + r + for r in re.findall( + r"(^[^#\n-][\w\[,\]]+[-~>=<.\w]*)", + reqs_path.read_text(), + re.MULTILINE, + ) + if isinstance(r, str) + } + + +CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent + +INSTALL_REQUIREMENTS = tuple( + read_reqs(CURRENT_DIR / "requirements" / "_base.in") +) # WEAK requirements + +TEST_REQUIREMENTS = tuple( + read_reqs(CURRENT_DIR / "requirements" / "_test.txt") +) # STRICT requirements + + +SETUP = { + "name": "simcore-celery-library", + "version": Path(CURRENT_DIR / "VERSION").read_text().strip(), + "author": "Giancarlo Romeo (giancarloromeo)", + "description": "Core service library for Celery", + "python_requires": "~=3.11", + "classifiers": [ + "Development Status :: 2 - Pre-Alpha", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Programming Language :: Python :: 3.10", + ], + "long_description": Path(CURRENT_DIR / "README.md").read_text(), + "license": "MIT license", + "install_requires": INSTALL_REQUIREMENTS, + "packages": find_packages(where="src"), + "package_data": {"": ["py.typed"]}, + "package_dir": {"": "src"}, + "include_package_data": True, + "test_suite": "tests", + "tests_require": TEST_REQUIREMENTS, + "extras_require": {"test": TEST_REQUIREMENTS}, + "zip_safe": False, +} + + +if __name__ == "__main__": + setup(**SETUP) diff --git a/services/storage/src/simcore_service_storage/modules/celery/backends/__init__.py b/packages/celery-library/src/celery_library/__init__.py similarity index 100% rename from services/storage/src/simcore_service_storage/modules/celery/backends/__init__.py rename to packages/celery-library/src/celery_library/__init__.py diff --git a/services/web/server/src/simcore_service_webserver/users/_common/__init__.py b/packages/celery-library/src/celery_library/backends/__init__.py similarity index 100% rename from services/web/server/src/simcore_service_webserver/users/_common/__init__.py rename to packages/celery-library/src/celery_library/backends/__init__.py diff --git a/services/storage/src/simcore_service_storage/modules/celery/backends/_redis.py b/packages/celery-library/src/celery_library/backends/redis.py similarity index 58% rename from services/storage/src/simcore_service_storage/modules/celery/backends/_redis.py rename to packages/celery-library/src/celery_library/backends/redis.py index 3fd9984fb2ab..cc19becbbcf5 100644 --- a/services/storage/src/simcore_service_storage/modules/celery/backends/_redis.py +++ b/packages/celery-library/src/celery_library/backends/redis.py @@ -1,25 
+1,23 @@ import contextlib import logging from datetime import timedelta -from typing import Final +from typing import TYPE_CHECKING, Final from models_library.progress_bar import ProgressReport from pydantic import ValidationError -from servicelib.redis._client import RedisClientSDK - -from ..models import ( +from servicelib.celery.models import ( + WILDCARD, + ExecutionMetadata, + OwnerMetadata, Task, - TaskContext, TaskID, - TaskMetadata, - TaskUUID, - build_task_id_prefix, + TaskInfoStore, ) +from servicelib.redis import RedisClientSDK, handle_redis_returns_union_types _CELERY_TASK_INFO_PREFIX: Final[str] = "celery-task-info-" _CELERY_TASK_ID_KEY_ENCODING = "utf-8" -_CELERY_TASK_ID_KEY_SEPARATOR: Final[str] = ":" -_CELERY_TASK_SCAN_COUNT_PER_BATCH: Final[int] = 10000 +_CELERY_TASK_SCAN_COUNT_PER_BATCH: Final[int] = 1000 _CELERY_TASK_METADATA_KEY: Final[str] = "metadata" _CELERY_TASK_PROGRESS_KEY: Final[str] = "progress" @@ -37,32 +35,33 @@ def __init__(self, redis_client_sdk: RedisClientSDK) -> None: async def create_task( self, task_id: TaskID, - task_metadata: TaskMetadata, + execution_metadata: ExecutionMetadata, expiry: timedelta, ) -> None: task_key = _build_key(task_id) - await self._redis_client_sdk.redis.hset( - name=task_key, - key=_CELERY_TASK_METADATA_KEY, - value=task_metadata.model_dump_json(), - ) # type: ignore + await handle_redis_returns_union_types( + self._redis_client_sdk.redis.hset( + name=task_key, + key=_CELERY_TASK_METADATA_KEY, + value=execution_metadata.model_dump_json(), + ) + ) await self._redis_client_sdk.redis.expire( task_key, expiry, ) - async def exists_task(self, task_id: TaskID) -> bool: - n = await self._redis_client_sdk.redis.exists(_build_key(task_id)) - assert isinstance(n, int) # nosec - return n > 0 - - async def get_task_metadata(self, task_id: TaskID) -> TaskMetadata | None: - raw_result = await self._redis_client_sdk.redis.hget(_build_key(task_id), _CELERY_TASK_METADATA_KEY) # type: ignore + async def get_task_metadata(self, task_id: TaskID) -> ExecutionMetadata | None: + raw_result = await handle_redis_returns_union_types( + self._redis_client_sdk.redis.hget( + _build_key(task_id), _CELERY_TASK_METADATA_KEY + ) + ) if not raw_result: return None try: - return TaskMetadata.model_validate_json(raw_result) + return ExecutionMetadata.model_validate_json(raw_result) except ValidationError as exc: _logger.debug( "Failed to deserialize task metadata for task %s: %s", task_id, f"{exc}" @@ -70,7 +69,11 @@ async def get_task_metadata(self, task_id: TaskID) -> TaskMetadata | None: return None async def get_task_progress(self, task_id: TaskID) -> ProgressReport | None: - raw_result = await self._redis_client_sdk.redis.hget(_build_key(task_id), _CELERY_TASK_PROGRESS_KEY) # type: ignore + raw_result = await handle_redis_returns_union_types( + self._redis_client_sdk.redis.hget( + _build_key(task_id), _CELERY_TASK_PROGRESS_KEY + ) + ) if not raw_result: return None @@ -82,18 +85,15 @@ async def get_task_progress(self, task_id: TaskID) -> ProgressReport | None: ) return None - async def list_tasks(self, task_context: TaskContext) -> list[Task]: - search_key = ( - _CELERY_TASK_INFO_PREFIX - + build_task_id_prefix(task_context) - + _CELERY_TASK_ID_KEY_SEPARATOR + async def list_tasks(self, owner_metadata: OwnerMetadata) -> list[Task]: + search_key = _CELERY_TASK_INFO_PREFIX + owner_metadata.model_dump_task_id( + task_uuid=WILDCARD ) - search_key_len = len(search_key) keys: list[str] = [] pipeline = self._redis_client_sdk.redis.pipeline() async for key in 
self._redis_client_sdk.redis.scan_iter( - match=search_key + "*", count=_CELERY_TASK_SCAN_COUNT_PER_BATCH + match=search_key, count=_CELERY_TASK_SCAN_COUNT_PER_BATCH ): # fake redis (tests) returns bytes, real redis returns str _key = ( @@ -112,11 +112,11 @@ async def list_tasks(self, task_context: TaskContext) -> list[Task]: continue with contextlib.suppress(ValidationError): - task_metadata = TaskMetadata.model_validate_json(raw_metadata) + execution_metadata = ExecutionMetadata.model_validate_json(raw_metadata) tasks.append( Task( - uuid=TaskUUID(key[search_key_len:]), - metadata=task_metadata, + uuid=OwnerMetadata.get_task_uuid(key), + metadata=execution_metadata, ) ) @@ -126,8 +126,19 @@ async def remove_task(self, task_id: TaskID) -> None: await self._redis_client_sdk.redis.delete(_build_key(task_id)) async def set_task_progress(self, task_id: TaskID, report: ProgressReport) -> None: - await self._redis_client_sdk.redis.hset( - name=_build_key(task_id), - key=_CELERY_TASK_PROGRESS_KEY, - value=report.model_dump_json(), - ) # type: ignore + await handle_redis_returns_union_types( + self._redis_client_sdk.redis.hset( + name=_build_key(task_id), + key=_CELERY_TASK_PROGRESS_KEY, + value=report.model_dump_json(), + ) + ) + + async def task_exists(self, task_id: TaskID) -> bool: + n = await self._redis_client_sdk.redis.exists(_build_key(task_id)) + assert isinstance(n, int) # nosec + return n > 0 + + +if TYPE_CHECKING: + _: type[TaskInfoStore] = RedisTaskInfoStore diff --git a/services/storage/src/simcore_service_storage/modules/celery/_common.py b/packages/celery-library/src/celery_library/common.py similarity index 73% rename from services/storage/src/simcore_service_storage/modules/celery/_common.py rename to packages/celery-library/src/celery_library/common.py index 545bb98f682e..ef45ef4c8b92 100644 --- a/services/storage/src/simcore_service_storage/modules/celery/_common.py +++ b/packages/celery-library/src/celery_library/common.py @@ -1,4 +1,3 @@ -import logging import ssl from typing import Any @@ -6,8 +5,6 @@ from settings_library.celery import CelerySettings from settings_library.redis import RedisDatabase -_logger = logging.getLogger(__name__) - def _celery_configure(celery_settings: CelerySettings) -> dict[str, Any]: base_config = { @@ -25,13 +22,13 @@ def _celery_configure(celery_settings: CelerySettings) -> dict[str, Any]: return base_config -def create_app(celery_settings: CelerySettings) -> Celery: - assert celery_settings +def create_app(settings: CelerySettings) -> Celery: + assert settings return Celery( - broker=celery_settings.CELERY_RABBIT_BROKER.dsn, - backend=celery_settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( + broker=settings.CELERY_RABBIT_BROKER.dsn, + backend=settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( RedisDatabase.CELERY_TASKS, ), - **_celery_configure(celery_settings), + **_celery_configure(settings), ) diff --git a/services/storage/src/simcore_service_storage/modules/celery/errors.py b/packages/celery-library/src/celery_library/errors.py similarity index 69% rename from services/storage/src/simcore_service_storage/modules/celery/errors.py rename to packages/celery-library/src/celery_library/errors.py index 0e340f35e714..4575bbdeaa22 100644 --- a/services/storage/src/simcore_service_storage/modules/celery/errors.py +++ b/packages/celery-library/src/celery_library/errors.py @@ -1,6 +1,8 @@ import base64 import pickle +from common_library.errors_classes import OsparcErrorMixin + class TransferrableCeleryError(Exception): def __repr__(self) -> 
str: @@ -11,7 +13,7 @@ def __str__(self) -> str: return f"{decode_celery_transferrable_error(self)}" -def encore_celery_transferrable_error(error: Exception) -> TransferrableCeleryError: +def encode_celery_transferrable_error(error: Exception) -> TransferrableCeleryError: # NOTE: Celery modifies exceptions during serialization, which can cause # the original error context to be lost. This mechanism ensures the same # error can be recreated on the caller side exactly as it was raised here. @@ -22,3 +24,13 @@ def decode_celery_transferrable_error(error: TransferrableCeleryError) -> Except assert isinstance(error, TransferrableCeleryError) # nosec result: Exception = pickle.loads(base64.b64decode(error.args[0])) # noqa: S301 return result + + +class TaskSubmissionError(OsparcErrorMixin, Exception): + msg_template = ( + "Unable to submit task {task_name} with id '{task_id}' and params {task_params}" + ) + + +class TaskNotFoundError(OsparcErrorMixin, Exception): + msg_template = "Task with id '{task_id}' was not found" diff --git a/packages/celery-library/src/celery_library/py.typed b/packages/celery-library/src/celery_library/py.typed new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/celery-library/src/celery_library/rpc/__init__.py b/packages/celery-library/src/celery_library/rpc/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py b/packages/celery-library/src/celery_library/rpc/_async_jobs.py similarity index 62% rename from services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py rename to packages/celery-library/src/celery_library/rpc/_async_jobs.py index 3186237eb7e8..9af35a588d2c 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py +++ b/packages/celery-library/src/celery_library/rpc/_async_jobs.py @@ -3,59 +3,65 @@ import logging from celery.exceptions import CeleryError # type: ignore[import-untyped] -from fastapi import FastAPI from models_library.api_schemas_rpc_async_jobs.async_jobs import ( AsyncJobGet, AsyncJobId, - AsyncJobNameData, AsyncJobResult, AsyncJobStatus, ) from models_library.api_schemas_rpc_async_jobs.exceptions import ( JobAbortedError, JobError, + JobMissingError, JobNotDoneError, JobSchedulerError, ) +from servicelib.celery.models import OwnerMetadata, TaskState +from servicelib.celery.task_manager import TaskManager from servicelib.logging_utils import log_catch from servicelib.rabbitmq import RPCRouter -from ...modules.celery import get_celery_client -from ...modules.celery.errors import ( +from ..errors import ( + TaskNotFoundError, TransferrableCeleryError, decode_celery_transferrable_error, ) -from ...modules.celery.models import TaskState _logger = logging.getLogger(__name__) router = RPCRouter() -@router.expose(reraise_if_error_type=(JobSchedulerError,)) -async def cancel(app: FastAPI, job_id: AsyncJobId, job_id_data: AsyncJobNameData): - assert app # nosec - assert job_id_data # nosec +@router.expose(reraise_if_error_type=(JobSchedulerError, JobMissingError)) +async def cancel( + task_manager: TaskManager, job_id: AsyncJobId, owner_metadata: OwnerMetadata +): + assert task_manager # nosec + assert owner_metadata # nosec try: - await get_celery_client(app).cancel_task( - task_context=job_id_data.model_dump(), + await task_manager.cancel_task( + owner_metadata=owner_metadata, task_uuid=job_id, ) + except TaskNotFoundError as exc: + raise JobMissingError(job_id=job_id) from exc except CeleryError as exc: 
raise JobSchedulerError(exc=f"{exc}") from exc -@router.expose(reraise_if_error_type=(JobSchedulerError,)) +@router.expose(reraise_if_error_type=(JobSchedulerError, JobMissingError)) async def status( - app: FastAPI, job_id: AsyncJobId, job_id_data: AsyncJobNameData + task_manager: TaskManager, job_id: AsyncJobId, owner_metadata: OwnerMetadata ) -> AsyncJobStatus: - assert app # nosec - assert job_id_data # nosec + assert task_manager # nosec + assert owner_metadata # nosec try: - task_status = await get_celery_client(app).get_task_status( - task_context=job_id_data.model_dump(), + task_status = await task_manager.get_task_status( + owner_metadata=owner_metadata, task_uuid=job_id, ) + except TaskNotFoundError as exc: + raise JobMissingError(job_id=job_id) from exc except CeleryError as exc: raise JobSchedulerError(exc=f"{exc}") from exc @@ -68,35 +74,36 @@ async def status( @router.expose( reraise_if_error_type=( + JobAbortedError, JobError, + JobMissingError, JobNotDoneError, - JobAbortedError, JobSchedulerError, ) ) async def result( - app: FastAPI, job_id: AsyncJobId, job_id_data: AsyncJobNameData + task_manager: TaskManager, job_id: AsyncJobId, owner_metadata: OwnerMetadata ) -> AsyncJobResult: - assert app # nosec + assert task_manager # nosec assert job_id # nosec - assert job_id_data # nosec + assert owner_metadata # nosec try: - _status = await get_celery_client(app).get_task_status( - task_context=job_id_data.model_dump(), + _status = await task_manager.get_task_status( + owner_metadata=owner_metadata, task_uuid=job_id, ) if not _status.is_done: raise JobNotDoneError(job_id=job_id) - _result = await get_celery_client(app).get_task_result( - task_context=job_id_data.model_dump(), + _result = await task_manager.get_task_result( + owner_metadata=owner_metadata, task_uuid=job_id, ) + except TaskNotFoundError as exc: + raise JobMissingError(job_id=job_id) from exc except CeleryError as exc: raise JobSchedulerError(exc=f"{exc}") from exc - if _status.task_state == TaskState.ABORTED: - raise JobAbortedError(job_id=job_id) if _status.task_state == TaskState.FAILURE: # fallback exception to report exc_type = type(_result).__name__ @@ -122,13 +129,12 @@ async def result( @router.expose(reraise_if_error_type=(JobSchedulerError,)) async def list_jobs( - app: FastAPI, filter_: str, job_id_data: AsyncJobNameData + task_manager: TaskManager, owner_metadata: OwnerMetadata ) -> list[AsyncJobGet]: - _ = filter_ - assert app # nosec + assert task_manager # nosec try: - tasks = await get_celery_client(app).list_tasks( - task_context=job_id_data.model_dump(), + tasks = await task_manager.list_tasks( + owner_metadata=owner_metadata, ) except CeleryError as exc: raise JobSchedulerError(exc=f"{exc}") from exc diff --git a/packages/celery-library/src/celery_library/signals.py b/packages/celery-library/src/celery_library/signals.py new file mode 100644 index 000000000000..02f1a56f0ec2 --- /dev/null +++ b/packages/celery-library/src/celery_library/signals.py @@ -0,0 +1,52 @@ +import asyncio +import logging +import threading + +from celery import Celery # type: ignore[import-untyped] +from celery.worker.worker import WorkController # type: ignore[import-untyped] +from servicelib.celery.app_server import BaseAppServer +from servicelib.logging_utils import log_context + +from .utils import get_app_server, set_app_server + +_logger = logging.getLogger(__name__) + + +def on_worker_init( + sender: WorkController, + app_server: BaseAppServer, + **_kwargs, +) -> None: + startup_complete_event = threading.Event() + 
+ def _init(startup_complete_event: threading.Event) -> None: + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + + assert sender.app # nosec + assert isinstance(sender.app, Celery) # nosec + + set_app_server(sender.app, app_server) + + app_server.event_loop = loop + + loop.run_until_complete(app_server.run_until_shutdown(startup_complete_event)) + + thread = threading.Thread( + group=None, + target=_init, + name="app_server_init", + args=(startup_complete_event,), + daemon=True, + ) + thread.start() + + startup_complete_event.wait() + + +def on_worker_shutdown(sender, **_kwargs) -> None: + with log_context(_logger, logging.INFO, "Worker shutdown"): + assert isinstance(sender.app, Celery) + app_server = get_app_server(sender.app) + + app_server.shutdown_event.set() diff --git a/services/storage/src/simcore_service_storage/modules/celery/_task.py b/packages/celery-library/src/celery_library/task.py similarity index 71% rename from services/storage/src/simcore_service_storage/modules/celery/_task.py rename to packages/celery-library/src/celery_library/task.py index e367a3a73dae..c3efc7ead141 100644 --- a/services/storage/src/simcore_service_storage/modules/celery/_task.py +++ b/packages/celery-library/src/celery_library/task.py @@ -6,19 +6,14 @@ from functools import wraps from typing import Any, Concatenate, Final, ParamSpec, TypeVar, overload -from celery import Celery # type: ignore[import-untyped] -from celery.contrib.abortable import ( # type: ignore[import-untyped] - AbortableAsyncResult, - AbortableTask, -) +from celery import Celery, Task # type: ignore[import-untyped] from celery.exceptions import Ignore # type: ignore[import-untyped] +from common_library.async_tools import cancel_wait_task from pydantic import NonNegativeInt -from servicelib.async_utils import cancel_wait_task +from servicelib.celery.models import TaskID -from . import get_event_loop -from .errors import encore_celery_transferrable_error -from .models import TaskID, TaskId -from .utils import get_fastapi_app +from .errors import encode_celery_transferrable_error +from .utils import get_app_server _logger = logging.getLogger(__name__) @@ -40,42 +35,42 @@ class TaskAbortedError(Exception): ... 
def _async_task_wrapper( app: Celery, ) -> Callable[ - [Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]]], - Callable[Concatenate[AbortableTask, P], R], + [Callable[Concatenate[Task, P], Coroutine[Any, Any, R]]], + Callable[Concatenate[Task, P], R], ]: def decorator( - coro: Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]], - ) -> Callable[Concatenate[AbortableTask, P], R]: + coro: Callable[Concatenate[Task, P], Coroutine[Any, Any, R]], + ) -> Callable[Concatenate[Task, P], R]: @wraps(coro) - def wrapper(task: AbortableTask, *args: P.args, **kwargs: P.kwargs) -> R: - fastapi_app = get_fastapi_app(app) + def wrapper(task: Task, *args: P.args, **kwargs: P.kwargs) -> R: + app_server = get_app_server(app) # NOTE: task.request is a thread local object, so we need to pass the id explicitly assert task.request.id is not None # nosec - async def run_task(task_id: TaskID) -> R: + async def _run_task(task_id: TaskID) -> R: try: async with asyncio.TaskGroup() as tg: - main_task = tg.create_task( - coro(task, task_id, *args, **kwargs), + async_io_task = tg.create_task( + coro(task, *args, **kwargs), ) - async def abort_monitor(): - abortable_result = AbortableAsyncResult(task_id, app=app) - while not main_task.done(): - if abortable_result.is_aborted(): + async def _abort_monitor(): + while not async_io_task.done(): + if not await app_server.task_manager.task_exists( + task_id + ): await cancel_wait_task( - main_task, + async_io_task, max_delay=_DEFAULT_CANCEL_TASK_TIMEOUT.total_seconds(), ) - AbortableAsyncResult(task_id, app=app).forget() raise TaskAbortedError await asyncio.sleep( _DEFAULT_ABORT_TASK_TIMEOUT.total_seconds() ) - tg.create_task(abort_monitor()) + tg.create_task(_abort_monitor()) - return main_task.result() + return async_io_task.result() except BaseExceptionGroup as eg: task_aborted_errors, other_errors = eg.split(TaskAbortedError) @@ -89,8 +84,8 @@ async def abort_monitor(): raise other_errors.exceptions[0] from eg return asyncio.run_coroutine_threadsafe( - run_task(task.request.id), - get_event_loop(fastapi_app), + _run_task(task.request.id), + app_server.event_loop, ).result() return wrapper @@ -103,14 +98,14 @@ def _error_handling( delay_between_retries: timedelta, dont_autoretry_for: tuple[type[Exception], ...], ) -> Callable[ - [Callable[Concatenate[AbortableTask, P], R]], - Callable[Concatenate[AbortableTask, P], R], + [Callable[Concatenate[Task, P], R]], + Callable[Concatenate[Task, P], R], ]: def decorator( - func: Callable[Concatenate[AbortableTask, P], R], - ) -> Callable[Concatenate[AbortableTask, P], R]: + func: Callable[Concatenate[Task, P], R], + ) -> Callable[Concatenate[Task, P], R]: @wraps(func) - def wrapper(task: AbortableTask, *args: P.args, **kwargs: P.kwargs) -> R: + def wrapper(task: Task, *args: P.args, **kwargs: P.kwargs) -> R: try: return func(task, *args, **kwargs) except TaskAbortedError as exc: @@ -120,7 +115,7 @@ def wrapper(task: AbortableTask, *args: P.args, **kwargs: P.kwargs) -> R: if isinstance(exc, dont_autoretry_for): _logger.debug("Not retrying for exception %s", type(exc).__name__) # propagate without retry - raise encore_celery_transferrable_error(exc) from exc + raise encode_celery_transferrable_error(exc) from exc exc_type = type(exc).__name__ exc_message = f"{exc}" @@ -134,7 +129,7 @@ def wrapper(task: AbortableTask, *args: P.args, **kwargs: P.kwargs) -> R: raise task.retry( max_retries=max_retries, countdown=delay_between_retries.total_seconds(), - exc=encore_celery_transferrable_error(exc), + 
exc=encode_celery_transferrable_error(exc), ) from exc return wrapper @@ -145,7 +140,7 @@ def wrapper(task: AbortableTask, *args: P.args, **kwargs: P.kwargs) -> R: @overload def register_task( app: Celery, - fn: Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]], + fn: Callable[Concatenate[Task, TaskID, P], Coroutine[Any, Any, R]], task_name: str | None = None, timeout: timedelta | None = _DEFAULT_TASK_TIMEOUT, max_retries: NonNegativeInt = _DEFAULT_MAX_RETRIES, @@ -157,7 +152,7 @@ def register_task( @overload def register_task( app: Celery, - fn: Callable[Concatenate[AbortableTask, P], R], + fn: Callable[Concatenate[Task, P], R], task_name: str | None = None, timeout: timedelta | None = _DEFAULT_TASK_TIMEOUT, max_retries: NonNegativeInt = _DEFAULT_MAX_RETRIES, @@ -169,8 +164,8 @@ def register_task( def register_task( # type: ignore[misc] app: Celery, fn: ( - Callable[Concatenate[AbortableTask, TaskId, P], Coroutine[Any, Any, R]] - | Callable[Concatenate[AbortableTask, P], R] + Callable[Concatenate[Task, TaskID, P], Coroutine[Any, Any, R]] + | Callable[Concatenate[Task, P], R] ), task_name: str | None = None, timeout: timedelta | None = _DEFAULT_TASK_TIMEOUT, @@ -187,7 +182,7 @@ def register_task( # type: ignore[misc] delay_between_retries -- dealy between each attempt in case of error (default: {_DEFAULT_WAIT_BEFORE_RETRY}) dont_autoretry_for -- exceptions that should not be retried when raised by the task """ - wrapped_fn: Callable[Concatenate[AbortableTask, P], R] + wrapped_fn: Callable[Concatenate[Task, P], R] if asyncio.iscoroutinefunction(fn): wrapped_fn = _async_task_wrapper(app)(fn) else: @@ -203,6 +198,6 @@ def register_task( # type: ignore[misc] app.task( name=task_name or fn.__name__, bind=True, - base=AbortableTask, time_limit=None if timeout is None else timeout.total_seconds(), + pydantic=True, )(wrapped_fn) diff --git a/packages/celery-library/src/celery_library/task_manager.py b/packages/celery-library/src/celery_library/task_manager.py new file mode 100644 index 000000000000..9697fd6e4fb3 --- /dev/null +++ b/packages/celery-library/src/celery_library/task_manager.py @@ -0,0 +1,190 @@ +import logging +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any +from uuid import uuid4 + +from celery import Celery # type: ignore[import-untyped] +from celery.exceptions import CeleryError # type: ignore[import-untyped] +from common_library.async_tools import make_async +from models_library.progress_bar import ProgressReport +from servicelib.celery.models import ( + TASK_DONE_STATES, + ExecutionMetadata, + OwnerMetadata, + Task, + TaskID, + TaskInfoStore, + TaskState, + TaskStatus, + TaskUUID, +) +from servicelib.celery.task_manager import TaskManager +from servicelib.logging_utils import log_context +from settings_library.celery import CelerySettings + +from .errors import TaskNotFoundError, TaskSubmissionError + +_logger = logging.getLogger(__name__) + + +_MIN_PROGRESS_VALUE = 0.0 +_MAX_PROGRESS_VALUE = 1.0 + + +@dataclass(frozen=True) +class CeleryTaskManager: + _celery_app: Celery + _celery_settings: CelerySettings + _task_info_store: TaskInfoStore + + async def submit_task( + self, + execution_metadata: ExecutionMetadata, + *, + owner_metadata: OwnerMetadata, + **task_params, + ) -> TaskUUID: + with log_context( + _logger, + logging.DEBUG, + msg=f"Submit {execution_metadata.name=}: {owner_metadata=} {task_params=}", + ): + task_uuid = uuid4() + task_id = owner_metadata.model_dump_task_id(task_uuid=task_uuid) + + expiry = ( + 
self._celery_settings.CELERY_EPHEMERAL_RESULT_EXPIRES + if execution_metadata.ephemeral + else self._celery_settings.CELERY_RESULT_EXPIRES + ) + + try: + await self._task_info_store.create_task( + task_id, execution_metadata, expiry=expiry + ) + self._celery_app.send_task( + execution_metadata.name, + task_id=task_id, + kwargs={"task_id": task_id} | task_params, + queue=execution_metadata.queue.value, + ) + except CeleryError as exc: + try: + await self._task_info_store.remove_task(task_id) + except CeleryError: + _logger.warning( + "Unable to cleanup task '%s' during error handling", + task_id, + exc_info=True, + ) + raise TaskSubmissionError( + task_name=execution_metadata.name, + task_id=task_id, + task_params=task_params, + ) from exc + + return task_uuid + + async def cancel_task( + self, owner_metadata: OwnerMetadata, task_uuid: TaskUUID + ) -> None: + with log_context( + _logger, + logging.DEBUG, + msg=f"task cancellation: {owner_metadata=} {task_uuid=}", + ): + task_id = owner_metadata.model_dump_task_id(task_uuid=task_uuid) + if not await self.task_exists(task_id): + raise TaskNotFoundError(task_id=task_id) + + await self._task_info_store.remove_task(task_id) + await self._forget_task(task_id) + + async def task_exists(self, task_id: TaskID) -> bool: + return await self._task_info_store.task_exists(task_id) + + @make_async() + def _forget_task(self, task_id: TaskID) -> None: + self._celery_app.AsyncResult(task_id).forget() + + async def get_task_result( + self, owner_metadata: OwnerMetadata, task_uuid: TaskUUID + ) -> Any: + with log_context( + _logger, + logging.DEBUG, + msg=f"Get task result: {owner_metadata=} {task_uuid=}", + ): + task_id = owner_metadata.model_dump_task_id(task_uuid=task_uuid) + if not await self.task_exists(task_id): + raise TaskNotFoundError(task_id=task_id) + + async_result = self._celery_app.AsyncResult(task_id) + result = async_result.result + if async_result.ready(): + task_metadata = await self._task_info_store.get_task_metadata(task_id) + if task_metadata is not None and task_metadata.ephemeral: + await self._task_info_store.remove_task(task_id) + await self._forget_task(task_id) + return result + + async def _get_task_progress_report( + self, task_id: TaskID, task_state: TaskState + ) -> ProgressReport: + if task_state in (TaskState.STARTED, TaskState.RETRY): + progress = await self._task_info_store.get_task_progress(task_id) + if progress is not None: + return progress + + if task_state in TASK_DONE_STATES: + return ProgressReport( + actual_value=_MAX_PROGRESS_VALUE, total=_MAX_PROGRESS_VALUE + ) + + # task is pending + return ProgressReport( + actual_value=_MIN_PROGRESS_VALUE, total=_MAX_PROGRESS_VALUE + ) + + @make_async() + def _get_task_celery_state(self, task_id: TaskID) -> TaskState: + return TaskState(self._celery_app.AsyncResult(task_id).state) + + async def get_task_status( + self, owner_metadata: OwnerMetadata, task_uuid: TaskUUID + ) -> TaskStatus: + with log_context( + _logger, + logging.DEBUG, + msg=f"Getting task status: {owner_metadata=} {task_uuid=}", + ): + task_id = owner_metadata.model_dump_task_id(task_uuid=task_uuid) + if not await self.task_exists(task_id): + raise TaskNotFoundError(task_id=task_id) + + task_state = await self._get_task_celery_state(task_id) + return TaskStatus( + task_uuid=task_uuid, + task_state=task_state, + progress_report=await self._get_task_progress_report( + task_id, task_state + ), + ) + + async def list_tasks(self, owner_metadata: OwnerMetadata) -> list[Task]: + with log_context( + _logger, + 
logging.DEBUG, + msg=f"Listing tasks: {owner_metadata=}", + ): + return await self._task_info_store.list_tasks(owner_metadata) + + async def set_task_progress(self, task_id: TaskID, report: ProgressReport) -> None: + await self._task_info_store.set_task_progress( + task_id=task_id, + report=report, + ) + + +if TYPE_CHECKING: + _: type[TaskManager] = CeleryTaskManager diff --git a/services/storage/src/simcore_service_storage/modules/celery/_celery_types.py b/packages/celery-library/src/celery_library/types.py similarity index 77% rename from services/storage/src/simcore_service_storage/modules/celery/_celery_types.py rename to packages/celery-library/src/celery_library/types.py index 4ed62e727756..bbd04eabc569 100644 --- a/services/storage/src/simcore_service_storage/modules/celery/_celery_types.py +++ b/packages/celery-library/src/celery_library/types.py @@ -3,14 +3,8 @@ from typing import Any from kombu.utils.json import register_type # type: ignore[import-untyped] -from models_library.api_schemas_storage.storage_schemas import ( - FileUploadCompletionBody, - FoldersBody, -) from pydantic import BaseModel -from ...models import FileMetaData - def _path_encoder(obj): if isinstance(obj, Path): @@ -37,16 +31,6 @@ def _pydantic_model_decoder(clz: type[BaseModel], data: dict[str, Any]) -> BaseM return clz(**data) -def _register_pydantic_types(*models: type[BaseModel]) -> None: - for model in models: - register_type( - model, - _class_full_name(model), - encoder=_pydantic_model_encoder, - decoder=partial(_pydantic_model_decoder, model), - ) - - def register_celery_types() -> None: register_type( Path, @@ -56,6 +40,12 @@ def register_celery_types() -> None: ) register_type(set, _class_full_name(set), encoder=list, decoder=set) - _register_pydantic_types(FileUploadCompletionBody) - _register_pydantic_types(FileMetaData) - _register_pydantic_types(FoldersBody) + +def register_pydantic_types(*models: type[BaseModel]) -> None: + for model in models: + register_type( + model, + _class_full_name(model), + encoder=_pydantic_model_encoder, + decoder=partial(_pydantic_model_decoder, model), + ) diff --git a/packages/celery-library/src/celery_library/utils.py b/packages/celery-library/src/celery_library/utils.py new file mode 100644 index 000000000000..4d8bad73b96d --- /dev/null +++ b/packages/celery-library/src/celery_library/utils.py @@ -0,0 +1,14 @@ +from celery import Celery # type: ignore[import-untyped] +from servicelib.celery.app_server import BaseAppServer + +_APP_SERVER_KEY = "app_server" + + +def get_app_server(app: Celery) -> BaseAppServer: + app_server = app.conf[_APP_SERVER_KEY] + assert isinstance(app_server, BaseAppServer) + return app_server + + +def set_app_server(app: Celery, app_server: BaseAppServer) -> None: + app.conf[_APP_SERVER_KEY] = app_server diff --git a/packages/celery-library/tests/conftest.py b/packages/celery-library/tests/conftest.py new file mode 100644 index 000000000000..8e8bc9768205 --- /dev/null +++ b/packages/celery-library/tests/conftest.py @@ -0,0 +1,179 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument + +import datetime +import logging +import threading +from collections.abc import AsyncIterator, Callable +from typing import Any + +import pytest +from celery import Celery # type: ignore[import-untyped] +from celery.contrib.testing.worker import ( + TestWorkController, + start_worker, +) +from celery.signals import worker_init, worker_shutdown +from celery.worker.worker import WorkController +from celery_library.backends.redis import 
RedisTaskInfoStore +from celery_library.signals import on_worker_init, on_worker_shutdown +from celery_library.task_manager import CeleryTaskManager +from celery_library.types import register_celery_types +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict +from servicelib.celery.app_server import BaseAppServer +from servicelib.celery.task_manager import TaskManager +from servicelib.redis import RedisClientSDK +from settings_library.celery import CelerySettings +from settings_library.redis import RedisDatabase, RedisSettings + +pytest_plugins = [ + "pytest_simcore.docker_compose", + "pytest_simcore.docker_swarm", + "pytest_simcore.environment_configs", + "pytest_simcore.logging", + "pytest_simcore.rabbit_service", + "pytest_simcore.redis_service", + "pytest_simcore.repository_paths", +] + + +_logger = logging.getLogger(__name__) + + +class FakeAppServer(BaseAppServer): + def __init__(self, app: Celery, settings: CelerySettings): + super().__init__(app) + self._settings = settings + self._task_manager: CeleryTaskManager | None = None + + @property + def task_manager(self) -> TaskManager: + assert self._task_manager, "Task manager is not initialized" + return self._task_manager + + async def run_until_shutdown( + self, startup_completed_event: threading.Event + ) -> None: + redis_client_sdk = RedisClientSDK( + self._settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( + RedisDatabase.CELERY_TASKS + ), + client_name="pytest_celery_tasks", + ) + await redis_client_sdk.setup() + + self._task_manager = CeleryTaskManager( + self._app, + self._settings, + RedisTaskInfoStore(redis_client_sdk), + ) + + startup_completed_event.set() + await self.shutdown_event.wait() # wait for shutdown + + await redis_client_sdk.shutdown() + + +@pytest.fixture +def register_celery_tasks() -> Callable[[Celery], None]: + """override if tasks are needed""" + + def _(celery_app: Celery) -> None: ... 
+ + return _ + + +@pytest.fixture +def app_environment( + monkeypatch: pytest.MonkeyPatch, + env_devel_dict: EnvVarsDict, +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + { + **env_devel_dict, + }, + ) + + +@pytest.fixture +def celery_settings( + app_environment: EnvVarsDict, +) -> CelerySettings: + return CelerySettings.create_from_envs() + + +@pytest.fixture +def app_server(celery_app: Celery, celery_settings: CelerySettings) -> BaseAppServer: + return FakeAppServer(app=celery_app, settings=celery_settings) + + +@pytest.fixture(scope="session") +def celery_config() -> dict[str, Any]: + return { + "broker_connection_retry_on_startup": True, + "broker_url": "memory://localhost//", + "result_backend": "cache+memory://localhost//", + "result_expires": datetime.timedelta(days=7), + "result_extended": True, + "pool": "threads", + "task_default_queue": "default", + "task_send_sent_event": True, + "task_track_started": True, + "worker_send_task_events": True, + } + + +@pytest.fixture +async def with_celery_worker( + celery_app: Celery, + app_server: BaseAppServer, + register_celery_tasks: Callable[[Celery], None], +) -> AsyncIterator[TestWorkController]: + def _on_worker_init_wrapper(sender: WorkController, **_kwargs): + return on_worker_init(sender, app_server, **_kwargs) + + worker_init.connect(_on_worker_init_wrapper) + worker_shutdown.connect(on_worker_shutdown) + + register_celery_tasks(celery_app) + + with start_worker( + celery_app, + concurrency=1, + pool="threads", + loglevel="info", + perform_ping_check=False, + queues="default", + ) as worker: + yield worker + + +@pytest.fixture +async def mock_celery_app(celery_config: dict[str, Any]) -> Celery: + return Celery(**celery_config) + + +@pytest.fixture +async def celery_task_manager( + mock_celery_app: Celery, + celery_settings: CelerySettings, + use_in_memory_redis: RedisSettings, +) -> AsyncIterator[CeleryTaskManager]: + register_celery_types() + + try: + redis_client_sdk = RedisClientSDK( + use_in_memory_redis.build_redis_dsn(RedisDatabase.CELERY_TASKS), + client_name="pytest_celery_tasks", + ) + await redis_client_sdk.setup() + + yield CeleryTaskManager( + mock_celery_app, + celery_settings, + RedisTaskInfoStore(redis_client_sdk), + ) + finally: + await redis_client_sdk.shutdown() diff --git a/services/storage/tests/unit/test_async_jobs.py b/packages/celery-library/tests/unit/test_async_jobs.py similarity index 56% rename from services/storage/tests/unit/test_async_jobs.py rename to packages/celery-library/tests/unit/test_async_jobs.py index 36f29a15bd8a..4fddea2b698a 100644 --- a/services/storage/tests/unit/test_async_jobs.py +++ b/packages/celery-library/tests/unit/test_async_jobs.py @@ -3,35 +3,33 @@ import asyncio import pickle -from collections.abc import Callable +from collections.abc import Awaitable, Callable from datetime import timedelta from enum import Enum -from typing import Any +from typing import Any, Final import pytest from celery import Celery, Task -from fastapi import FastAPI +from celery.contrib.testing.worker import TestWorkController +from celery_library.rpc import _async_jobs +from celery_library.task import register_task +from common_library.errors_classes import OsparcErrorMixin +from faker import Faker from models_library.api_schemas_rpc_async_jobs.async_jobs import ( AsyncJobGet, - AsyncJobNameData, ) from models_library.api_schemas_rpc_async_jobs.exceptions import ( - JobAbortedError, JobError, + JobMissingError, ) -from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE -from 
models_library.api_schemas_storage.export_data_async_jobs import AccessRightError from models_library.products import ProductName -from models_library.rabbitmq_basic_types import RPCMethodName +from models_library.rabbitmq_basic_types import RPCNamespace from models_library.users import UserID +from pydantic import TypeAdapter +from servicelib.celery.models import ExecutionMetadata, OwnerMetadata, TaskID +from servicelib.celery.task_manager import TaskManager from servicelib.rabbitmq import RabbitMQRPCClient, RPCRouter from servicelib.rabbitmq.rpc_interfaces.async_jobs import async_jobs -from simcore_service_storage.api.rpc.routes import get_rabbitmq_rpc_server -from simcore_service_storage.modules.celery import get_celery_client -from simcore_service_storage.modules.celery._task import register_task -from simcore_service_storage.modules.celery.client import TaskMetadata -from simcore_service_storage.modules.celery.models import TaskID -from simcore_service_storage.modules.celery.worker import CeleryTaskWorker from tenacity import ( AsyncRetrying, retry_if_exception_type, @@ -41,21 +39,50 @@ pytest_simcore_core_services_selection = [ "rabbit", - "postgres", + "redis", ] +class AccessRightError(OsparcErrorMixin, RuntimeError): + msg_template: str = ( + "User {user_id} does not have access to file {file_id} with location {location_id}" + ) + + +@pytest.fixture +async def async_jobs_rabbitmq_rpc_client( + rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], +) -> RabbitMQRPCClient: + rpc_client = await rabbitmq_rpc_client("pytest_async_jobs_rpc_client") + assert rpc_client + return rpc_client + + +@pytest.fixture +def user_id(faker: Faker) -> UserID: + return faker.pyint(min_value=1) + + +@pytest.fixture +def product_name(faker: Faker) -> ProductName: + return faker.word() + + ###### RPC Interface ###### router = RPCRouter() +ASYNC_JOBS_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter( + RPCNamespace +).validate_python("async_jobs") + @router.expose() async def rpc_sync_job( - app: FastAPI, *, job_id_data: AsyncJobNameData, **kwargs: Any + task_manager: TaskManager, *, owner_metadata: OwnerMetadata, **kwargs: Any ) -> AsyncJobGet: task_name = sync_job.__name__ - task_uuid = await get_celery_client(app).submit_task( - TaskMetadata(name=task_name), task_context=job_id_data.model_dump(), **kwargs + task_uuid = await task_manager.submit_task( + ExecutionMetadata(name=task_name), owner_metadata=owner_metadata, **kwargs ) return AsyncJobGet(job_id=task_uuid, job_name=task_name) @@ -63,11 +90,11 @@ async def rpc_sync_job( @router.expose() async def rpc_async_job( - app: FastAPI, *, job_id_data: AsyncJobNameData, **kwargs: Any + task_manager: TaskManager, *, owner_metadata: OwnerMetadata, **kwargs: Any ) -> AsyncJobGet: task_name = async_job.__name__ - task_uuid = await get_celery_client(app).submit_task( - TaskMetadata(name=task_name), task_context=job_id_data.model_dump(), **kwargs + task_uuid = await task_manager.submit_task( + ExecutionMetadata(name=task_name), owner_metadata=owner_metadata, **kwargs ) return AsyncJobGet(job_id=task_uuid, job_name=task_name) @@ -94,8 +121,9 @@ async def _process_action(action: str, payload: Any) -> Any: return None -def sync_job(task: Task, action: Action, payload: Any) -> Any: +def sync_job(task: Task, task_id: TaskID, action: Action, payload: Any) -> Any: _ = task + _ = task_id return asyncio.run(_process_action(action, payload)) @@ -109,9 +137,15 @@ async def async_job(task: Task, task_id: TaskID, action: Action, payload: Any) - @pytest.fixture 
-async def register_rpc_routes(initialized_app: FastAPI) -> None: - rpc_server = get_rabbitmq_rpc_server(initialized_app) - await rpc_server.register_router(router, STORAGE_RPC_NAMESPACE, initialized_app) +async def register_rpc_routes( + async_jobs_rabbitmq_rpc_client: RabbitMQRPCClient, celery_task_manager: TaskManager +) -> None: + await async_jobs_rabbitmq_rpc_client.register_router( + _async_jobs.router, ASYNC_JOBS_RPC_NAMESPACE, task_manager=celery_task_manager + ) + await async_jobs_rabbitmq_rpc_client.register_router( + router, ASYNC_JOBS_RPC_NAMESPACE, task_manager=celery_task_manager + ) async def _start_task_via_rpc( @@ -121,16 +155,18 @@ async def _start_task_via_rpc( user_id: UserID, product_name: ProductName, **kwargs: Any, -) -> tuple[AsyncJobGet, AsyncJobNameData]: - job_id_data = AsyncJobNameData(user_id=user_id, product_name=product_name) +) -> tuple[AsyncJobGet, OwnerMetadata]: + owner_metadata = OwnerMetadata( + user_id=user_id, product_name=product_name, owner="pytest_client" + ) async_job_get = await async_jobs.submit( rabbitmq_rpc_client=client, - rpc_namespace=STORAGE_RPC_NAMESPACE, - method_name=RPCMethodName(rpc_task_name), - job_id_data=job_id_data, + rpc_namespace=ASYNC_JOBS_RPC_NAMESPACE, + method_name=rpc_task_name, + owner_metadata=owner_metadata, **kwargs, ) - return async_job_get, job_id_data + return async_job_get, owner_metadata @pytest.fixture @@ -155,10 +191,10 @@ def _(celery_app: Celery) -> None: async def _wait_for_job( - storage_rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, *, async_job_get: AsyncJobGet, - job_id_data: AsyncJobNameData, + owner_metadata: OwnerMetadata, stop_after: timedelta = timedelta(seconds=5), ) -> None: @@ -170,10 +206,10 @@ async def _wait_for_job( ): with attempt: result = await async_jobs.status( - storage_rabbitmq_rpc_client, - rpc_namespace=STORAGE_RPC_NAMESPACE, + rpc_client, + rpc_namespace=ASYNC_JOBS_RPC_NAMESPACE, job_id=async_job_get.job_id, - job_id_data=job_id_data, + owner_metadata=owner_metadata, ) assert ( result.done is True @@ -199,17 +235,16 @@ async def _wait_for_job( ], ) async def test_async_jobs_workflow( - initialized_app: FastAPI, register_rpc_routes: None, - storage_rabbitmq_rpc_client: RabbitMQRPCClient, - with_storage_celery_worker: CeleryTaskWorker, + async_jobs_rabbitmq_rpc_client: RabbitMQRPCClient, + with_celery_worker: TestWorkController, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, payload: Any, ): - async_job_get, job_id_data = await _start_task_via_rpc( - storage_rabbitmq_rpc_client, + async_job_get, owner_metadata = await _start_task_via_rpc( + async_jobs_rabbitmq_rpc_client, rpc_task_name=exposed_rpc_start, user_id=user_id, product_name=product_name, @@ -218,24 +253,23 @@ async def test_async_jobs_workflow( ) jobs = await async_jobs.list_jobs( - storage_rabbitmq_rpc_client, - rpc_namespace=STORAGE_RPC_NAMESPACE, - filter_="", # currently not used - job_id_data=job_id_data, + async_jobs_rabbitmq_rpc_client, + rpc_namespace=ASYNC_JOBS_RPC_NAMESPACE, + owner_metadata=owner_metadata, ) assert len(jobs) > 0 await _wait_for_job( - storage_rabbitmq_rpc_client, + async_jobs_rabbitmq_rpc_client, async_job_get=async_job_get, - job_id_data=job_id_data, + owner_metadata=owner_metadata, ) async_job_result = await async_jobs.result( - storage_rabbitmq_rpc_client, - rpc_namespace=STORAGE_RPC_NAMESPACE, + async_jobs_rabbitmq_rpc_client, + rpc_namespace=ASYNC_JOBS_RPC_NAMESPACE, job_id=async_job_get.job_id, - job_id_data=job_id_data, + 
owner_metadata=owner_metadata, ) assert async_job_result.result == payload @@ -247,16 +281,15 @@ async def test_async_jobs_workflow( ], ) async def test_async_jobs_cancel( - initialized_app: FastAPI, register_rpc_routes: None, - storage_rabbitmq_rpc_client: RabbitMQRPCClient, - with_storage_celery_worker: CeleryTaskWorker, + async_jobs_rabbitmq_rpc_client: RabbitMQRPCClient, + with_celery_worker: TestWorkController, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, ): - async_job_get, job_id_data = await _start_task_via_rpc( - storage_rabbitmq_rpc_client, + async_job_get, owner_metadata = await _start_task_via_rpc( + async_jobs_rabbitmq_rpc_client, rpc_task_name=exposed_rpc_start, user_id=user_id, product_name=product_name, @@ -265,32 +298,33 @@ async def test_async_jobs_cancel( ) await async_jobs.cancel( - storage_rabbitmq_rpc_client, - rpc_namespace=STORAGE_RPC_NAMESPACE, + async_jobs_rabbitmq_rpc_client, + rpc_namespace=ASYNC_JOBS_RPC_NAMESPACE, job_id=async_job_get.job_id, - job_id_data=job_id_data, - ) - - await _wait_for_job( - storage_rabbitmq_rpc_client, - async_job_get=async_job_get, - job_id_data=job_id_data, + owner_metadata=owner_metadata, ) jobs = await async_jobs.list_jobs( - storage_rabbitmq_rpc_client, - rpc_namespace=STORAGE_RPC_NAMESPACE, - filter_="", # currently not used - job_id_data=job_id_data, + async_jobs_rabbitmq_rpc_client, + rpc_namespace=ASYNC_JOBS_RPC_NAMESPACE, + owner_metadata=owner_metadata, ) assert async_job_get.job_id not in [job.job_id for job in jobs] - with pytest.raises(JobAbortedError): + with pytest.raises(JobMissingError): + await async_jobs.status( + async_jobs_rabbitmq_rpc_client, + rpc_namespace=ASYNC_JOBS_RPC_NAMESPACE, + job_id=async_job_get.job_id, + owner_metadata=owner_metadata, + ) + + with pytest.raises(JobMissingError): await async_jobs.result( - storage_rabbitmq_rpc_client, - rpc_namespace=STORAGE_RPC_NAMESPACE, + async_jobs_rabbitmq_rpc_client, + rpc_namespace=ASYNC_JOBS_RPC_NAMESPACE, job_id=async_job_get.job_id, - job_id_data=job_id_data, + owner_metadata=owner_metadata, ) @@ -312,17 +346,16 @@ async def test_async_jobs_cancel( ], ) async def test_async_jobs_raises( - initialized_app: FastAPI, register_rpc_routes: None, - storage_rabbitmq_rpc_client: RabbitMQRPCClient, - with_storage_celery_worker: CeleryTaskWorker, + async_jobs_rabbitmq_rpc_client: RabbitMQRPCClient, + with_celery_worker: TestWorkController, user_id: UserID, product_name: ProductName, exposed_rpc_start: str, error: Exception, ): - async_job_get, job_id_data = await _start_task_via_rpc( - storage_rabbitmq_rpc_client, + async_job_get, owner_metadata = await _start_task_via_rpc( + async_jobs_rabbitmq_rpc_client, rpc_task_name=exposed_rpc_start, user_id=user_id, product_name=product_name, @@ -331,18 +364,18 @@ async def test_async_jobs_raises( ) await _wait_for_job( - storage_rabbitmq_rpc_client, + async_jobs_rabbitmq_rpc_client, async_job_get=async_job_get, - job_id_data=job_id_data, + owner_metadata=owner_metadata, stop_after=timedelta(minutes=1), ) with pytest.raises(JobError) as exc: await async_jobs.result( - storage_rabbitmq_rpc_client, - rpc_namespace=STORAGE_RPC_NAMESPACE, + async_jobs_rabbitmq_rpc_client, + rpc_namespace=ASYNC_JOBS_RPC_NAMESPACE, job_id=async_job_get.job_id, - job_id_data=job_id_data, + owner_metadata=owner_metadata, ) assert exc.value.exc_type == type(error).__name__ assert exc.value.exc_msg == f"{error}" diff --git a/services/storage/tests/unit/test_modules_celery_errors.py 
b/packages/celery-library/tests/unit/test_errors.py similarity index 78% rename from services/storage/tests/unit/test_modules_celery_errors.py rename to packages/celery-library/tests/unit/test_errors.py index 74000f7649e3..c18e3bb50d6c 100644 --- a/services/storage/tests/unit/test_modules_celery_errors.py +++ b/packages/celery-library/tests/unit/test_errors.py @@ -1,9 +1,9 @@ import pytest -from models_library.api_schemas_storage.export_data_async_jobs import AccessRightError -from simcore_service_storage.modules.celery.errors import ( +from celery_library.errors import ( decode_celery_transferrable_error, - encore_celery_transferrable_error, + encode_celery_transferrable_error, ) +from models_library.api_schemas_storage.export_data_async_jobs import AccessRightError @pytest.mark.parametrize( @@ -13,11 +13,11 @@ AccessRightError(user_id=1, file_id="a/path/to/a/file.txt", location_id=0), ], ) -def test_workflow(original_error: Exception): +def test_error(original_error: Exception): try: raise original_error # noqa: TRY301 except Exception as e: # pylint: disable=broad-exception-caught - result = encore_celery_transferrable_error(e) + result = encode_celery_transferrable_error(e) assert decode_celery_transferrable_error(result).args == original_error.args assert f"{decode_celery_transferrable_error(result)}" == f"{original_error}" diff --git a/packages/celery-library/tests/unit/test_tasks.py b/packages/celery-library/tests/unit/test_tasks.py new file mode 100644 index 000000000000..9db28d7a5cab --- /dev/null +++ b/packages/celery-library/tests/unit/test_tasks.py @@ -0,0 +1,268 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +import asyncio +import logging +import time +from collections.abc import Callable +from random import randint + +import pytest +from celery import Celery, Task # pylint: disable=no-name-in-module +from celery.worker.worker import WorkController # pylint: disable=no-name-in-module +from celery_library.errors import TaskNotFoundError, TransferrableCeleryError +from celery_library.task import register_task +from celery_library.task_manager import CeleryTaskManager +from celery_library.utils import get_app_server +from common_library.errors_classes import OsparcErrorMixin +from faker import Faker +from models_library.progress_bar import ProgressReport +from servicelib.celery.models import ( + ExecutionMetadata, + OwnerMetadata, + TaskID, + TaskState, + TaskUUID, + Wildcard, +) +from servicelib.logging_utils import log_context +from tenacity import Retrying, retry_if_exception_type, stop_after_delay, wait_fixed + +_faker = Faker() + +_logger = logging.getLogger(__name__) + +pytest_simcore_core_services_selection = ["redis"] +pytest_simcore_ops_services_selection = [] + + +class MyOwnerMetadata(OwnerMetadata): + user_id: int + + +async def _fake_file_processor( + celery_app: Celery, task_name: str, task_id: str, files: list[str] +) -> str: + def sleep_for(seconds: float) -> None: + time.sleep(seconds) + + for n, file in enumerate(files, start=1): + with log_context(_logger, logging.INFO, msg=f"Processing file {file}"): + await get_app_server(celery_app).task_manager.set_task_progress( + task_id=task_id, + report=ProgressReport(actual_value=n / len(files)), + ) + await asyncio.get_event_loop().run_in_executor(None, sleep_for, 1) + + return "archive.zip" + + +def fake_file_processor(task: Task, task_id: TaskID, files: list[str]) -> str: + 
assert task_id + assert task.name + _logger.info("Calling _fake_file_processor") + return asyncio.run_coroutine_threadsafe( + _fake_file_processor(task.app, task.name, task.request.id, files), + get_app_server(task.app).event_loop, + ).result() + + +class MyError(OsparcErrorMixin, Exception): + msg_template = "Something strange happened: {msg}" + + +def failure_task(task: Task, task_id: TaskID) -> None: + assert task_id + assert task + msg = "BOOM!" + raise MyError(msg=msg) + + +async def dreamer_task(task: Task, task_id: TaskID) -> list[int]: + numbers = [] + for _ in range(30): + numbers.append(randint(1, 90)) # noqa: S311 + await asyncio.sleep(0.5) + return numbers + + +@pytest.fixture +def register_celery_tasks() -> Callable[[Celery], None]: + def _(celery_app: Celery) -> None: + register_task(celery_app, fake_file_processor) + register_task(celery_app, failure_task) + register_task(celery_app, dreamer_task) + + return _ + + +async def test_submitting_task_calling_async_function_results_with_success_state( + celery_task_manager: CeleryTaskManager, + with_celery_worker: WorkController, +): + + owner_metadata = MyOwnerMetadata(user_id=42, owner="test-owner") + + task_uuid = await celery_task_manager.submit_task( + ExecutionMetadata( + name=fake_file_processor.__name__, + ), + owner_metadata=owner_metadata, + files=[f"file{n}" for n in range(5)], + ) + + for attempt in Retrying( + retry=retry_if_exception_type(AssertionError), + wait=wait_fixed(1), + stop=stop_after_delay(30), + ): + with attempt: + status = await celery_task_manager.get_task_status( + owner_metadata, task_uuid + ) + assert status.task_state == TaskState.SUCCESS + + assert ( + await celery_task_manager.get_task_status(owner_metadata, task_uuid) + ).task_state == TaskState.SUCCESS + assert ( + await celery_task_manager.get_task_result(owner_metadata, task_uuid) + ) == "archive.zip" + + +async def test_submitting_task_with_failure_results_with_error( + celery_task_manager: CeleryTaskManager, + with_celery_worker: WorkController, +): + + owner_metadata = MyOwnerMetadata(user_id=42, owner="test-owner") + + task_uuid = await celery_task_manager.submit_task( + ExecutionMetadata( + name=failure_task.__name__, + ), + owner_metadata=owner_metadata, + ) + + for attempt in Retrying( + retry=retry_if_exception_type(AssertionError), + wait=wait_fixed(1), + stop=stop_after_delay(30), + ): + + with attempt: + raw_result = await celery_task_manager.get_task_result( + owner_metadata, task_uuid + ) + assert isinstance(raw_result, TransferrableCeleryError) + + raw_result = await celery_task_manager.get_task_result(owner_metadata, task_uuid) + assert f"{raw_result}" == "Something strange happened: BOOM!" 
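# --- Illustrative sketch (editor's note, not part of this patch) --------------
# The tests above exercise the full submit -> status -> result cycle of
# CeleryTaskManager. A minimal caller-side sketch of that cycle, reusing the
# names defined in this module (MyOwnerMetadata, fake_file_processor,
# ExecutionMetadata, TaskState) and assuming a running Celery worker,
# might look like this:
#
#   async def submit_and_wait(task_manager: CeleryTaskManager) -> str:
#       owner_metadata = MyOwnerMetadata(user_id=42, owner="test-owner")
#       task_uuid = await task_manager.submit_task(
#           ExecutionMetadata(name=fake_file_processor.__name__),
#           owner_metadata=owner_metadata,
#           files=["file0", "file1"],
#       )
#       # poll until the worker reports completion, then fetch the result
#       while (
#           await task_manager.get_task_status(owner_metadata, task_uuid)
#       ).task_state != TaskState.SUCCESS:
#           await asyncio.sleep(1)
#       return await task_manager.get_task_result(owner_metadata, task_uuid)
# ------------------------------------------------------------------------------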
+ + +async def test_cancelling_a_running_task_aborts_and_deletes( + celery_task_manager: CeleryTaskManager, + with_celery_worker: WorkController, +): + + owner_metadata = MyOwnerMetadata(user_id=42, owner="test-owner") + + task_uuid = await celery_task_manager.submit_task( + ExecutionMetadata( + name=dreamer_task.__name__, + ), + owner_metadata=owner_metadata, + ) + + await asyncio.sleep(3.0) + + await celery_task_manager.cancel_task(owner_metadata, task_uuid) + + with pytest.raises(TaskNotFoundError): + await celery_task_manager.get_task_status(owner_metadata, task_uuid) + + assert task_uuid not in await celery_task_manager.list_tasks(owner_metadata) + + +async def test_listing_task_uuids_contains_submitted_task( + celery_task_manager: CeleryTaskManager, + with_celery_worker: WorkController, +): + + owner_metadata = MyOwnerMetadata(user_id=42, owner="test-owner") + + task_uuid = await celery_task_manager.submit_task( + ExecutionMetadata( + name=dreamer_task.__name__, + ), + owner_metadata=owner_metadata, + ) + + for attempt in Retrying( + retry=retry_if_exception_type(AssertionError), + wait=wait_fixed(0.1), + stop=stop_after_delay(10), + ): + with attempt: + tasks = await celery_task_manager.list_tasks(owner_metadata) + assert any(task.uuid == task_uuid for task in tasks) + + tasks = await celery_task_manager.list_tasks(owner_metadata) + assert any(task.uuid == task_uuid for task in tasks) + + +async def test_filtering_listing_tasks( + celery_task_manager: CeleryTaskManager, + with_celery_worker: WorkController, +): + class MyOwnerMetadata(OwnerMetadata): + user_id: int + product_name: str | Wildcard + + user_id = 42 + _owner = "test-owner" + expected_task_uuids: set[TaskUUID] = set() + all_tasks: list[tuple[TaskUUID, MyOwnerMetadata]] = [] + + try: + for _ in range(5): + owner_metadata = MyOwnerMetadata( + user_id=user_id, product_name=_faker.word(), owner=_owner + ) + task_uuid = await celery_task_manager.submit_task( + ExecutionMetadata( + name=dreamer_task.__name__, + ), + owner_metadata=owner_metadata, + ) + expected_task_uuids.add(task_uuid) + all_tasks.append((task_uuid, owner_metadata)) + + for _ in range(3): + owner_metadata = MyOwnerMetadata( + user_id=_faker.pyint(min_value=100, max_value=200), + product_name=_faker.word(), + owner=_owner, + ) + task_uuid = await celery_task_manager.submit_task( + ExecutionMetadata( + name=dreamer_task.__name__, + ), + owner_metadata=owner_metadata, + ) + all_tasks.append((task_uuid, owner_metadata)) + + search_owner_metadata = MyOwnerMetadata( + user_id=user_id, + product_name="*", + owner=_owner, + ) + tasks = await celery_task_manager.list_tasks(search_owner_metadata) + assert expected_task_uuids == {task.uuid for task in tasks} + finally: + # clean up all tasks. 
this should ideally be done in the fixture + for task_uuid, owner_metadata in all_tasks: + await celery_task_manager.cancel_task(owner_metadata, task_uuid) diff --git a/packages/common-library/requirements/_base.txt b/packages/common-library/requirements/_base.txt index 062e97cee348..819cfba5b25d 100644 --- a/packages/common-library/requirements/_base.txt +++ b/packages/common-library/requirements/_base.txt @@ -4,17 +4,20 @@ orjson==3.10.15 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in # pydantic-extra-types -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.2 +pydantic-extra-types==2.10.5 # via -r requirements/_base.in -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # pydantic # pydantic-core # pydantic-extra-types + # typing-inspection +typing-inspection==0.4.1 + # via pydantic diff --git a/packages/common-library/requirements/_test.txt b/packages/common-library/requirements/_test.txt index 9737c253a396..5a64e5150c1b 100644 --- a/packages/common-library/requirements/_test.txt +++ b/packages/common-library/requirements/_test.txt @@ -17,15 +17,17 @@ packaging==24.2 # pytest # pytest-sugar pluggy==1.5.0 - # via pytest + # via + # pytest + # pytest-cov pprintpp==0.4.0 # via pytest-icdiff -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # pydantic-settings -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via # -c requirements/_base.txt # pydantic @@ -33,7 +35,9 @@ pydantic-settings==2.7.0 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_test.in -pytest==8.3.5 +pygments==2.19.2 + # via pytest +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio @@ -42,15 +46,15 @@ pytest==8.3.5 # pytest-instafail # pytest-mock # pytest-sugar -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in pytest-icdiff==0.9 # via -r requirements/_test.in pytest-instafail==0.5.0 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -62,10 +66,15 @@ python-dotenv==1.0.1 # pydantic-settings termcolor==2.5.0 # via pytest-sugar -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # pydantic # pydantic-core + # typing-inspection +typing-inspection==0.4.1 + # via + # -c requirements/_base.txt + # pydantic tzdata==2025.1 # via faker diff --git a/packages/common-library/requirements/_tools.txt b/packages/common-library/requirements/_tools.txt index 8e681c5a5839..c7271a623ba3 100644 --- a/packages/common-library/requirements/_tools.txt +++ b/packages/common-library/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # black # pip-tools @@ -26,9 +26,9 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # black # mypy @@ -40,7 +40,9 @@ packaging==24.2 # black # build pathspec==0.12.1 - # via black + # via + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -64,11 +66,11 @@ pyyaml==6.0.2 # pre-commit 
ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.2 +setuptools==80.9.0 # via pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/packages/common-library/src/common_library/async_tools.py b/packages/common-library/src/common_library/async_tools.py index 205de066851a..fc716303b02d 100644 --- a/packages/common-library/src/common_library/async_tools.py +++ b/packages/common-library/src/common_library/async_tools.py @@ -1,9 +1,17 @@ import asyncio +import datetime import functools -from collections.abc import Awaitable, Callable +import logging +import sys +from collections.abc import Awaitable, Callable, Coroutine from concurrent.futures import Executor +from functools import wraps from inspect import isawaitable -from typing import ParamSpec, TypeVar, overload +from typing import Any, ParamSpec, TypeVar, overload + +from .logging.logging_errors import create_troubleshooting_log_kwargs + +_logger = logging.getLogger(__name__) R = TypeVar("R") P = ParamSpec("P") @@ -62,3 +70,89 @@ async def maybe_await( return await obj assert not isawaitable(obj) # nosec return obj + + +async def cancel_wait_task( + task: asyncio.Task, *, max_delay: float | None = None +) -> None: + """Cancels the given task and waits for it to complete + + Arguments: + task -- task to be canceled + + + Keyword Arguments: + max_delay -- duration (in seconds) to wait before giving + up the cancellation. This timeout should be an upper bound on the + time the task needs to clean up after being canceled and + prevents the cancellation from hanging forever. If None, no timeout is + set. (default: {None}) + + Raises: + TimeoutError: raised if the task cannot be cancelled within max_delay. + CancelledError: raised ONLY if the owner (caller) is itself being cancelled. + """ + if task.done(): + # nothing to do here + return + + # mark for cancellation + task.cancel("cancel_wait_task was called to cancel this task") + try: + _logger.debug("Cancelling task %s", task.get_name()) + await asyncio.shield( + # NOTE shield ensures that cancellation of the caller function won't stop you + # from observing the cancellation/finalization of task.
+ asyncio.wait_for(task, timeout=max_delay) + ) + except TimeoutError as exc: + _logger.exception( + **create_troubleshooting_log_kwargs( + f"Timeout while cancelling task {task.get_name()} after {max_delay} seconds", + error=exc, + error_context={"task_name": task.get_name(), "max_delay": max_delay}, + ) + ) + raise + except asyncio.CancelledError: + current_task = asyncio.current_task() + assert current_task is not None # nosec + if current_task.cancelling() > 0: + # owner function is being cancelled -> propagate cancellation + raise + finally: + if not task.done(): + current_exception = sys.exception() + _logger.error( + **create_troubleshooting_log_kwargs( + f"Failed to cancel task {task.get_name()}", + error=( + current_exception if current_exception else Exception("Unknown") + ), + error_context={ + "task_name": task.get_name(), + "max_delay": max_delay, + }, + tip="Consider increasing max_delay or fixing the task to handle cancellations properly", + ) + ) + else: + _logger.debug("Task %s cancelled", task.get_name()) + + +def delayed_start( + delay: datetime.timedelta, +) -> Callable[ + [Callable[P, Coroutine[Any, Any, R]]], Callable[P, Coroutine[Any, Any, R]] +]: + def _decorator( + func: Callable[P, Coroutine[Any, Any, R]], + ) -> Callable[P, Coroutine[Any, Any, R]]: + @wraps(func) + async def _wrapper(*args: P.args, **kwargs: P.kwargs) -> R: + await asyncio.sleep(delay.total_seconds()) + return await func(*args, **kwargs) + + return _wrapper + + return _decorator diff --git a/packages/common-library/src/common_library/logging/__init__.py b/packages/common-library/src/common_library/logging/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/common-library/src/common_library/logging/logging_base.py b/packages/common-library/src/common_library/logging/logging_base.py new file mode 100644 index 000000000000..5f74661ad5b7 --- /dev/null +++ b/packages/common-library/src/common_library/logging/logging_base.py @@ -0,0 +1,22 @@ +from typing import NotRequired, TypedDict + + +class LogExtra(TypedDict): + log_uid: NotRequired[str] + log_oec: NotRequired[str] + + +def get_log_record_extra( + *, + user_id: int | str | None = None, + error_code: str | None = None, +) -> LogExtra | None: + extra: LogExtra = {} + + if user_id: + assert int(user_id) > 0 # nosec + extra["log_uid"] = f"{user_id}" + if error_code: + extra["log_oec"] = error_code + + return extra or None diff --git a/packages/service-library/src/servicelib/logging_errors.py b/packages/common-library/src/common_library/logging/logging_errors.py similarity index 69% rename from packages/service-library/src/servicelib/logging_errors.py rename to packages/common-library/src/common_library/logging/logging_errors.py index 2e6150acc393..1c7c7d469b5c 100644 --- a/packages/service-library/src/servicelib/logging_errors.py +++ b/packages/common-library/src/common_library/logging/logging_errors.py @@ -1,16 +1,18 @@ import logging -from typing import Any, TypedDict +from typing import Any, Final, TypedDict from common_library.error_codes import ErrorCodeStr from common_library.errors_classes import OsparcErrorMixin from common_library.json_serialization import json_dumps, representation_encoder -from .logging_utils import LogExtra, get_log_record_extra +from .logging_base import LogExtra, get_log_record_extra _logger = logging.getLogger(__name__) +_MAX_LOGGED_CAUSES: Final[int] = 10 -def create_troubleshotting_log_message( + +def create_troubleshooting_log_message( user_error_msg: str, *, error: BaseException,
@@ -27,10 +29,25 @@ def create_troubleshotting_log_message( error_context -- Additional context surrounding the exception, such as environment variables or function-specific data. This can be derived from exc.error_context() (relevant when using the OsparcErrorMixin) tip -- Helpful suggestions or possible solutions explaining why the error may have occurred and how it could potentially be resolved """ + + def _collect_causes(exc: BaseException) -> str: + causes = [] + current = exc.__cause__ + seen = set() # Prevent infinite loops + while current is not None and id(current) not in seen: + seen.add(id(current)) + causes.append(f"[{type(current).__name__}]'{current}'") + current = getattr(current, "__cause__", None) + if len(causes) > _MAX_LOGGED_CAUSES: # Prevent excessive chains + causes.append("[... truncated]") + break + return " <- ".join(causes) + debug_data = json_dumps( { "exception_type": f"{type(error)}", - "exception_details": f"{error}", + "exception_string": f"{error}", + "exception_causes": _collect_causes(error), "error_code": error_code, "context": error_context, "tip": tip, @@ -47,7 +64,7 @@ class LogKwargs(TypedDict): extra: LogExtra | None -def create_troubleshotting_log_kwargs( +def create_troubleshooting_log_kwargs( user_error_msg: str, *, error: BaseException, @@ -64,9 +81,13 @@ def create_troubleshotting_log_kwargs( ... except MyException as exc _logger.exception( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( user_error_msg=frontend_msg, - exception=exc, + error=exc, + error_context={ + "user_id": user_id, + "product_name": product_name, + }, tip="Check row in `groups_extra_properties` for this product. It might be missing.", ) ) @@ -78,7 +99,7 @@ def create_troubleshotting_log_kwargs( context.update(error.error_context()) # compose as log message - log_msg = create_troubleshotting_log_message( + log_msg = create_troubleshooting_log_message( user_error_msg, error=error, error_code=error_code, diff --git a/packages/service-library/src/servicelib/logging_utils_filtering.py b/packages/common-library/src/common_library/logging/logging_utils_filtering.py similarity index 100% rename from packages/service-library/src/servicelib/logging_utils_filtering.py rename to packages/common-library/src/common_library/logging/logging_utils_filtering.py diff --git a/packages/common-library/src/common_library/pydantic_fields_extension.py b/packages/common-library/src/common_library/pydantic_fields_extension.py index 59303b0a1b31..b07428483951 100644 --- a/packages/common-library/src/common_library/pydantic_fields_extension.py +++ b/packages/common-library/src/common_library/pydantic_fields_extension.py @@ -1,22 +1,36 @@ from types import UnionType -from typing import Any, Literal, get_args, get_origin +from typing import Annotated, Any, Literal, Union, get_args, get_origin from pydantic.fields import FieldInfo +NoneType: type = type(None) + def get_type(info: FieldInfo) -> Any: field_type = info.annotation if args := get_args(info.annotation): - field_type = next(a for a in args if a is not type(None)) + field_type = next(a for a in args if a is not NoneType) return field_type +def _unwrap_annotation(ann): + """Peel off Annotated wrappers until reaching the core type.""" + while get_origin(ann) is Annotated: + ann = get_args(ann)[0] + return ann + + def is_literal(info: FieldInfo) -> bool: - return get_origin(info.annotation) is Literal + ann = _unwrap_annotation(info.annotation) + return get_origin(ann) is Literal def is_nullable(info: FieldInfo) -> bool: 
- origin = get_origin(info.annotation) # X | None or Optional[X] will return Union - if origin is UnionType: - return any(x in get_args(info.annotation) for x in (type(None), Any)) - return False + """Checks whether a field allows None as a value.""" + ann = _unwrap_annotation(info.annotation) + origin = get_origin(ann) # X | None or Optional[X] will return Union + + if origin in (Union, UnionType): + return any(arg is NoneType or arg is Any for arg in get_args(ann)) + + return ann is NoneType or ann is Any diff --git a/packages/common-library/src/common_library/serialization.py b/packages/common-library/src/common_library/serialization.py index 70dd53e13c4b..a24f0e7e96ea 100644 --- a/packages/common-library/src/common_library/serialization.py +++ b/packages/common-library/src/common_library/serialization.py @@ -11,7 +11,9 @@ def model_dump_with_secrets( ) -> dict[str, Any]: data = settings_obj.model_dump(**pydantic_export_options) - for field_name in settings_obj.model_fields: + settings_cls = settings_obj.__class__ + + for field_name in settings_cls.model_fields: if field_name not in data: continue @@ -29,7 +31,9 @@ def model_dump_with_secrets( data[field_name] = str(field_data) elif isinstance(field_data, dict): - possible_pydantic_model = settings_obj.model_fields[field_name].annotation + possible_pydantic_model = settings_obj.__class__.model_fields[ + field_name + ].annotation # NOTE: data could be a dict which does not represent a pydantic model or a union of models with contextlib.suppress(AttributeError, ValidationError): data[field_name] = model_dump_with_secrets( diff --git a/packages/common-library/src/common_library/user_messages.py b/packages/common-library/src/common_library/user_messages.py new file mode 100644 index 000000000000..4c6e22cdc2e5 --- /dev/null +++ b/packages/common-library/src/common_library/user_messages.py @@ -0,0 +1,11 @@ +def user_message(msg: str, *, _version: int | None = None) -> str: + """Marks a message as user-facing + + Arguments: + msg -- human-friendly string that follows docs/user-messages-guidelines.md + _version -- version number to track changes to messages; increment when modifying an existing message + + Returns: + The original message string, allowing it to be used inline in code + """ + return msg diff --git a/packages/common-library/tests/test_async_tools.py b/packages/common-library/tests/test_async_tools.py index 850945d39b20..9e9e081056fe 100644 --- a/packages/common-library/tests/test_async_tools.py +++ b/packages/common-library/tests/test_async_tools.py @@ -1,9 +1,16 @@ import asyncio +import time from concurrent.futures import ThreadPoolExecutor +from datetime import timedelta from typing import Any import pytest -from common_library.async_tools import make_async, maybe_await +from common_library.async_tools import ( + cancel_wait_task, + delayed_start, + make_async, + maybe_await, +) @make_async() @@ -13,7 +20,8 @@ def sync_function(x: int, y: int) -> int: @make_async() def sync_function_with_exception() -> None: - raise ValueError("This is an error!") + msg = "This is an error!" 
+ raise ValueError(msg) @pytest.mark.asyncio @@ -93,3 +101,118 @@ def fetchone(self) -> Any: # pylint: disable=no-self-use sync_result = await maybe_await(SyncResultProxy().fetchone()) assert sync_result == {"id": 2, "name": "test2"} + + +async def test_cancel_and_wait(): + state = {"started": False, "cancelled": False, "cleaned_up": False} + SLEEP_TIME = 5 # seconds + + async def coro(): + try: + state["started"] = True + await asyncio.sleep(SLEEP_TIME) + except asyncio.CancelledError: + state["cancelled"] = True + raise + finally: + state["cleaned_up"] = True + + task = asyncio.create_task(coro()) + await asyncio.sleep(0.1) # Let coro start + + start = time.time() + await cancel_wait_task(task) + + elapsed = time.time() - start + assert elapsed < SLEEP_TIME, "Task should be cancelled quickly" + assert task.done() + assert task.cancelled() + assert state["started"] + assert state["cancelled"] + assert state["cleaned_up"] + + +async def test_cancel_and_wait_propagates_external_cancel(): + """ + This test ensures that if the caller of cancel_and_wait is cancelled, + the CancelledError is not swallowed. + """ + + async def coro(): + try: + await asyncio.sleep(4) + except asyncio.CancelledError: + await asyncio.sleep(1) # simulate cleanup + raise + + inner_task = asyncio.create_task(coro()) + + async def outer_coro(): + try: + await cancel_wait_task(inner_task) + except asyncio.CancelledError: + assert ( + not inner_task.cancelled() + ), "Internal Task DOES NOT RAISE CancelledError" + raise + + # Cancel the wrapper after a short delay + outer_task = asyncio.create_task(outer_coro()) + await asyncio.sleep(0.1) + outer_task.cancel() + + with pytest.raises(asyncio.CancelledError): + await outer_task + + # Ensure the task was cancelled + assert inner_task.cancelled() is False, "Task should not be cancelled initially" + + done_event = asyncio.Event() + + def on_done(_): + done_event.set() + + inner_task.add_done_callback(on_done) + await done_event.wait() + + +async def test_cancel_and_wait_timeout_on_slow_cleanup(): + """Test that cancel_and_wait raises TimeoutError when cleanup takes longer than max_delay""" + + CLEANUP_TIME = 2 # seconds + + async def slow_cleanup_coro(): + try: + await asyncio.sleep(10) # Long running task + except asyncio.CancelledError: + # Simulate slow cleanup that exceeds max_delay! 
+ await asyncio.sleep(CLEANUP_TIME) + raise + + task = asyncio.create_task(slow_cleanup_coro()) + await asyncio.sleep(0.1) # Let the task start + + # Cancel with a max_delay shorter than cleanup time + with pytest.raises(TimeoutError): + await cancel_wait_task( + task, max_delay=CLEANUP_TIME / 10 + ) # 0.2 seconds < 2 seconds cleanup + + assert task.cancelled() + + +async def test_with_delay(): + @delayed_start(timedelta(seconds=0.2)) + async def decorated_awaitable() -> int: + return 42 + + assert await decorated_awaitable() == 42 + + async def another_awaitable() -> int: + return 42 + + decorated_another_awaitable = delayed_start(timedelta(seconds=0.2))( + another_awaitable + ) + + assert await decorated_another_awaitable() == 42 diff --git a/packages/service-library/tests/test_logging_errors.py b/packages/common-library/tests/test_logging_errors.py similarity index 87% rename from packages/service-library/tests/test_logging_errors.py rename to packages/common-library/tests/test_logging_errors.py index ac99c2fd657c..481f152a9f08 100644 --- a/packages/service-library/tests/test_logging_errors.py +++ b/packages/common-library/tests/test_logging_errors.py @@ -5,9 +5,9 @@ import pytest from common_library.error_codes import create_error_code, parse_error_code_parts from common_library.errors_classes import OsparcErrorMixin -from servicelib.logging_errors import ( - create_troubleshotting_log_kwargs, - create_troubleshotting_log_message, +from common_library.logging.logging_errors import ( + create_troubleshooting_log_kwargs, + create_troubleshooting_log_message, ) @@ -29,7 +29,7 @@ class MyError(OsparcErrorMixin, RuntimeError): msg = f"Nice message to user [{error_code}]" - log_msg = create_troubleshotting_log_message( + log_msg = create_troubleshooting_log_message( msg, error=exc, error_code=error_code, @@ -37,7 +37,7 @@ class MyError(OsparcErrorMixin, RuntimeError): tip="This is a test error", ) - log_kwargs = create_troubleshotting_log_kwargs( + log_kwargs = create_troubleshooting_log_kwargs( msg, error=exc, error_code=error_code, @@ -58,7 +58,7 @@ class MyError(OsparcErrorMixin, RuntimeError): # ERROR root:test_logging_utils.py:417 Nice message to user [OEC:126055703573984]. 
# { - # "exception_details": "My error 123", + # "exception_string": "My error 123", # "error_code": "OEC:126055703573984", # "context": { # "user_id": 123, diff --git a/packages/service-library/tests/test_logging_utils_filtering.py b/packages/common-library/tests/test_logging_utils_filtering.py similarity index 96% rename from packages/service-library/tests/test_logging_utils_filtering.py rename to packages/common-library/tests/test_logging_utils_filtering.py index 64084c3204ac..75240cab747d 100644 --- a/packages/service-library/tests/test_logging_utils_filtering.py +++ b/packages/common-library/tests/test_logging_utils_filtering.py @@ -1,10 +1,10 @@ # pylint: disable=redefined-outer-name import logging -from typing import Generator +from collections.abc import Generator import pytest -from servicelib.logging_utils_filtering import GeneralLogFilter +from common_library.logging.logging_utils_filtering import GeneralLogFilter @pytest.fixture diff --git a/packages/common-library/tests/test_pydantic_fields_extension.py b/packages/common-library/tests/test_pydantic_fields_extension.py index 3461344062a7..1c4896eb4737 100644 --- a/packages/common-library/tests/test_pydantic_fields_extension.py +++ b/packages/common-library/tests/test_pydantic_fields_extension.py @@ -1,16 +1,22 @@ -from typing import Any, Callable, Literal +from collections.abc import Callable +from typing import Annotated, Any, Literal import pytest from common_library.pydantic_fields_extension import get_type, is_literal, is_nullable -from pydantic import BaseModel, Field +from pydantic import BaseModel, PositiveInt class MyModel(BaseModel): a: int - b: float | None = Field(...) + b: float | None c: str = "bla" d: bool | None = None e: Literal["bla"] + f: Annotated[ + PositiveInt | None, + "nullable inside Annotated (PositiveInt = Annotated[int, ...])", + ] + g: Annotated[Literal["foo", "bar"], "literal inside Annotated"] @pytest.mark.parametrize( @@ -49,6 +55,8 @@ class MyModel(BaseModel): ), (is_literal, False, "d"), (is_literal, True, "e"), + (is_literal, False, "f"), + (is_literal, True, "g"), ( is_nullable, False, @@ -66,6 +74,11 @@ class MyModel(BaseModel): ), (is_nullable, True, "d"), (is_nullable, False, "e"), + ( + is_nullable, + True, + "f", + ), ], ) def test_field_fn(fn: Callable[[Any], Any], expected: Any, name: str): diff --git a/packages/common-library/tests/test_pydantic_validators.py b/packages/common-library/tests/test_pydantic_validators.py index c1cfea84c679..47ad5d5367c9 100644 --- a/packages/common-library/tests/test_pydantic_validators.py +++ b/packages/common-library/tests/test_pydantic_validators.py @@ -7,7 +7,7 @@ validate_numeric_string_as_timedelta, ) from faker import Faker -from pydantic import BeforeValidator, Field +from pydantic import BeforeValidator from pydantic_settings import BaseSettings, SettingsConfigDict from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict @@ -17,7 +17,7 @@ class Settings(BaseSettings): APP_NAME: str REQUEST_TIMEOUT: Annotated[ timedelta, BeforeValidator(_validate_legacy_timedelta_str) - ] = Field(default=timedelta(hours=1)) + ] = timedelta(hours=1) model_config = SettingsConfigDict() @@ -45,7 +45,7 @@ def test_validate_timedelta_in_legacy_mode( ): class Settings(BaseSettings): APP_NAME: str - REQUEST_TIMEOUT: timedelta = Field(default=timedelta(seconds=40)) + REQUEST_TIMEOUT: timedelta = timedelta(seconds=40) _validate_request_timeout = validate_numeric_string_as_timedelta( "REQUEST_TIMEOUT" diff --git 
a/packages/common-library/tests/test_user_messages.py b/packages/common-library/tests/test_user_messages.py new file mode 100644 index 000000000000..e5629700f423 --- /dev/null +++ b/packages/common-library/tests/test_user_messages.py @@ -0,0 +1,6 @@ +from common_library.user_messages import user_message + + +def test_user_message() -> None: + + assert user_message("This is a user message") == "This is a user message" diff --git a/packages/dask-task-models-library/requirements/_base.txt b/packages/dask-task-models-library/requirements/_base.txt index 75b9136287cd..9ab491c623df 100644 --- a/packages/dask-task-models-library/requirements/_base.txt +++ b/packages/dask-task-models-library/requirements/_base.txt @@ -6,7 +6,7 @@ attrs==25.3.0 # via # jsonschema # referencing -click==8.1.8 +click==8.2.1 # via # dask # distributed @@ -40,6 +40,8 @@ jinja2==3.1.6 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # distributed +jsonref==1.1.0 + # via -r requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.23.0 # via -r requirements/../../../packages/models-library/requirements/_base.in jsonschema-specifications==2025.4.1 @@ -76,7 +78,7 @@ partd==1.4.2 # via dask psutil==7.0.0 # via distributed -pydantic==2.11.4 +pydantic==2.11.7 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -94,7 +96,7 @@ pydantic==2.11.4 # pydantic-settings pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.4 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -136,7 +138,7 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -rich==14.0.0 +rich==14.1.0 # via # -r requirements/../../../packages/settings-library/requirements/_base.in # typer @@ -159,11 +161,11 @@ toolz==1.0.0 # partd tornado==6.5 # via distributed -typer==0.15.4 +typer==0.16.1 # via -r requirements/../../../packages/settings-library/requirements/_base.in types-python-dateutil==2.9.0.20250516 # via arrow -typing-extensions==4.13.2 +typing-extensions==4.14.1 # via # pydantic # pydantic-core @@ -172,7 +174,7 @@ typing-extensions==4.13.2 # typing-inspection typing-inspection==0.4.0 # via pydantic -urllib3==2.4.0 +urllib3==2.5.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt diff --git a/packages/dask-task-models-library/requirements/_test.txt b/packages/dask-task-models-library/requirements/_test.txt index 9daa7aacc472..db795fd452f8 100644 --- a/packages/dask-task-models-library/requirements/_test.txt +++ b/packages/dask-task-models-library/requirements/_test.txt @@ -22,10 +22,16 @@ pint==0.24.4 platformdirs==4.3.8 # via pint pluggy==1.6.0 - # via pytest + # via + # pytest + # pytest-cov pprintpp==0.4.0 # via pytest-icdiff -pytest==8.3.5 +pygments==2.19.1 + # via + # -c requirements/_base.txt + # pytest +pytest==8.4.1 # via # -r requirements/_test.in # 
pytest-asyncio @@ -34,15 +40,15 @@ pytest==8.3.5 # pytest-instafail # pytest-mock # pytest-sugar -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in -pytest-cov==6.1.1 +pytest-cov==6.2.1 # via -r requirements/_test.in pytest-icdiff==0.9 # via -r requirements/_test.in pytest-instafail==0.5.0 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -55,7 +61,7 @@ pyyaml==6.0.2 # -r requirements/_test.in termcolor==3.1.0 # via pytest-sugar -typing-extensions==4.13.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # flexcache diff --git a/packages/dask-task-models-library/requirements/_tools.txt b/packages/dask-task-models-library/requirements/_tools.txt index e0213f1353c0..52dcea8df704 100644 --- a/packages/dask-task-models-library/requirements/_tools.txt +++ b/packages/dask-task-models-library/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # black @@ -27,7 +27,7 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.1.0 # via @@ -42,7 +42,9 @@ packaging==25.0 # black # build pathspec==0.12.1 - # via black + # via + # black + # mypy pip==25.1.1 # via pip-tools pip-tools==7.4.1 @@ -69,11 +71,11 @@ pyyaml==6.0.2 # pre-commit ruff==0.11.10 # via -r requirements/../../../requirements/devenv.txt -setuptools==80.7.1 +setuptools==80.9.0 # via pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.13.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py index 0f443c57f688..a6031747eb07 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/container_tasks/io.py @@ -75,13 +75,15 @@ def _update_json_schema_extra(schema: JsonDict) -> None: class FileUrl(BaseModel): url: AnyUrl - file_mapping: str | None = Field( - default=None, - description="Local file relpath name (if given), otherwise it takes the url filename", - ) - file_mime_type: str | None = Field( - default=None, description="the file MIME type", pattern=MIME_TYPE_RE - ) + file_mapping: Annotated[ + str | None, + Field( + description="Local file relpath name (if given), otherwise it takes the url filename" + ), + ] = None + file_mime_type: Annotated[ + str | None, Field(description="the file MIME type", pattern=MIME_TYPE_RE) + ] = None @staticmethod def _update_json_schema_extra(schema: JsonDict) -> None: diff --git a/packages/dask-task-models-library/src/dask_task_models_library/plugins/task_life_cycle_worker_plugin.py b/packages/dask-task-models-library/src/dask_task_models_library/plugins/task_life_cycle_worker_plugin.py index ebc6aabcad85..ad7135d94d0c 100644 --- a/packages/dask-task-models-library/src/dask_task_models_library/plugins/task_life_cycle_worker_plugin.py +++ b/packages/dask-task-models-library/src/dask_task_models_library/plugins/task_life_cycle_worker_plugin.py @@ -34,6 +34,7 @@ def transition( ): _logger.info("Task '%s' transition from %s to %s", key, start, finish) assert self._worker # nosec + 
assert isinstance(self._worker, Worker) # nosec self._worker.log_event( TASK_LIFE_CYCLE_EVENT.format(key=key), TaskLifeCycleState.from_worker_task_state( diff --git a/packages/models-library/requirements/_base.in b/packages/models-library/requirements/_base.in index b33d20bdd6b0..670e3cec3f86 100644 --- a/packages/models-library/requirements/_base.in +++ b/packages/models-library/requirements/_base.in @@ -5,8 +5,9 @@ --requirement ../../../packages/common-library/requirements/_base.in arrow +jsonref jsonschema orjson -pydantic[email] -pydantic-settings pydantic-extra-types +pydantic-settings +pydantic[email] diff --git a/packages/models-library/requirements/_base.txt b/packages/models-library/requirements/_base.txt index 9daa42c4b0a1..2cf37bcb9790 100644 --- a/packages/models-library/requirements/_base.txt +++ b/packages/models-library/requirements/_base.txt @@ -12,6 +12,8 @@ email-validator==2.2.0 # via pydantic idna==3.10 # via email-validator +jsonref==1.1.0 + # via -r requirements/_base.in jsonschema==4.23.0 # via -r requirements/_base.in jsonschema-specifications==2024.10.1 @@ -22,7 +24,7 @@ orjson==3.10.15 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/_base.in -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -30,9 +32,9 @@ pydantic==2.10.6 # -r requirements/_base.in # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.2 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/_base.in @@ -59,8 +61,11 @@ six==1.17.0 # via python-dateutil types-python-dateutil==2.9.0.20241206 # via arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # pydantic # pydantic-core # pydantic-extra-types + # typing-inspection +typing-inspection==0.4.1 + # via pydantic diff --git a/packages/models-library/requirements/_test.txt b/packages/models-library/requirements/_test.txt index f54c9d6f0f7f..cc04de2fcff5 100644 --- a/packages/models-library/requirements/_test.txt +++ b/packages/models-library/requirements/_test.txt @@ -31,14 +31,18 @@ pint==0.24.4 platformdirs==4.3.6 # via pint pluggy==1.5.0 - # via pytest + # via + # pytest + # pytest-cov pprintpp==0.4.0 # via pytest-icdiff propcache==0.3.0 # via yarl psutil==7.0.0 # via -r requirements/_test.in -pytest==8.3.5 +pygments==2.19.2 + # via pytest +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio @@ -47,15 +51,15 @@ pytest==8.3.5 # pytest-instafail # pytest-mock # pytest-sugar -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in pytest-icdiff==0.9 # via -r requirements/_test.in pytest-instafail==0.5.0 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -84,7 +88,7 @@ types-jsonschema==4.23.0.20241208 # via -r requirements/_test.in types-pyyaml==6.0.12.20241230 # via -r requirements/_test.in -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # flexcache diff --git a/packages/models-library/requirements/_tools.txt b/packages/models-library/requirements/_tools.txt index 3ae7f8fc714f..b4eb603e298f 100644 
--- a/packages/models-library/requirements/_tools.txt +++ b/packages/models-library/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # black # pip-tools @@ -31,9 +31,9 @@ mccabe==0.7.0 # via pylint mdurl==0.1.2 # via markdown-it-py -mypy==1.15.0 +mypy==1.16.1 # via -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # black # mypy @@ -45,7 +45,9 @@ packaging==24.2 # black # build pathspec==0.12.1 - # via black + # via + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -58,8 +60,10 @@ platformdirs==4.3.6 # virtualenv pre-commit==4.1.0 # via -r requirements/../../../requirements/devenv.txt -pygments==2.19.1 - # via rich +pygments==2.19.2 + # via + # -c requirements/_test.txt + # rich pylint==3.3.4 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 @@ -71,19 +75,19 @@ pyyaml==6.0.2 # -c requirements/../../../requirements/constraints.txt # -c requirements/_test.txt # pre-commit -rich==13.9.4 +rich==14.1.0 # via typer ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.2 +setuptools==80.9.0 # via pip-tools shellingham==1.5.4 # via typer tomlkit==0.13.2 # via pylint -typer==0.15.2 +typer==0.16.1 # via -r requirements/_tools.in -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/packages/models-library/src/models_library/api_schemas__common/errors.py b/packages/models-library/src/models_library/api_schemas__common/errors.py index d1f7d6aa34db..0e62cd62e9ae 100644 --- a/packages/models-library/src/models_library/api_schemas__common/errors.py +++ b/packages/models-library/src/models_library/api_schemas__common/errors.py @@ -1,5 +1,5 @@ import http -from typing import Any +from typing import Annotated, Any from pydantic import BaseModel, Field @@ -7,12 +7,10 @@ class DefaultApiError(BaseModel): - name: IDStr = Field( - ..., - description="Error identifier as a code or a name. 
" - "Mainly for machine-machine communication purposes.", + name: Annotated[IDStr, Field(description="Exception's class name")] + detail: Annotated[Any | None, Field(description="Human readable error message")] = ( + None ) - detail: Any | None = Field(default=None, description="Human readable error message") @classmethod def from_status_code( diff --git a/packages/models-library/src/models_library/api_schemas__common/meta.py b/packages/models-library/src/models_library/api_schemas__common/meta.py index 514abdc7d6df..2dc0c4e9d328 100644 --- a/packages/models-library/src/models_library/api_schemas__common/meta.py +++ b/packages/models-library/src/models_library/api_schemas__common/meta.py @@ -1,3 +1,5 @@ +from typing import Annotated + from pydantic import BaseModel, ConfigDict, Field from ..basic_types import VersionStr @@ -6,9 +8,10 @@ class BaseMeta(BaseModel): name: str version: VersionStr - released: dict[str, VersionStr] | None = Field( - default=None, description="Maps every route's path tag with a released version" - ) + released: Annotated[ + dict[str, VersionStr] | None, + Field(description="Maps every route's path tag with a released version"), + ] = None model_config = ConfigDict( json_schema_extra={ diff --git a/packages/models-library/src/models_library/api_schemas_api_server/api_keys.py b/packages/models-library/src/models_library/api_schemas_api_server/api_keys.py index 999cb2f192cb..534fd82329fb 100644 --- a/packages/models-library/src/models_library/api_schemas_api_server/api_keys.py +++ b/packages/models-library/src/models_library/api_schemas_api_server/api_keys.py @@ -1,3 +1,5 @@ +from typing import Annotated + from pydantic import BaseModel, ConfigDict, Field, SecretStr @@ -10,7 +12,7 @@ class ApiKeyInDB(BaseModel): api_key: str api_secret: str - id_: int = Field(0, alias="id") + id_: Annotated[int, Field(alias="id")] = 0 display_name: str user_id: int product_name: str diff --git a/packages/models-library/src/models_library/api_schemas_catalog/services_specifications.py b/packages/models-library/src/models_library/api_schemas_catalog/services_specifications.py index 331ef23f83e5..a95d5f622c25 100644 --- a/packages/models-library/src/models_library/api_schemas_catalog/services_specifications.py +++ b/packages/models-library/src/models_library/api_schemas_catalog/services_specifications.py @@ -1,18 +1,23 @@ +from typing import Annotated + from pydantic import BaseModel, Field from ..generated_models.docker_rest_api import ServiceSpec as DockerServiceSpec class ServiceSpecifications(BaseModel): - sidecar: DockerServiceSpec | None = Field( - default=None, - description="schedule-time specifications for the service sidecar (follows Docker Service creation API, see https://docs.docker.com/engine/api/v1.25/#operation/ServiceCreate)", - ) - service: DockerServiceSpec | None = Field( - default=None, - description="schedule-time specifications specifications for the service (follows Docker Service creation API (specifically only the Resources part), see https://docs.docker.com/engine/api/v1.41/#tag/Service/operation/ServiceCreate", - ) + sidecar: Annotated[ + DockerServiceSpec | None, + Field( + description="schedule-time specifications for the service sidecar (follows Docker Service creation API, see https://docs.docker.com/engine/api/v1.25/#operation/ServiceCreate)", + ), + ] = None + service: Annotated[ + DockerServiceSpec | None, + Field( + description="schedule-time specifications specifications for the service (follows Docker Service creation API (specifically only the 
Resources part), see https://docs.docker.com/engine/api/v1.41/#tag/Service/operation/ServiceCreate", + ), + ] = None -class ServiceSpecificationsGet(ServiceSpecifications): - ... +class ServiceSpecificationsGet(ServiceSpecifications): ... diff --git a/packages/models-library/src/models_library/api_schemas_clusters_keeper/clusters.py b/packages/models-library/src/models_library/api_schemas_clusters_keeper/clusters.py index 135b42188b8b..dad13b9db243 100644 --- a/packages/models-library/src/models_library/api_schemas_clusters_keeper/clusters.py +++ b/packages/models-library/src/models_library/api_schemas_clusters_keeper/clusters.py @@ -1,5 +1,6 @@ import datetime from enum import auto +from typing import Annotated from pydantic import AnyUrl, BaseModel, Field @@ -17,7 +18,7 @@ class ClusterState(StrAutoEnum): class OnDemandCluster(BaseModel): endpoint: AnyUrl - authentication: ClusterAuthentication = Field(discriminator="type") + authentication: Annotated[ClusterAuthentication, Field(discriminator="type")] state: ClusterState user_id: UserID wallet_id: WalletID | None diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py b/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py index 26b7d10d0bee..f6c00b530b1d 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/clusters.py @@ -20,10 +20,10 @@ class TaskCounts(BaseModel): class WorkerMetrics(BaseModel): - cpu: float = Field(..., description="consumed % of cpus") - memory: ByteSize = Field(..., description="consumed memory") - num_fds: int = Field(..., description="consumed file descriptors") - task_counts: TaskCounts = Field(..., description="task details") + cpu: Annotated[float, Field(description="consumed % of cpus")] + memory: Annotated[ByteSize, Field(description="consumed memory")] + num_fds: Annotated[int, Field(description="consumed file descriptors")] + task_counts: Annotated[TaskCounts, Field(description="task details")] AvailableResources: TypeAlias = DictModel[str, PositiveFloat] @@ -54,7 +54,7 @@ class Worker(BaseModel): class Scheduler(BaseModel): - status: str = Field(..., description="The running status of the scheduler") + status: Annotated[str, Field(description="The running status of the scheduler")] workers: Annotated[WorkersDict | None, Field(default_factory=dict)] @field_validator("workers", mode="before") @@ -66,10 +66,5 @@ def ensure_workers_is_empty_dict(cls, v): class ClusterDetails(BaseModel): - scheduler: Scheduler = Field( - ..., - description="This contains dask scheduler information given by the underlying dask library", - ) - dashboard_link: AnyUrl = Field( - ..., description="Link to this scheduler's dashboard" - ) + scheduler: Annotated[Scheduler, Field(description="scheduler information")] + dashboard_link: Annotated[AnyUrl, Field(description="Link to the dask dashboard")] diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/comp_runs.py b/packages/models-library/src/models_library/api_schemas_directorv2/comp_runs.py index 7dc2b03c41b7..37e3e2b29bbe 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/comp_runs.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/comp_runs.py @@ -1,6 +1,7 @@ from datetime import datetime from typing import Any, NamedTuple +from models_library.computations import CollectionRunID from models_library.services_types import ServiceRunID 
from pydantic import ( AnyUrl, @@ -63,6 +64,55 @@ class ComputationRunRpcGetPage(NamedTuple): total: PositiveInt +class ComputationCollectionRunRpcGet(BaseModel): + collection_run_id: CollectionRunID + project_ids: list[str] + state: RunningState + info: dict[str, Any] + submitted_at: datetime + started_at: datetime | None + ended_at: datetime | None + + model_config = ConfigDict( + json_schema_extra={ + "examples": [ + { + "collection_run_id": "12e0c8b2-bad6-40fb-9948-8dec4f65d4d9", + "project_ids": ["beb16d18-d57d-44aa-a638-9727fa4a72ef"], + "state": "SUCCESS", + "info": { + "wallet_id": 9866, + "user_email": "test@example.net", + "wallet_name": "test", + "product_name": "osparc", + "project_name": "test", + "project_metadata": { + "parent_node_id": "12e0c8b2-bad6-40fb-9948-8dec4f65d4d9", + "parent_node_name": "UJyfwFVYySnPCaLuQIaz", + "parent_project_id": "beb16d18-d57d-44aa-a638-9727fa4a72ef", + "parent_project_name": "qTjDmYPxeqAWfCKCQCYF", + "root_parent_node_id": "37176e84-d977-4993-bc49-d76fcfc6e625", + "root_parent_node_name": "UEXExIZVPeFzGRmMglPr", + "root_parent_project_id": "beb16d18-d57d-44aa-a638-9727fa4a72ef", + "root_parent_project_name": "FuDpjjFIyeNTWRUWCuKo", + }, + "node_id_names_map": {}, + "simcore_user_agent": "agent", + }, + "submitted_at": "2023-01-11 13:11:47.293595", + "started_at": "2023-01-11 13:11:47.293595", + "ended_at": "2023-01-11 13:11:47.293595", + } + ] + } + ) + + +class ComputationCollectionRunRpcGetPage(NamedTuple): + items: list[ComputationCollectionRunRpcGet] + total: PositiveInt + + class ComputationTaskRpcGet(BaseModel): project_uuid: ProjectID node_id: NodeID @@ -100,3 +150,42 @@ class ComputationTaskRpcGet(BaseModel): class ComputationTaskRpcGetPage(NamedTuple): items: list[ComputationTaskRpcGet] total: PositiveInt + + +class ComputationCollectionRunTaskRpcGet(BaseModel): + project_uuid: ProjectID + node_id: NodeID + state: RunningState + progress: float + image: dict[str, Any] + started_at: datetime | None + ended_at: datetime | None + log_download_link: AnyUrl | None + service_run_id: ServiceRunID + + model_config = ConfigDict( + json_schema_extra={ + "examples": [ + { + "project_uuid": "beb16d18-d57d-44aa-a638-9727fa4a72ef", + "node_id": "12e0c8b2-bad6-40fb-9948-8dec4f65d4d9", + "state": "SUCCESS", + "progress": 0.0, + "image": { + "name": "simcore/services/comp/ti-solutions-optimizer", + "tag": "1.0.19", + "node_requirements": {"CPU": 8.0, "RAM": 25769803776}, + }, + "started_at": "2023-01-11 13:11:47.293595", + "ended_at": "2023-01-11 13:11:47.293595", + "log_download_link": "https://example.com/logs", + "service_run_id": "comp_1_12e0c8b2-bad6-40fb-9948-8dec4f65d4d9_1", + } + ] + } + ) + + +class ComputationCollectionRunTaskRpcGetPage(NamedTuple): + items: list[ComputationCollectionRunTaskRpcGet] + total: PositiveInt diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/computations.py b/packages/models-library/src/models_library/api_schemas_directorv2/computations.py index 3691fdbf6ee4..00a6549b3faf 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/computations.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/computations.py @@ -1,5 +1,6 @@ from typing import Annotated, Any, TypeAlias +from models_library.computations import CollectionRunID from pydantic import ( AnyHttpUrl, AnyUrl, @@ -12,7 +13,7 @@ from ..basic_types import IDStr from ..projects import ProjectID -from ..projects_nodes_io import NodeID +from ..projects_nodes_io import NodeID, SimcoreS3FileID 
from ..projects_pipeline import ComputationTask from ..users import UserID from ..wallets import WalletInfo @@ -72,6 +73,12 @@ class ComputationCreate(BaseModel): description="contains information about the wallet used to bill the running service" ), ] = None + collection_run_id: Annotated[ + CollectionRunID | None, + Field( + description="In case start_pipeline is True, this is the collection run id to which the comp run belongs." + ), + ] = None @field_validator("product_name") @classmethod @@ -83,6 +90,20 @@ def _ensure_product_name_defined_if_computation_starts( raise ValueError(msg) return v + @field_validator("collection_run_id") + @classmethod + def _ensure_collection_run_id_dependency_on_start_pipeline( + cls, v, info: ValidationInfo + ): + start_pipeline = info.data.get("start_pipeline") + if start_pipeline and v is None: + msg = "collection_run_id must be provided when start_pipeline is True!" + raise ValueError(msg) + if not start_pipeline and v is not None: + msg = "collection_run_id must be None when start_pipeline is False!" + raise ValueError(msg) + return v + class ComputationStop(BaseModel): user_id: UserID @@ -105,6 +126,30 @@ class TaskLogFileGet(BaseModel): ] = None +class TaskLogFileIdGet(BaseModel): + task_id: NodeID + file_id: SimcoreS3FileID | None + + model_config = ConfigDict( + json_schema_extra={ + "examples": [ + { + "task_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "file_id": "1c46752c-b096-11ea-a3c4-02420a00392e/3fa85f64-5717-4562-b3fc-2c963f66afa6/logs/task_logs.txt", + }, + { + "task_id": "6ba7b810-9dad-11d1-80b4-00c04fd430c8", + "file_id": "1c46752c-b096-11ea-a3c4-02420a00392e/6ba7b810-9dad-11d1-80b4-00c04fd430c8/logs/debug.log", + }, + { + "task_id": "6ba7b811-9dad-11d1-80b4-00c04fd430c8", + "file_id": None, + }, + ] + } + ) + + class TasksSelection(BaseModel): nodes_ids: list[NodeID] diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services.py b/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services.py index 565580b84bdc..93208f0aae5a 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services.py @@ -5,6 +5,7 @@ from ..resource_tracker import HardwareInfo, PricingInfo from ..services import ServicePortKey +from ..services_creation import CreateServiceMetricsAdditionalParams from ..services_resources import ServiceResourcesDict, ServiceResourcesDictHelpers from ..wallets import WalletInfo from .dynamic_services_service import RunningDynamicServiceDetails, ServiceDetails @@ -104,3 +105,11 @@ class GetProjectInactivityResponse(BaseModel): is_inactive: bool model_config = ConfigDict(json_schema_extra={"example": {"is_inactive": "false"}}) + + +class ContainersComposeSpec(BaseModel): + docker_compose_yaml: str + + +class ContainersCreate(BaseModel): + metrics_params: CreateServiceMetricsAdditionalParams diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py b/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py index 769e1fc94193..6dddfa748561 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/dynamic_services_service.py @@ -1,7 +1,9 @@ from functools import cached_property from pathlib import Path +from typing import Annotated from pydantic import 
BaseModel, ConfigDict, Field +from pydantic.config import JsonDict from ..basic_types import PortInt from ..projects import ProjectID @@ -88,40 +90,50 @@ class RunningDynamicServiceDetails(ServiceDetails): alias="service_message", ) + is_collaborative: Annotated[ + bool, + Field(description="True if service allows collaboration (multi-tenant access)"), + ] = False + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ # legacy + { + "service_key": "simcore/services/dynamic/raw-graphs", + "service_version": "2.10.6", + "user_id": 1, + "project_id": "32fb4eb6-ab30-11ef-9ee4-0242ac140008", + "service_uuid": "0cd049ba-cd6b-4a12-b416-a50c9bc8e7bb", + "service_basepath": "/x/0cd049ba-cd6b-4a12-b416-a50c9bc8e7bb", + "service_host": "raw-graphs_0cd049ba-cd6b-4a12-b416-a50c9bc8e7bb", + "service_port": 4000, + "published_port": None, + "entry_point": "", + "service_state": "running", + "service_message": "", + }, + # new style + { + "service_key": "simcore/services/dynamic/jupyter-math", + "service_version": "3.0.3", + "user_id": 1, + "project_id": "32fb4eb6-ab30-11ef-9ee4-0242ac140008", + "service_uuid": "6e3cad3a-eb64-43de-b476-9ac3c413fd9c", + "boot_type": "V2", + "service_host": "dy-sidecar_6e3cad3a-eb64-43de-b476-9ac3c413fd9c", + "service_port": 8888, + "service_state": "running", + "service_message": "", + }, + ] + } + ) + model_config = ConfigDict( ignored_types=(cached_property,), - json_schema_extra={ - "examples": [ - # legacy - { - "service_key": "simcore/services/dynamic/raw-graphs", - "service_version": "2.10.6", - "user_id": 1, - "project_id": "32fb4eb6-ab30-11ef-9ee4-0242ac140008", - "service_uuid": "0cd049ba-cd6b-4a12-b416-a50c9bc8e7bb", - "service_basepath": "/x/0cd049ba-cd6b-4a12-b416-a50c9bc8e7bb", - "service_host": "raw-graphs_0cd049ba-cd6b-4a12-b416-a50c9bc8e7bb", - "service_port": 4000, - "published_port": None, - "entry_point": "", - "service_state": "running", - "service_message": "", - }, - # new style - { - "service_key": "simcore/services/dynamic/jupyter-math", - "service_version": "3.0.3", - "user_id": 1, - "project_id": "32fb4eb6-ab30-11ef-9ee4-0242ac140008", - "service_uuid": "6e3cad3a-eb64-43de-b476-9ac3c413fd9c", - "boot_type": "V2", - "service_host": "dy-sidecar_6e3cad3a-eb64-43de-b476-9ac3c413fd9c", - "service_port": 8888, - "service_state": "running", - "service_message": "", - }, - ] - }, + json_schema_extra=_update_json_schema_extra, ) @cached_property diff --git a/packages/models-library/src/models_library/api_schemas_directorv2/errors.py b/packages/models-library/src/models_library/api_schemas_directorv2/errors.py index ecf33eefd146..81b7e07e1feb 100644 --- a/packages/models-library/src/models_library/api_schemas_directorv2/errors.py +++ b/packages/models-library/src/models_library/api_schemas_directorv2/errors.py @@ -1,14 +1,16 @@ +from typing import Annotated + from pydantic import BaseModel, Field class Error(BaseModel): - code: str | None = Field(None, description="Server Exception") + code: Annotated[str | None, Field(description="Server Exception")] = None class ErrorType(BaseModel): - message: str = Field(..., description="Error message") + message: Annotated[str, Field(description="Error message")] + status: Annotated[int, Field(description="Error code")] errors: list[Error] | None = None - status: int = Field(..., description="Error code") class ErrorEnveloped(BaseModel): diff --git a/packages/models-library/src/models_library/api_schemas_long_running_tasks/base.py 
b/packages/models-library/src/models_library/api_schemas_long_running_tasks/base.py index a3bb93813dc0..38f2fa2f926a 100644 --- a/packages/models-library/src/models_library/api_schemas_long_running_tasks/base.py +++ b/packages/models-library/src/models_library/api_schemas_long_running_tasks/base.py @@ -1,7 +1,9 @@ import logging +from collections.abc import Awaitable, Callable from typing import Annotated, TypeAlias -from pydantic import BaseModel, Field, field_validator, validate_call +from pydantic import BaseModel, ConfigDict, Field, field_validator, validate_call +from pydantic.config import JsonDict _logger = logging.getLogger(__name__) @@ -18,12 +20,36 @@ class TaskProgress(BaseModel): defined as a float bound between 0.0 and 1.0 """ - task_id: TaskId | None = Field(default=None) - message: ProgressMessage = Field(default="") - percent: ProgressPercent = Field(default=0.0) + task_id: TaskId | None = None + message: ProgressMessage = "" + percent: ProgressPercent = 0.0 + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + "task_id": "3ac48b54-a48d-4c5e-a6ac-dcaddb9eaa59", + "message": "Halfway done", + "percent": 0.5, + } + ] + } + ) + + model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) + + # used to propagate progress updates internally + _update_callback: Callable[["TaskProgress"], Awaitable[None]] | None = None + + def set_update_callback( + self, callback: Callable[["TaskProgress"], Awaitable[None]] + ) -> None: + self._update_callback = callback @validate_call - def update( + async def update( self, *, message: ProgressMessage | None = None, @@ -40,6 +66,16 @@ def update( _logger.debug("Progress update: %s", f"{self}") + if self._update_callback is not None: + try: + await self._update_callback(self) + except Exception as exc: # pylint: disable=broad-exception-caught + _logger.warning( + "Error while calling progress update callback: %s", + exc, + stack_info=True, + ) + @classmethod def create(cls, task_id: TaskId | None = None) -> "TaskProgress": return cls(task_id=task_id) diff --git a/packages/models-library/src/models_library/api_schemas_long_running_tasks/tasks.py b/packages/models-library/src/models_library/api_schemas_long_running_tasks/tasks.py index acd73831b22f..8f27127c71a0 100644 --- a/packages/models-library/src/models_library/api_schemas_long_running_tasks/tasks.py +++ b/packages/models-library/src/models_library/api_schemas_long_running_tasks/tasks.py @@ -2,7 +2,8 @@ from datetime import datetime from typing import Any -from pydantic import BaseModel, field_validator +from common_library.exclude import Unset +from pydantic import BaseModel, ConfigDict, model_validator from .base import TaskId, TaskProgress @@ -18,14 +19,32 @@ class TaskResult(BaseModel): error: Any | None -class TaskGet(BaseModel): +class TaskBase(BaseModel): task_id: TaskId - task_name: str + task_name: str | Unset = Unset.VALUE + + @model_validator(mode="after") + def try_populate_task_name_from_task_id(self) -> "TaskBase": + # NOTE: currently this model is used to validate tasks coming from + # the celery backend and from long_running_tasks + # 1. if a task comes from Celery, it will keep its given name + # 2. 
if a task comes from long_running_tasks, it will extract it from + # the task_id, which looks like "{PREFIX}.{TASK_NAME}.UNIQUE|{UUID}" + + if self.task_id and self.task_name == Unset.VALUE: + parts = self.task_id.split(".") + if len(parts) > 1: + self.task_name = urllib.parse.unquote(parts[1]) + + if self.task_name == Unset.VALUE: + self.task_name = self.task_id + + return self + + model_config = ConfigDict(arbitrary_types_allowed=True) + + +class TaskGet(TaskBase): status_href: str result_href: str abort_href: str - - @field_validator("task_name") - @classmethod - def unquote_str(cls, v) -> str: - return urllib.parse.unquote(v) diff --git a/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/async_jobs.py b/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/async_jobs.py index 3b19513ca361..e71ee54bfaa8 100644 --- a/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/async_jobs.py +++ b/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/async_jobs.py @@ -1,11 +1,9 @@ from typing import Annotated, Any, TypeAlias from uuid import UUID -from pydantic import BaseModel, StringConstraints +from pydantic import BaseModel, ConfigDict, StringConstraints -from ..products import ProductName from ..progress_bar import ProgressReport -from ..users import UserID AsyncJobId: TypeAlias = UUID AsyncJobName: TypeAlias = Annotated[ @@ -24,6 +22,17 @@ class AsyncJobResult(BaseModel): class AsyncJobGet(BaseModel): + model_config = ConfigDict( + json_schema_extra={ + "examples": [ + { + "job_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "job_name": "export_data_task", + } + ] + } + ) + job_id: AsyncJobId job_name: AsyncJobName @@ -31,10 +40,3 @@ class AsyncJobGet(BaseModel): class AsyncJobAbort(BaseModel): result: bool job_id: AsyncJobId - - -class AsyncJobNameData(BaseModel): - """Data for controlling access to an async job""" - - product_name: ProductName - user_id: UserID diff --git a/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/exceptions.py b/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/exceptions.py index 7399b6ff303d..67c0dd5353c6 100644 --- a/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/exceptions.py +++ b/packages/models-library/src/models_library/api_schemas_rpc_async_jobs/exceptions.py @@ -6,7 +6,7 @@ class BaseAsyncjobRpcError(OsparcErrorMixin, RuntimeError): class JobSchedulerError(BaseAsyncjobRpcError): - msg_template: str = "Celery exception: {exc}" + msg_template: str = "Async job scheduler exception: {exc}" class JobMissingError(BaseAsyncjobRpcError): diff --git a/packages/models-library/src/models_library/api_schemas_webserver/__init__.py b/packages/models-library/src/models_library/api_schemas_webserver/__init__.py index c95f68ab78c6..20fa4370b7f6 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/__init__.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/__init__.py @@ -7,3 +7,14 @@ WEBSERVER_RPC_NAMESPACE: Final[RPCNamespace] = TypeAdapter( RPCNamespace ).validate_python("webserver") + + +def get_webserver_rpc_namespace(webserver_host: str) -> RPCNamespace: + """ + Returns the RPC namespace to select among the different webserver services + + e.g. webserver, wb-api-server, wb-garbage-collector, etc. 
+ + On the service side, this is defined in settings.WEBSERVER_HOST + """ + return TypeAdapter(RPCNamespace).validate_python(webserver_host) diff --git a/packages/models-library/src/models_library/api_schemas_webserver/_base.py b/packages/models-library/src/models_library/api_schemas_webserver/_base.py index 4dfcf1473dd8..44ae40ddc51c 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/_base.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/_base.py @@ -1,5 +1,5 @@ """ - Base model classes for schemas in OpenAPI specs (OAS) for this service +Base model classes for schemas in OpenAPI specs (OAS) for this service """ @@ -29,9 +29,8 @@ class InputSchemaWithoutCamelCase(BaseModel): ) -class InputSchema(BaseModel): +class InputSchema(InputSchemaWithoutCamelCase): model_config = ConfigDict( - **InputSchemaWithoutCamelCase.model_config, alias_generator=snake_to_camel, ) diff --git a/packages/models-library/src/models_library/api_schemas_webserver/auth.py b/packages/models-library/src/models_library/api_schemas_webserver/auth.py index 697867d93b82..ccf17cb3a20f 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/auth.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/auth.py @@ -2,8 +2,10 @@ from typing import Annotated, Any from models_library.basic_types import IDStr +from models_library.rest_base import RequestParameters from pydantic import AliasGenerator, ConfigDict, Field, HttpUrl, SecretStr from pydantic.alias_generators import to_camel +from pydantic.config import JsonDict from ..emails import LowerCaseEmailStr from ._base import InputSchema, OutputSchema @@ -13,33 +15,39 @@ class AccountRequestInfo(InputSchema): form: dict[str, Any] captcha: str + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "example": { + "form": { + "firstName": "James", + "lastName": "Maxwel", + "email": "maxwel@email.com", + "phone": "+41 44 245 96 96", + "company": "EM Com", + "address": "Infinite Loop", + "city": "Washington", + "postalCode": "98001", + "country": "Switzerland", + "application": "Antenna_Design", + "description": "Description of something", + "hear": "Search_Engine", + "privacyPolicy": True, + "eula": True, + }, + "captcha": "A12B34", + } + } + ) + model_config = ConfigDict( str_strip_whitespace=True, str_max_length=200, # NOTE: this is just informative. The format of the form is defined # currently in the front-end and it might change # SEE image in https://github.com/ITISFoundation/osparc-simcore/pull/5378 - json_schema_extra={ - "example": { - "form": { - "firstName": "James", - "lastName": "Maxwel", - "email": "maxwel@email.com", - "phone": "+1 123456789", - "company": "EM Com", - "address": "Infinite Loop", - "city": "Washington", - "postalCode": "98001", - "country": "USA", - "application": "Antenna_Design", - "description": "Description of something", - "hear": "Search_Engine", - "privacyPolicy": True, - "eula": True, - }, - "captcha": "A12B34", - } - }, + json_schema_extra=_update_json_schema_extra, ) @@ -53,6 +61,17 @@ class UnregisterCheck(InputSchema): # +class ApiKeyListQueryParams(RequestParameters): + include_autogenerated: Annotated[ + bool, + Field( + alias="includeAutogenerated", + description="If True, then the list includes autogenerated API keys. 
" + "Otherwise, only user-created API keys are returned.", + ), + ] = False + + class ApiKeyCreateRequest(InputSchema): display_name: Annotated[str, Field(..., min_length=3)] expiration: Annotated[ diff --git a/packages/models-library/src/models_library/api_schemas_webserver/computations.py b/packages/models-library/src/models_library/api_schemas_webserver/computations.py index 0cd3d993b6d7..ae4b05360205 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/computations.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/computations.py @@ -8,17 +8,20 @@ BaseModel, ConfigDict, Field, + field_validator, ) from ..api_schemas_directorv2.computations import ( ComputationGet as _DirectorV2ComputationGet, ) from ..basic_types import IDStr +from ..computations import CollectionRunID from ..projects import CommitID, ProjectID from ..projects_nodes_io import NodeID from ..projects_state import RunningState from ..rest_ordering import OrderBy, create_ordering_query_model_class from ..rest_pagination import PageQueryParameters +from ..utils.common_validators import null_or_none_str_to_none_validator from ._base import ( InputSchemaWithoutCamelCase, OutputSchema, @@ -153,3 +156,54 @@ class ComputationTaskRestGet(OutputSchema): log_download_link: AnyUrl | None node_name: str osparc_credits: Decimal | None + + +### Computation Collection Run + + +class ComputationCollectionRunListQueryParams( + PageQueryParameters, +): + filter_only_running: Annotated[ + bool, Field(description="If true, only running collection runs are returned") + ] = False + + filter_by_root_project_id: ProjectID | None = None + + _null_or_none_to_none = field_validator("filter_by_root_project_id", mode="before")( + null_or_none_str_to_none_validator + ) + + +class ComputationCollectionRunRestGet(OutputSchema): + collection_run_id: CollectionRunID + project_ids: list[str] + state: RunningState + info: dict[str, Any] + submitted_at: datetime + started_at: datetime | None + ended_at: datetime | None + name: str + + +class ComputationCollectionRunPathParams(BaseModel): + collection_run_id: CollectionRunID + model_config = ConfigDict(populate_by_name=True, extra="forbid") + + +class ComputationCollectionRunTaskListQueryParams( + PageQueryParameters, +): ... 
+ + +class ComputationCollectionRunTaskRestGet(OutputSchema): + project_uuid: ProjectID + node_id: NodeID + state: RunningState + progress: float + image: dict[str, Any] + started_at: datetime | None + ended_at: datetime | None + log_download_link: AnyUrl | None + osparc_credits: Decimal | None + name: str diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects_conversations.py b/packages/models-library/src/models_library/api_schemas_webserver/conversations.py similarity index 84% rename from packages/models-library/src/models_library/api_schemas_webserver/projects_conversations.py rename to packages/models-library/src/models_library/api_schemas_webserver/conversations.py index b3d7a3c2590c..c0fade966011 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/projects_conversations.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/conversations.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import Annotated, Self +from typing import Annotated, Any, Self from pydantic import Field @@ -16,7 +16,7 @@ from ..projects import ProjectID from ._base import InputSchema, OutputSchema -### PROJECT CONVERSATION ------------------------------------------------------------------- +### CONVERSATION ------------------------------------------------------------------- class ConversationRestGet(OutputSchema): @@ -28,6 +28,7 @@ class ConversationRestGet(OutputSchema): type: ConversationType created: datetime modified: datetime + extra_context: dict[str, str] @classmethod def from_domain_model(cls, domain: ConversationGetDB) -> Self: @@ -40,14 +41,16 @@ def from_domain_model(cls, domain: ConversationGetDB) -> Self: type=domain.type, created=domain.created, modified=domain.modified, + extra_context=domain.extra_context, ) class ConversationPatch(InputSchema): name: str | None = None + extra_context: dict[str, Any] | None = None -### PROJECT CONVERSATION MESSAGES --------------------------------------------------------------- +### CONVERSATION MESSAGES --------------------------------------------------------------- class ConversationMessageRestGet(OutputSchema): diff --git a/packages/models-library/src/models_library/api_schemas_webserver/functions.py b/packages/models-library/src/models_library/api_schemas_webserver/functions.py index c1f4a7b55e59..226db44f68d5 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/functions.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/functions.py @@ -1,6 +1,7 @@ +import datetime from typing import Annotated, TypeAlias -from pydantic import Field +from pydantic import ConfigDict, Field, HttpUrl from ..functions import ( Function, @@ -23,6 +24,7 @@ FunctionOutputs, FunctionOutputSchema, FunctionSchemaClass, + FunctionUpdate, JSONFunctionInputSchema, JSONFunctionOutputSchema, ProjectFunction, @@ -46,6 +48,8 @@ UnsupportedFunctionClassError, UnsupportedFunctionFunctionJobClassCombinationError, ) +from ..groups import GroupID +from ..projects import ProjectID from ._base import InputSchema, OutputSchema __all__ = [ @@ -110,10 +114,72 @@ ] -class RegisteredSolverFunctionGet(RegisteredSolverFunction, OutputSchema): ... +class FunctionGroupAccessRightsGet(OutputSchema): + read: bool + write: bool + execute: bool -class RegisteredProjectFunctionGet(RegisteredProjectFunction, OutputSchema): ... 
+class FunctionGroupAccessRightsUpdate(InputSchema): + read: bool + write: bool + execute: bool + + +class RegisteredSolverFunctionGet(RegisteredSolverFunction, OutputSchema): + uid: Annotated[FunctionID, Field(alias="uuid")] + created_at: Annotated[datetime.datetime, Field(alias="creationDate")] + modified_at: Annotated[datetime.datetime, Field(alias="lastChangeDate")] + access_rights: dict[GroupID, FunctionGroupAccessRightsGet] + thumbnail: HttpUrl | None = None + + +class RegisteredProjectFunctionGet(RegisteredProjectFunction, OutputSchema): + uid: Annotated[FunctionID, Field(alias="uuid")] + project_id: Annotated[ProjectID, Field(alias="templateId")] + created_at: Annotated[datetime.datetime, Field(alias="creationDate")] + modified_at: Annotated[datetime.datetime, Field(alias="lastChangeDate")] + access_rights: dict[GroupID, FunctionGroupAccessRightsGet] + thumbnail: HttpUrl | None = None + model_config = ConfigDict( + populate_by_name=True, + json_schema_extra={ + "examples": [ + { + "function_class": "PROJECT", + "title": "Example Project Function", + "description": "This is an example project function.", + "input_schema": { + "schema_content": { + "type": "object", + "properties": {"input1": {"type": "integer"}}, + }, + "schema_class": "application/schema+json", + }, + "output_schema": { + "schema_content": { + "type": "object", + "properties": {"output1": {"type": "string"}}, + }, + "schema_class": "application/schema+json", + }, + "default_inputs": None, + "project_id": "11111111-1111-1111-1111-111111111111", + "uid": "22222222-2222-2222-2222-222222222222", + "created_at": "2024-01-01T12:00:00", + "modified_at": "2024-01-02T12:00:00", + "access_rights": { + "5": { + "read": True, + "write": False, + "execute": True, + } + }, + "thumbnail": None, + }, + ] + }, + ) class SolverFunctionToRegister(SolverFunction, InputSchema): ... @@ -131,3 +197,6 @@ class ProjectFunctionToRegister(ProjectFunction, InputSchema): ... RegisteredProjectFunctionGet | RegisteredSolverFunctionGet, Field(discriminator="function_class"), ] + + +class RegisteredFunctionUpdate(FunctionUpdate, InputSchema): ... 
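For orientation, a minimal sketch of how the new function REST schemas are expected to serialize. It assumes that the explicit Field aliases (uuid, templateId, creationDate, lastChangeDate) take precedence over the camel-case alias generator of OutputSchema, and that the example embedded in RegisteredProjectFunctionGet.model_config is complete enough to validate; both are assumptions, not guarantees of this diff.

from models_library.api_schemas_webserver.functions import RegisteredProjectFunctionGet

# snake_case field names are accepted thanks to populate_by_name=True
example = RegisteredProjectFunctionGet.model_config["json_schema_extra"]["examples"][0]
function = RegisteredProjectFunctionGet.model_validate(example)

dumped = function.model_dump(mode="json", by_alias=True)
assert dumped["uuid"] == example["uid"]  # uid is exposed as "uuid" on the wire
assert dumped["templateId"] == example["project_id"]  # project_id is exposed as "templateId"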
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/groups.py b/packages/models-library/src/models_library/api_schemas_webserver/groups.py index 643c66b817a7..5f56fbc9790e 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/groups.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/groups.py @@ -54,14 +54,48 @@ class GroupAccessRights(BaseModel): ) -class GroupGet(OutputSchema): - gid: GroupID = Field(..., description="the group ID") - label: str = Field(..., description="the group name") - description: str = Field(..., description="the group description") - thumbnail: AnyUrl | None = Field( - default=None, description="url to the group thumbnail" - ) - access_rights: GroupAccessRights = Field(..., alias="accessRights") +class GroupGetBase(OutputSchema): + gid: Annotated[GroupID, Field(description="the group's unique ID")] + label: Annotated[str, Field(description="the group's display name")] + description: str + thumbnail: Annotated[ + AnyUrl | None, Field(description="a link to the group's thumbnail") + ] = None + + @field_validator("thumbnail", mode="before") + @classmethod + def _sanitize_thumbnail_input(cls, v): + if v: + # Enforces null if thumbnail is not valid URL or empty + with suppress(ValidationError): + return TypeAdapter(AnyHttpUrl).validate_python(v) + return None + + @classmethod + def dump_basic_group_data(cls, group: Group) -> dict: + """Helper function to extract common group data for schema conversion""" + return remap_keys( + group.model_dump( + include={ + "gid", + "name", + "description", + "thumbnail", + }, + exclude={ + "inclusion_rules", # deprecated + }, + exclude_unset=True, + by_alias=False, + ), + rename={ + "name": "label", + }, + ) + + +class GroupGet(GroupGetBase): + access_rights: Annotated[GroupAccessRights, Field(alias="accessRights")] inclusion_rules: Annotated[ dict[str, str], @@ -77,24 +111,7 @@ def from_domain_model(cls, group: Group, access_rights: AccessRightsDict) -> Sel # Adapts these domain models into this schema return cls.model_validate( { - **remap_keys( - group.model_dump( - include={ - "gid", - "name", - "description", - "thumbnail", - }, - exclude={ - "inclusion_rules", # deprecated - }, - exclude_unset=True, - by_alias=False, - ), - rename={ - "name": "label", - }, - ), + **cls.dump_basic_group_data(group), "access_rights": access_rights, } ) @@ -136,15 +153,6 @@ def _update_json_schema_extra(schema: JsonDict) -> None: model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) - @field_validator("thumbnail", mode="before") - @classmethod - def _sanitize_legacy_data(cls, v): - if v: - # Enforces null if thumbnail is not valid URL or empty - with suppress(ValidationError): - return TypeAdapter(AnyHttpUrl).validate_python(v) - return None - class GroupCreate(InputSchema): label: str @@ -187,6 +195,12 @@ class MyGroupsGet(OutputSchema): organizations: list[GroupGet] | None = None all: GroupGet product: GroupGet | None = None + support: Annotated[ + GroupGetBase | None, + Field( + description="Group ID of the app support team or None if no support is defined for this product" + ), + ] = None model_config = ConfigDict( json_schema_extra={ @@ -225,6 +239,12 @@ class MyGroupsGet(OutputSchema): "description": "Open to all users", "accessRights": {"read": True, "write": False, "delete": False}, }, + "support": { + "gid": "2", + "label": "Support Team", + "description": "The support team of the application", + "thumbnail": "https://placekitten.com/15/15", + }, } } 
) @@ -234,6 +254,7 @@ def from_domain_model( cls, groups_by_type: GroupsByTypeTuple, my_product_group: tuple[Group, AccessRightsDict] | None, + product_support_group: Group | None, ) -> Self: assert groups_by_type.primary # nosec assert groups_by_type.everyone # nosec @@ -249,6 +270,13 @@ def from_domain_model( if my_product_group else None ), + support=( + GroupGetBase.model_validate( + GroupGetBase.dump_basic_group_data(product_support_group) + ) + if product_support_group + else None + ), ) diff --git a/packages/models-library/src/models_library/api_schemas_webserver/products.py b/packages/models-library/src/models_library/api_schemas_webserver/products.py index 61f03a2c5e95..062d574faabf 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/products.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/products.py @@ -135,10 +135,23 @@ class ProductUIGet(OutputSchema): ExtraCreditsUsdRangeInt: TypeAlias = Annotated[int, Field(ge=0, lt=500)] +TrialAccountAnnotated: TypeAlias = Annotated[ + PositiveInt | None, + Field( + description="Expiration time in days for trial accounts; `null` means not a trial account" + ), +] + +WelcomeCreditsAnnotated: TypeAlias = Annotated[ + ExtraCreditsUsdRangeInt | None, + Field(description="Welcome credits in USD; `null` means no welcome credits"), +] + + class InvitationGenerate(InputSchema): guest: LowerCaseEmailStr - trial_account_days: PositiveInt | None = None - extra_credits_in_usd: ExtraCreditsUsdRangeInt | None = None + trial_account_days: TrialAccountAnnotated = None + extra_credits_in_usd: WelcomeCreditsAnnotated = None class InvitationGenerated(OutputSchema): diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects.py b/packages/models-library/src/models_library/api_schemas_webserver/projects.py index 2b15e052944b..083628693882 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/projects.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/projects.py @@ -35,7 +35,12 @@ ProjectType, ) from ..projects_access import AccessRights, GroupIDStr -from ..projects_state import ProjectState +from ..projects_state import ( + ProjectShareCurrentUserGroupIDs, + ProjectShareLocked, + ProjectShareStatus, + ProjectStateRunningState, +) from ..utils._original_fastapi_encoders import jsonable_encoder from ..utils.common_validators import ( empty_str_to_none_pre_validator, @@ -105,12 +110,23 @@ def to_domain_model(self) -> dict[str, Any]: ) +class ProjectShareStateOutputSchema(OutputSchema): + status: ProjectShareStatus + locked: ProjectShareLocked + current_user_groupids: ProjectShareCurrentUserGroupIDs + + +class ProjectStateOutputSchema(OutputSchema): + share_state: ProjectShareStateOutputSchema + state: ProjectStateRunningState + + class ProjectGet(OutputSchema): uuid: ProjectID # display name: str - description: str + description: Annotated[str, BeforeValidator(none_to_empty_str_pre_validator)] thumbnail: HttpUrl | Literal[""] type: ProjectType @@ -124,11 +140,11 @@ class ProjectGet(OutputSchema): # state creation_date: DateTimeStr last_change_date: DateTimeStr - state: ProjectState | None = None + state: ProjectStateOutputSchema | None = None trashed_at: datetime | None trashed_by: Annotated[ GroupID | None, Field(description="The primary gid of the user who trashed") - ] + ] = None # labeling tags: list[int] @@ -150,10 +166,6 @@ class ProjectGet(OutputSchema): workspace_id: WorkspaceID | None folder_id: FolderID | None - 
_empty_description = field_validator("description", mode="before")( - none_to_empty_str_pre_validator - ) - @staticmethod def _update_json_schema_extra(schema: JsonDict) -> None: schema.update( @@ -262,11 +274,34 @@ class ProjectPatch(InputSchema): ] = None quality: dict[str, Any] | None = None template_type: ProjectTemplateType | None = None + hidden: bool | None = None def to_domain_model(self) -> dict[str, Any]: return self.model_dump(exclude_unset=True, by_alias=False) +class ProjectDocument(OutputSchema): + uuid: ProjectID + workspace_id: WorkspaceID | None + name: str + description: str + thumbnail: HttpUrl | None + last_change_date: datetime + classifiers: list[ClassifierID] + dev: dict | None + quality: dict[str, Any] + workbench: NodesDict + ui: StudyUI | None + type: ProjectType + template_type: ProjectTemplateType | None + + # config + model_config = ConfigDict(from_attributes=True, arbitrary_types_allowed=True) + + +ProjectDocumentVersion: TypeAlias = int + + __all__: tuple[str, ...] = ( "EmptyModel", "ProjectCopyOverride", diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py b/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py index a89325532013..c180bec8c506 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/projects_nodes.py @@ -1,18 +1,19 @@ # mypy: disable-error-code=truthy-function from typing import Annotated, Any, Literal, TypeAlias -from models_library.groups import GroupID -from models_library.projects import ProjectID -from models_library.services_history import ServiceRelease from pydantic import ConfigDict, Field +from pydantic.config import JsonDict from ..access_rights import ExecutableAccessRights from ..api_schemas_directorv2.dynamic_services import RetrieveDataOut from ..basic_types import PortInt +from ..groups import GroupID +from ..projects import ProjectID from ..projects_nodes import InputID, InputsDict, PartialNode from ..projects_nodes_io import NodeID from ..services import ServiceKey, ServicePortKey, ServiceVersion from ..services_enums import ServiceState +from ..services_history import ServiceRelease from ..services_resources import ServiceResourcesDict from ._base import InputSchemaWithoutCamelCase, OutputSchema @@ -163,14 +164,20 @@ class NodeGetIdle(OutputSchema): def from_node_id(cls, node_id: NodeID) -> "NodeGetIdle": return cls(service_state="idle", service_uuid=node_id) - model_config = ConfigDict( - json_schema_extra={ - "example": { - "service_uuid": "3fa85f64-5717-4562-b3fc-2c963f66afa6", - "service_state": "idle", + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + "service_uuid": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "service_state": "idle", + } + ] } - } - ) + ) + + model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) class NodeGetUnknown(OutputSchema): diff --git a/packages/models-library/src/models_library/api_schemas_webserver/projects_ui.py b/packages/models-library/src/models_library/api_schemas_webserver/projects_ui.py index 9bbb92f447c2..e912a621acc7 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/projects_ui.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/projects_ui.py @@ -40,8 +40,8 @@ class SlideshowUI(TypedDict): class AnnotationUI(BaseModel): - type: Literal["note", "rect", "text"] - color: 
Annotated[Color, PlainSerializer(Color.as_hex)] + type: Literal["note", "rect", "text", "conversation"] + color: Annotated[Color | None, PlainSerializer(Color.as_hex)] = None attributes: Annotated[dict, Field(description="svg attributes")] @staticmethod @@ -71,6 +71,15 @@ def _update_json_schema_extra(schema: JsonDict) -> None: "color": "#0000FF", "attributes": {"x": 415, "y": 100, "text": "Hey!"}, }, + { + "type": "conversation", + "attributes": { + "conversationId": 2, + "x": 415, + "y": 100, + "title": "My chat", + }, + }, ] }, ) @@ -82,13 +91,14 @@ def _update_json_schema_extra(schema: JsonDict) -> None: class StudyUI(OutputSchema): # Model fully controlled by the UI and stored under `projects.ui` - icon: HttpUrl | None = None + icon: HttpUrl | None = None # <-- Deprecated workbench: dict[NodeIDStr, WorkbenchUI] | None = None slideshow: dict[NodeIDStr, SlideshowUI] | None = None current_node_id: NodeID | None = None annotations: dict[NodeIDStr, AnnotationUI] | None = None - template_type: Literal["hypertool"] | None = None + template_type: Literal["hypertool"] | None = None # <-- Deprecated + mode: Literal["workbench", "app", "guided", "standalone", "pipeline"] | None = None _empty_is_none = field_validator("*", mode="before")( empty_str_to_none_pre_validator @@ -169,6 +179,15 @@ def _update_json_schema_extra(schema: JsonDict) -> None: "fontSize": 12, }, }, + "cf94f068-259c-4192-89f9-b2a56d51249d": { + "type": "conversation", + "attributes": { + "conversationId": 2, + "x": 119, + "y": 223, + "title": "My chat", + }, + }, }, "current_node_id": "4b3345e5-861f-47b0-8b52-a4508449be79", "template_type": "hypertool", diff --git a/packages/models-library/src/models_library/api_schemas_webserver/socketio.py b/packages/models-library/src/models_library/api_schemas_webserver/socketio.py index 6e3f987198ac..32753cdb829a 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/socketio.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/socketio.py @@ -1,5 +1,6 @@ from ..basic_types import IDStr from ..groups import GroupID +from ..projects import ProjectID from ..users import UserID @@ -15,3 +16,7 @@ def from_group_id(cls, group_id: GroupID) -> "SocketIORoomStr": @classmethod def from_user_id(cls, user_id: UserID) -> "SocketIORoomStr": return cls(f"user:{user_id}") + + @classmethod + def from_project_id(cls, project_id: ProjectID) -> "SocketIORoomStr": + return cls(f"project:{project_id}") diff --git a/packages/models-library/src/models_library/api_schemas_webserver/users.py b/packages/models-library/src/models_library/api_schemas_webserver/users.py index ed102bf746a6..052b8bb4440e 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/users.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/users.py @@ -1,7 +1,7 @@ import re from datetime import date, datetime from enum import Enum -from typing import Annotated, Any, Literal, Self +from typing import Annotated, Any, Literal, Self, TypeAlias import annotated_types from common_library.basic_types import DEFAULT_FACTORY @@ -11,18 +11,20 @@ from models_library.rest_filters import Filters from models_library.rest_pagination import PageQueryParameters from pydantic import ( + BaseModel, ConfigDict, EmailStr, Field, StringConstraints, ValidationInfo, field_validator, + model_validator, ) from pydantic.config import JsonDict from ..basic_types import IDStr from ..emails import LowerCaseEmailStr -from ..groups import AccessRightsDict, Group, GroupID, 
GroupsByTypeTuple +from ..groups import AccessRightsDict, Group, GroupID, GroupsByTypeTuple, PrimaryGroupID from ..products import ProductName from ..rest_base import RequestParameters from ..users import ( @@ -41,7 +43,7 @@ OutputSchemaWithoutCamelCase, ) from .groups import MyGroupsGet -from .products import InvitationGenerate +from .products import TrialAccountAnnotated, WelcomeCreditsAnnotated from .users_preferences import AggregatedPreferences # @@ -61,7 +63,18 @@ class MyProfilePrivacyPatch(InputSchema): hide_email: bool | None = None -class MyProfileGet(OutputSchemaWithoutCamelCase): +class MyProfileAddressGet(OutputSchema): + """Details provided upon registration and used e.g. for invoicing""" + + institution: str | None + address: str | None + city: str | None + state: Annotated[str | None, Field(description="State, province, canton, ...")] + postal_code: str | None + country: str | None + + +class MyProfileRestGet(OutputSchemaWithoutCamelCase): id: UserID user_name: Annotated[ IDStr, Field(description="Unique username identifier", alias="userName") @@ -69,8 +82,16 @@ class MyProfileGet(OutputSchemaWithoutCamelCase): first_name: FirstNameStr | None = None last_name: LastNameStr | None = None login: LowerCaseEmailStr - - role: Literal["ANONYMOUS", "GUEST", "USER", "TESTER", "PRODUCT_OWNER", "ADMIN"] + phone: str | None = None + + role: Literal[ + "ANONYMOUS", + "GUEST", + "USER", + "TESTER", + "PRODUCT_OWNER", + "ADMIN", + ] groups: MyGroupsGet | None = None gravatar_id: Annotated[str | None, Field(deprecated=True)] = None @@ -84,6 +105,7 @@ class MyProfileGet(OutputSchemaWithoutCamelCase): privacy: MyProfilePrivacyGet preferences: AggregatedPreferences + contact: MyProfileAddressGet | None = None @staticmethod def _update_json_schema_extra(schema: JsonDict) -> None: @@ -103,6 +125,25 @@ def _update_json_schema_extra(schema: JsonDict) -> None: "hide_email": 1, }, }, + { + "id": 1, + "login": "minimal@user.com", + "userName": "minuser", + "role": "USER", + "preferences": {}, + "privacy": { + "hide_username": False, + "hide_fullname": False, + "hide_email": False, + }, + "provided": { + "address": "123 Main St", + "city": "Sampleville", + "state": "CA", + "postal_code": "12345", + "country": "Wonderland", + }, + }, ] } ) @@ -130,8 +171,10 @@ def from_domain_model( my_groups_by_type: GroupsByTypeTuple, my_product_group: tuple[Group, AccessRightsDict] | None, my_preferences: AggregatedPreferences, + my_support_group: Group | None, + profile_contact: MyProfileAddressGet | None = None, ) -> Self: - data = remap_keys( + profile_data = remap_keys( my_profile.model_dump( include={ "id", @@ -140,6 +183,7 @@ def from_domain_model( "last_name", "email", "role", + "phone", "privacy", "expiration_date", }, @@ -148,27 +192,28 @@ def from_domain_model( rename={"email": "login"}, ) return cls( - **data, - groups=MyGroupsGet.from_domain_model(my_groups_by_type, my_product_group), + **profile_data, + groups=MyGroupsGet.from_domain_model( + my_groups_by_type, my_product_group, my_support_group + ), preferences=my_preferences, + contact=profile_contact, ) -class MyProfilePatch(InputSchemaWithoutCamelCase): +class MyProfileRestPatch(InputSchemaWithoutCamelCase): first_name: FirstNameStr | None = None last_name: LastNameStr | None = None user_name: Annotated[IDStr | None, Field(alias="userName", min_length=4)] = None + # NOTE: phone is updated via a dedicated endpoint! 
privacy: MyProfilePrivacyPatch | None = None - model_config = ConfigDict( - json_schema_extra={ - "example": { - "first_name": "Pedro", - "last_name": "Crespo", - } - } - ) + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update({"examples": [{"first_name": "Pedro", "last_name": "Crespo"}]}) + + model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) @field_validator("user_name") @classmethod @@ -255,24 +300,55 @@ class UsersForAdminListFilter(Filters): class UsersAccountListQueryParams(UsersForAdminListFilter, PageQueryParameters): ... +class _InvitationDetails(InputSchema): + trial_account_days: TrialAccountAnnotated = None + extra_credits_in_usd: WelcomeCreditsAnnotated = None + + class UserAccountApprove(InputSchema): email: EmailStr - invitation: InvitationGenerate | None = None + invitation: _InvitationDetails | None = None class UserAccountReject(InputSchema): email: EmailStr +GlobString: TypeAlias = Annotated[ + str, + StringConstraints( + min_length=3, max_length=200, strip_whitespace=True, pattern=r"^[^%]*$" + ), +] + + class UserAccountSearchQueryParams(RequestParameters): email: Annotated[ - str, + GlobString | None, Field( - min_length=3, - max_length=200, description="complete or glob pattern for an email", ), - ] + ] = None + primary_group_id: Annotated[ + GroupID | None, + Field( + description="Filter by primary group ID", + ), + ] = None + user_name: Annotated[ + GlobString | None, + Field( + description="complete or glob pattern for a username", + ), + ] = None + + @model_validator(mode="after") + def _validate_at_least_one_filter(self) -> Self: + field_names = list(self.__class__.model_fields) + if not any(getattr(self, field_name, None) for field_name in field_names): + msg = f"At least one filter {field_names} must be provided" + raise ValueError(msg) + return self class UserAccountGet(OutputSchema): @@ -295,16 +371,17 @@ class UserAccountGet(OutputSchema): ), ] = DEFAULT_FACTORY - # pre-registration + # pre-registration NOTE: some users have no pre-registration and therefore all options here can be None pre_registration_id: int | None - invited_by: str | None = None + pre_registration_created: datetime | None + invited_by: UserNameID | None = None account_request_status: AccountRequestStatus | None - account_request_reviewed_by: UserID | None = None + account_request_reviewed_by: UserNameID | None = None account_request_reviewed_at: datetime | None = None # user status registered: bool - status: UserStatus | None + status: UserStatus | None = None products: Annotated[ list[ProductName] | None, Field( @@ -312,6 +389,24 @@ def _consistency_check(cls, v, info: ValidationInfo): ), ] = None + # user (if an account was created) + user_id: Annotated[ + UserID | None, + Field(description="Unique identifier of the user if an account was created"), + ] = None + user_name: Annotated[ + UserNameID | None, + Field(description="Username of the user if an account was created"), + ] = None + user_primary_group_id: Annotated[ + PrimaryGroupID | None, + Field( + description="Primary group ID of the user if an account was created", + alias="groupId", + # SEE https://github.com/ITISFoundation/osparc-simcore/pull/8358#issuecomment-3279491740 + ), + ] = None + @field_validator("status") @classmethod def _consistency_check(cls, v, info: ValidationInfo): @@ -328,6 +423,10 @@ def _consistency_check(cls, v, info: ValidationInfo): # +class TokenPathParams(BaseModel): + service: str + + class MyTokenCreate(InputSchemaWithoutCamelCase): service: Annotated[
IDStr, @@ -372,3 +471,8 @@ class MyPermissionGet(OutputSchema): @classmethod def from_domain_model(cls, permission: UserPermission) -> Self: return cls(name=permission.name, allowed=permission.allowed) + + +class MyFunctionPermissionsGet(OutputSchema): + read_functions: bool + write_functions: bool diff --git a/packages/models-library/src/models_library/auth.py b/packages/models-library/src/models_library/auth.py new file mode 100644 index 000000000000..eead22ea84bb --- /dev/null +++ b/packages/models-library/src/models_library/auth.py @@ -0,0 +1,8 @@ +from typing import Final + +from models_library.rpc.webserver.auth.api_keys import generate_api_key_prefix + +API_KEY_AUTOGENERATED_DISPLAY_NAME_PREFIX: Final[str] = "__auto_" +API_KEY_AUTOGENERATED_KEY_PREFIX: Final[str] = generate_api_key_prefix( + API_KEY_AUTOGENERATED_DISPLAY_NAME_PREFIX +) diff --git a/packages/models-library/src/models_library/boot_options.py b/packages/models-library/src/models_library/boot_options.py index 8b26f70c210d..096a958a6a92 100644 --- a/packages/models-library/src/models_library/boot_options.py +++ b/packages/models-library/src/models_library/boot_options.py @@ -1,4 +1,5 @@ from pydantic import BaseModel, ConfigDict, ValidationInfo, field_validator +from pydantic.config import JsonDict from typing_extensions import ( # https://docs.pydantic.dev/latest/api/standard_library_types/#typeddict TypedDict, ) @@ -26,42 +27,46 @@ def ensure_default_included(cls, v, info: ValidationInfo): raise ValueError(msg) return v - model_config = ConfigDict( - json_schema_extra={ - "examples": [ - { - "label": "Boot mode", - "description": "Start it in web page mode", - "default": "0", - "items": { - "0": { - "label": "Non Voila", - "description": "Tooltip for non Voila boot mode", - }, - "1": { - "label": "Voila", - "description": "Tooltip for Voila boot mode", + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + "label": "Boot mode", + "description": "Start it in web page mode", + "default": "0", + "items": { + "0": { + "label": "Non Voila", + "description": "Tooltip for non Voila boot mode", + }, + "1": { + "label": "Voila", + "description": "Tooltip for Voila boot mode", + }, }, }, - }, - { - "label": "Application theme", - "description": "Select a theme for the application", - "default": "b", - "items": { - "a": { - "label": "Clear", - "description": "Using white background", - }, - "b": { - "label": "Dark", - "description": "Using black and gray tones", + { + "label": "Application theme", + "description": "Select a theme for the application", + "default": "b", + "items": { + "a": { + "label": "Clear", + "description": "Using white background", + }, + "b": { + "label": "Dark", + "description": "Using black and gray tones", + }, }, }, - }, - ] - } - ) + ] + } + ) + + model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) BootOptions = dict[EnvVarKey, BootOption] diff --git a/packages/models-library/src/models_library/computations.py b/packages/models-library/src/models_library/computations.py index 6b88aff83ad2..1efabc3fc73b 100644 --- a/packages/models-library/src/models_library/computations.py +++ b/packages/models-library/src/models_library/computations.py @@ -1,6 +1,7 @@ from datetime import datetime from decimal import Decimal -from typing import Any +from typing import Any, TypeAlias +from uuid import UUID from pydantic import AnyUrl, BaseModel @@ -36,3 +37,34 @@ class ComputationRunWithAttributes(BaseModel): # Attributes added by the 
webserver root_project_name: str project_custom_metadata: dict[str, Any] + + +CollectionRunID: TypeAlias = UUID + + +class ComputationCollectionRunWithAttributes(BaseModel): + collection_run_id: CollectionRunID + project_ids: list[str] + state: RunningState + info: dict[str, Any] + submitted_at: datetime + started_at: datetime | None + ended_at: datetime | None + + # Attributes added by the webserver + name: str # Either root project name or collection name if provided by the client on start + + +class ComputationCollectionRunTaskWithAttributes(BaseModel): + project_uuid: ProjectID + node_id: NodeID + state: RunningState + progress: float + image: dict[str, Any] + started_at: datetime | None + ended_at: datetime | None + log_download_link: AnyUrl | None + + # Attributes added by the webserver + name: str # Either node name or job name if provided by the client on start + osparc_credits: Decimal | None diff --git a/packages/models-library/src/models_library/conversations.py b/packages/models-library/src/models_library/conversations.py index 5d33a0fcd455..8db6a85987c6 100644 --- a/packages/models-library/src/models_library/conversations.py +++ b/packages/models-library/src/models_library/conversations.py @@ -1,16 +1,20 @@ from datetime import datetime from enum import auto -from typing import TypeAlias +from typing import Annotated, Any, TypeAlias from uuid import UUID from models_library.groups import GroupID from models_library.projects import ProjectID -from pydantic import BaseModel, ConfigDict +from pydantic import BaseModel, ConfigDict, StringConstraints from .products import ProductName from .utils.enums import StrAutoEnum ConversationID: TypeAlias = UUID +ConversationName: TypeAlias = Annotated[ + str, StringConstraints(strip_whitespace=True, min_length=1, max_length=255) +] + ConversationMessageID: TypeAlias = UUID @@ -19,6 +23,7 @@ class ConversationType(StrAutoEnum): PROJECT_ANNOTATION = ( auto() # Something like sticky note, can be located anywhere in the pipeline UI ) + SUPPORT = auto() # Support conversation class ConversationMessageType(StrAutoEnum): @@ -36,10 +41,11 @@ class ConversationMessageType(StrAutoEnum): class ConversationGetDB(BaseModel): conversation_id: ConversationID product_name: ProductName - name: str + name: ConversationName project_uuid: ProjectID | None user_group_id: GroupID type: ConversationType + extra_context: dict[str, Any] # states created: datetime @@ -63,7 +69,8 @@ class ConversationMessageGetDB(BaseModel): class ConversationPatchDB(BaseModel): - name: str | None = None + name: ConversationName | None = None + extra_context: dict[str, Any] | None = None class ConversationMessagePatchDB(BaseModel): diff --git a/packages/models-library/src/models_library/docker.py b/packages/models-library/src/models_library/docker.py index ae23ba7eec4d..13719609436b 100644 --- a/packages/models-library/src/models_library/docker.py +++ b/packages/models-library/src/models_library/docker.py @@ -1,25 +1,12 @@ -import contextlib import re -from typing import Annotated, Any, Final, TypeAlias +from typing import Annotated, TypeAlias from pydantic import ( - BaseModel, - ByteSize, - ConfigDict, - Field, StringConstraints, - TypeAdapter, - ValidationError, - model_validator, ) from .basic_regex import DOCKER_GENERIC_TAG_KEY_RE, DOCKER_LABEL_KEY_REGEX from .basic_types import ConstrainedStr -from .generated_models.docker_rest_api import Task -from .products import ProductName -from .projects import ProjectID -from .projects_nodes_io import NodeID -from .users import UserID 
class DockerLabelKey(ConstrainedStr): @@ -47,192 +34,6 @@ def from_key(cls, key: str) -> "DockerLabelKey": ), ] -_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX: Final[str] = "io.simcore.runtime." -_BACKWARDS_COMPATIBILITY_SIMCORE_RUNTIME_DOCKER_LABELS_MAP: Final[dict[str, str]] = { - "node_id": f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}node-id", - "product_name": f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}product-name", - "project_id": f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}project-id", - "simcore_user_agent": f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}simcore-user-agent", - "study_id": f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}project-id", - "user_id": f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}user-id", - "uuid": f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}node-id", - "mem_limit": f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}memory-limit", - "swarm_stack_name": f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}swarm-stack-name", -} -_UNDEFINED_LABEL_VALUE_STR: Final[str] = "undefined" -_UNDEFINED_LABEL_VALUE_INT: Final[str] = "0" - - -DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY: Final[DockerLabelKey] = ( - TypeAdapter(DockerLabelKey).validate_python("ec2-instance-type") -) - - -def to_simcore_runtime_docker_label_key(key: str) -> DockerLabelKey: - return DockerLabelKey( - f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}{key.replace('_', '-').lower()}" - ) - - -class StandardSimcoreDockerLabels(BaseModel): - """ - Represents the standard label on oSparc created containers (not yet services) - In order to create this object in code, please use model_construct() method! - """ - - user_id: UserID = Field(..., alias=f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}user-id") # type: ignore[literal-required] - project_id: ProjectID = Field( # type: ignore[literal-required] - ..., alias=f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}project-id" - ) - node_id: NodeID = Field(..., alias=f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}node-id") # type: ignore[literal-required] - - product_name: ProductName = Field( # type: ignore[literal-required] - ..., alias=f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}product-name" - ) - simcore_user_agent: str = Field( # type: ignore[literal-required] - ..., alias=f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}simcore-user-agent" - ) - - swarm_stack_name: str = Field( # type: ignore[literal-required] - ..., alias=f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}swarm-stack-name" - ) - - memory_limit: ByteSize = Field( # type: ignore[literal-required] - ..., alias=f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}memory-limit" - ) - cpu_limit: float = Field( # type: ignore[literal-required] - ..., alias=f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}cpu-limit" - ) - - @model_validator(mode="before") - @classmethod - def _backwards_compatibility(cls, values: dict[str, Any]) -> dict[str, Any]: - # NOTE: this is necessary for dy-sidecar and legacy service until they are adjusted - if mapped_values := { - _BACKWARDS_COMPATIBILITY_SIMCORE_RUNTIME_DOCKER_LABELS_MAP[k]: v - for k, v in values.items() - if k in _BACKWARDS_COMPATIBILITY_SIMCORE_RUNTIME_DOCKER_LABELS_MAP - }: - # these values were sometimes omitted, so let's provide some defaults - for key in ["product-name", "simcore-user-agent", "swarm-stack-name"]: - mapped_values.setdefault( - f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}{key}", - _UNDEFINED_LABEL_VALUE_STR, - ) - - mapped_values.setdefault( - f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}memory-limit", - values.get("memory_limit", _UNDEFINED_LABEL_VALUE_INT), - ) - - def _convert_nano_cpus_to_cpus(nano_cpu: str) -> str: - with 
contextlib.suppress(ValidationError): - return f"{TypeAdapter(float).validate_python(nano_cpu) / (1.0 * 10**9):.2f}" - return _UNDEFINED_LABEL_VALUE_INT - - mapped_values.setdefault( - f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}cpu-limit", - values.get( - "cpu_limit", - _convert_nano_cpus_to_cpus( - values.get( - "nano_cpus_limit", - _UNDEFINED_LABEL_VALUE_INT, - ) - ), - ), - ) - return mapped_values - return values - - def to_simcore_runtime_docker_labels(self) -> dict[DockerLabelKey, str]: - """returns a dictionary of strings as required by docker""" - return { - to_simcore_runtime_docker_label_key(k): f"{v}" - for k, v in sorted(self.model_dump().items()) - } - - @classmethod - def from_docker_task(cls, docker_task: Task) -> "StandardSimcoreDockerLabels": - assert docker_task.spec # nosec - assert docker_task.spec.container_spec # nosec - task_labels = docker_task.spec.container_spec.labels or {} - return cls.model_validate(task_labels) - - model_config = ConfigDict( - populate_by_name=True, - json_schema_extra={ - "examples": [ - # legacy service labels - { - "study_id": "29f393fc-1410-47b3-b4b9-61dfce21a2a6", - "swarm_stack_name": "devel-simcore", - "user_id": "5", - "uuid": "1f963626-66e1-43f1-a777-33955c08b909", - }, - # legacy container labels - { - "mem_limit": "1073741824", - "nano_cpus_limit": "4000000000", - "node_id": "1f963626-66e1-43f1-a777-33955c08b909", - "simcore_user_agent": "puppeteer", - "study_id": "29f393fc-1410-47b3-b4b9-61dfce21a2a6", - "swarm_stack_name": "devel-simcore", - "user_id": "5", - }, - # dy-sidecar service labels - { - "study_id": "29f393fc-1410-47b3-b4b9-61dfce21a2a6", - "swarm_stack_name": "devel-simcore", - "user_id": "5", - "uuid": "1f963626-66e1-43f1-a777-33955c08b909", - }, - # dy-sidecar container labels - { - "mem_limit": "1073741824", - "nano_cpus_limit": "4000000000", - "study_id": "29f393fc-1410-47b3-b4b9-61dfce21a2a6", - "user_id": "5", - "uuid": "1f963626-66e1-43f1-a777-33955c08b909", - }, - # dy-proxy service labels - { - "dynamic-type": "dynamic-sidecar", - "study_id": "29f393fc-1410-47b3-b4b9-61dfce21a2a6", - "swarm_stack_name": "devel-simcore", - "type": "dependency-v2", - "user_id": "5", - "uuid": "1f963626-66e1-43f1-a777-33955c08b909", - }, - # dy-proxy container labels - { - "study_id": "29f393fc-1410-47b3-b4b9-61dfce21a2a6", - "user_id": "5", - "uuid": "1f963626-66e1-43f1-a777-33955c08b909", - }, - # dy-sidecar user-services labels - { - "product_name": "osparc", - "simcore_user_agent": "puppeteer", - "study_id": "29f393fc-1410-47b3-b4b9-61dfce21a2a6", - "user_id": "5", - "uuid": "1f963626-66e1-43f1-a777-33955c08b909", - }, - # modern both dynamic-sidecar services and computational services - { - "io.simcore.runtime.cpu-limit": "2.4", - "io.simcore.runtime.memory-limit": "1073741824", - "io.simcore.runtime.node-id": "1f963626-66e1-43f1-a777-33955c08b909", - "io.simcore.runtime.product-name": "osparc", - "io.simcore.runtime.project-id": "29f393fc-1410-47b3-b4b9-61dfce21a2a6", - "io.simcore.runtime.simcore-user-agent": "puppeteer", - "io.simcore.runtime.swarm-stack-name": "devel-osparc", - "io.simcore.runtime.user-id": "5", - }, - ] - }, - ) - DockerNodeID: TypeAlias = Annotated[ str, StringConstraints(strip_whitespace=True, pattern=re.compile(r"[a-zA-Z0-9]")) diff --git a/packages/models-library/src/models_library/functions.py b/packages/models-library/src/models_library/functions.py index 75175b62c637..f8a0f68d9255 100644 --- a/packages/models-library/src/models_library/functions.py +++ 
b/packages/models-library/src/models_library/functions.py @@ -11,11 +11,13 @@ from models_library.products import ProductName from models_library.services_types import ServiceKey, ServiceVersion from models_library.users import UserID +from models_library.utils.enums import StrAutoEnum from pydantic import BaseModel, ConfigDict, Field from .projects import ProjectID from .utils.change_case import snake_to_camel +TaskID: TypeAlias = str FunctionID: TypeAlias = UUID FunctionJobID: TypeAlias = UUID FileID: TypeAlias = UUID @@ -98,6 +100,12 @@ class FunctionBase(BaseModel): class RegisteredFunctionBase(FunctionBase): uid: FunctionID created_at: datetime.datetime + modified_at: datetime.datetime + + +class FunctionUpdate(BaseModel): + title: str | None = None + description: str | None = None class ProjectFunction(FunctionBase): @@ -106,7 +114,37 @@ class ProjectFunction(FunctionBase): class RegisteredProjectFunction(ProjectFunction, RegisteredFunctionBase): - pass + model_config = ConfigDict( + populate_by_name=True, + json_schema_extra={ + "examples": [ + { + "function_class": "PROJECT", + "title": "Example Project Function", + "description": "This is an example project function.", + "input_schema": { + "schema_content": { + "type": "object", + "properties": {"input1": {"type": "integer"}}, + }, + "schema_class": "application/schema+json", + }, + "output_schema": { + "schema_content": { + "type": "object", + "properties": {"output1": {"type": "string"}}, + }, + "schema_class": "application/schema+json", + }, + "default_inputs": None, + "project_id": "11111111-1111-1111-1111-111111111111", + "uid": "22222222-2222-2222-2222-222222222222", + "created_at": "2024-01-01T12:00:00", + "modified_at": "2024-01-02T12:00:00", + }, + ] + }, + ) SolverJobID: TypeAlias = UUID @@ -152,35 +190,48 @@ class FunctionJobBase(BaseModel): function_class: FunctionClass -class RegisteredFunctionJobBase(FunctionJobBase): - uid: FunctionJobID - created_at: datetime.datetime - - class ProjectFunctionJob(FunctionJobBase): function_class: Literal[FunctionClass.PROJECT] = FunctionClass.PROJECT - project_job_id: ProjectID + project_job_id: ProjectID | None + job_creation_task_id: TaskID | None -class RegisteredProjectFunctionJob(ProjectFunctionJob, RegisteredFunctionJobBase): - pass +class RegisteredProjectFunctionJobPatch(BaseModel): + function_class: Literal[FunctionClass.PROJECT] = FunctionClass.PROJECT + title: str | None + description: str | None + inputs: FunctionInputs + outputs: FunctionOutputs + project_job_id: ProjectID | None + job_creation_task_id: TaskID | None class SolverFunctionJob(FunctionJobBase): function_class: Literal[FunctionClass.SOLVER] = FunctionClass.SOLVER - solver_job_id: ProjectID + solver_job_id: ProjectID | None + job_creation_task_id: TaskID | None -class RegisteredSolverFunctionJob(SolverFunctionJob, RegisteredFunctionJobBase): - pass +class RegisteredSolverFunctionJobPatch(BaseModel): + function_class: Literal[FunctionClass.SOLVER] = FunctionClass.SOLVER + title: str | None + description: str | None + inputs: FunctionInputs + outputs: FunctionOutputs + solver_job_id: ProjectID | None + job_creation_task_id: TaskID | None class PythonCodeFunctionJob(FunctionJobBase): function_class: Literal[FunctionClass.PYTHON_CODE] = FunctionClass.PYTHON_CODE -class RegisteredPythonCodeFunctionJob(PythonCodeFunctionJob, RegisteredFunctionJobBase): - pass +class RegisteredPythonCodeFunctionJobPatch(BaseModel): + function_class: Literal[FunctionClass.PYTHON_CODE] = FunctionClass.PYTHON_CODE + title: str | 
None + inputs: FunctionInputs + outputs: FunctionOutputs + description: str | None FunctionJob: TypeAlias = Annotated[ @@ -188,6 +239,24 @@ class RegisteredPythonCodeFunctionJob(PythonCodeFunctionJob, RegisteredFunctionJ Field(discriminator="function_class"), ] + +class RegisteredFunctionJobBase(FunctionJobBase): + uid: FunctionJobID + created_at: datetime.datetime + + +class RegisteredProjectFunctionJob(ProjectFunctionJob, RegisteredFunctionJobBase): + pass + + +class RegisteredSolverFunctionJob(SolverFunctionJob, RegisteredFunctionJobBase): + pass + + +class RegisteredPythonCodeFunctionJob(PythonCodeFunctionJob, RegisteredFunctionJobBase): + pass + + RegisteredFunctionJob: TypeAlias = Annotated[ RegisteredProjectFunctionJob | RegisteredPythonCodeFunctionJob @@ -195,11 +264,48 @@ class RegisteredPythonCodeFunctionJob(PythonCodeFunctionJob, RegisteredFunctionJ Field(discriminator="function_class"), ] +RegisteredFunctionJobPatch = Annotated[ + RegisteredProjectFunctionJobPatch + | RegisteredPythonCodeFunctionJobPatch + | RegisteredSolverFunctionJobPatch, + Field(discriminator="function_class"), +] + class FunctionJobStatus(BaseModel): status: str +class RegisteredFunctionJobWithStatusBase(RegisteredFunctionJobBase, FunctionJobBase): + status: FunctionJobStatus + + +class RegisteredProjectFunctionJobWithStatus( + RegisteredProjectFunctionJob, RegisteredFunctionJobWithStatusBase +): + pass + + +class RegisteredSolverFunctionJobWithStatus( + RegisteredSolverFunctionJob, RegisteredFunctionJobWithStatusBase +): + pass + + +class RegisteredPythonCodeFunctionJobWithStatus( + RegisteredPythonCodeFunctionJob, RegisteredFunctionJobWithStatusBase +): + pass + + +RegisteredFunctionJobWithStatus: TypeAlias = Annotated[ + RegisteredProjectFunctionJobWithStatus + | RegisteredPythonCodeFunctionJobWithStatus + | RegisteredSolverFunctionJobWithStatus, + Field(discriminator="function_class"), +] + + class FunctionJobCollection(BaseModel): """Model for a collection of function jobs""" @@ -226,12 +332,20 @@ class FunctionJobDB(BaseModel): class_specific_data: FunctionJobClassSpecificData function_class: FunctionClass + model_config = ConfigDict(from_attributes=True) + class RegisteredFunctionJobDB(FunctionJobDB): uuid: FunctionJobID created: datetime.datetime +class RegisteredFunctionJobWithStatusDB(FunctionJobDB): + uuid: FunctionJobID + created: datetime.datetime + status: str + + class FunctionDB(BaseModel): function_class: FunctionClass title: str = "" @@ -241,16 +355,21 @@ class FunctionDB(BaseModel): default_inputs: FunctionInputs class_specific_data: FunctionClassSpecificData + model_config = ConfigDict(from_attributes=True) + class RegisteredFunctionDB(FunctionDB): uuid: FunctionID created: datetime.datetime + modified: datetime.datetime class FunctionJobCollectionDB(BaseModel): title: str = "" description: str = "" + model_config = ConfigDict(from_attributes=True) + class RegisteredFunctionJobCollectionDB(FunctionJobCollectionDB): uuid: FunctionJobCollectionID @@ -301,6 +420,25 @@ class FunctionAccessRightsDB(BaseModel): ) +class FunctionUserApiAccessRights(BaseModel): + user_id: UserID + read_functions: bool = False + write_functions: bool = False + execute_functions: bool = False + read_function_jobs: bool = False + write_function_jobs: bool = False + execute_function_jobs: bool = False + read_function_job_collections: bool = False + write_function_job_collections: bool = False + execute_function_job_collections: bool = False + + model_config = ConfigDict( + alias_generator=snake_to_camel, + 
populate_by_name=True, + extra="forbid", + ) + + FunctionJobAccessRights: TypeAlias = FunctionAccessRights FunctionJobAccessRightsDB: TypeAlias = FunctionAccessRightsDB FunctionJobUserAccessRights: TypeAlias = FunctionUserAccessRights @@ -310,3 +448,15 @@ class FunctionAccessRightsDB(BaseModel): FunctionJobCollectionAccessRightsDB: TypeAlias = FunctionAccessRightsDB FunctionJobCollectionUserAccessRights: TypeAlias = FunctionUserAccessRights FunctionJobCollectionGroupAccessRights: TypeAlias = FunctionGroupAccessRights + + +class FunctionsApiAccessRights(StrAutoEnum): + READ_FUNCTIONS = "read_functions" + WRITE_FUNCTIONS = "write_functions" + EXECUTE_FUNCTIONS = "execute_functions" + READ_FUNCTION_JOBS = "read_function_jobs" + WRITE_FUNCTION_JOBS = "write_function_jobs" + EXECUTE_FUNCTION_JOBS = "execute_function_jobs" + READ_FUNCTION_JOB_COLLECTIONS = "read_function_job_collections" + WRITE_FUNCTION_JOB_COLLECTIONS = "write_function_job_collections" + EXECUTE_FUNCTION_JOB_COLLECTIONS = "execute_function_job_collections" diff --git a/packages/models-library/src/models_library/functions_errors.py b/packages/models-library/src/models_library/functions_errors.py index 5e00413a8313..629c3dc1c7e9 100644 --- a/packages/models-library/src/models_library/functions_errors.py +++ b/packages/models-library/src/models_library/functions_errors.py @@ -2,84 +2,171 @@ class FunctionBaseError(OsparcErrorMixin, Exception): - pass + status_code: int class FunctionJobReadAccessDeniedError(FunctionBaseError): msg_template: str = ( "Function job {function_job_id} read access denied for user {user_id}" ) + status_code: int = 403 # Forbidden class FunctionIDNotFoundError(FunctionBaseError): msg_template: str = "Function {function_id} not found" + status_code: int = 404 # Not Found + + +class FunctionHasJobsCannotDeleteError(FunctionBaseError): + msg_template: str = ( + "Cannot delete function {function_id} because it has {jobs_count} associated job(s)." 
+ ) + status_code: int = 409 # Conflict class FunctionJobIDNotFoundError(FunctionBaseError): msg_template: str = "Function job {function_job_id} not found" + status_code: int = 404 # Not Found class FunctionInputsValidationError(FunctionBaseError): msg_template: str = "Function inputs validation failed: {error}" + status_code: int = 422 # Unprocessable Entity class FunctionReadAccessDeniedError(FunctionBaseError): msg_template: str = "Function {function_id} read access denied for user {user_id}" + status_code: int = 403 # Forbidden class FunctionJobCollectionIDNotFoundError(FunctionBaseError): msg_template: str = "Function job collection {function_job_collection_id} not found" + status_code: int = 404 # Not Found class UnsupportedFunctionClassError(FunctionBaseError): msg_template: str = "Function class {function_class} is not supported" + status_code: int = 400 # Bad Request class UnsupportedFunctionJobClassError(FunctionBaseError): msg_template: str = "Function job class {function_job_class} is not supported" + status_code: int = 400 # Bad Request class UnsupportedFunctionFunctionJobClassCombinationError(FunctionBaseError): msg_template: str = ( "Function class {function_class} and function job class {function_job_class} combination is not supported" ) + status_code: int = 400 # Bad Request class FunctionJobCollectionReadAccessDeniedError(FunctionBaseError): msg_template: str = ( "Function job collection {function_job_collection_id} read access denied for user {user_id}" ) + status_code: int = 403 # Forbidden class FunctionWriteAccessDeniedError(FunctionBaseError): msg_template: str = "Function {function_id} write access denied for user {user_id}" + status_code: int = 403 # Forbidden class FunctionJobWriteAccessDeniedError(FunctionBaseError): msg_template: str = ( "Function job {function_job_id} write access denied for user {user_id}" ) + status_code: int = 403 # Forbidden class FunctionJobCollectionWriteAccessDeniedError(FunctionBaseError): msg_template: str = ( "Function job collection {function_job_collection_id} write access denied for user {user_id}" ) + status_code: int = 403 # Forbidden class FunctionExecuteAccessDeniedError(FunctionBaseError): msg_template: str = ( "Function {function_id} execute access denied for user {user_id}" ) + status_code: int = 403 # Forbidden class FunctionJobExecuteAccessDeniedError(FunctionBaseError): msg_template: str = ( "Function job {function_job_id} execute access denied for user {user_id}" ) + status_code: int = 403 # Forbidden class FunctionJobCollectionExecuteAccessDeniedError(FunctionBaseError): msg_template: str = ( "Function job collection {function_job_collection_id} execute access denied for user {user_id}" ) + status_code: int = 403 # Forbidden + + +class FunctionsReadApiAccessDeniedError(FunctionBaseError): + msg_template: str = "User {user_id} does not have the permission to read functions" + status_code: int = 403 # Forbidden + + +class FunctionsWriteApiAccessDeniedError(FunctionBaseError): + msg_template: str = "User {user_id} does not have the permission to write functions" + status_code: int = 403 # Forbidden + + +class FunctionsExecuteApiAccessDeniedError(FunctionBaseError): + msg_template: str = ( + "User {user_id} does not have the permission to execute functions" + ) + status_code: int = 403 # Forbidden + + +class FunctionJobsReadApiAccessDeniedError(FunctionBaseError): + msg_template: str = ( + "User {user_id} does not have the permission to read function jobs" + ) + status_code: int = 403 # Forbidden + + +class 
FunctionJobsWriteApiAccessDeniedError(FunctionBaseError): + msg_template: str = ( + "User {user_id} does not have the permission to write function jobs" + ) + status_code: int = 403 # Forbidden + + +class FunctionJobsExecuteApiAccessDeniedError(FunctionBaseError): + msg_template: str = ( + "User {user_id} does not have the permission to execute function jobs" + ) + status_code: int = 403 # Forbidden + + +class FunctionJobCollectionsReadApiAccessDeniedError(FunctionBaseError): + msg_template: str = ( + "User {user_id} does not have the permission to read function job collections" + ) + status_code: int = 403 # Forbidden + + +class FunctionJobCollectionsWriteApiAccessDeniedError(FunctionBaseError): + msg_template: str = ( + "User {user_id} does not have the permission to write function job collections" + ) + status_code: int = 403 # Forbidden + + +class FunctionJobCollectionsExecuteApiAccessDeniedError(FunctionBaseError): + msg_template: str = ( + "User {user_id} does not have the permission to execute function job collections" + ) + status_code: int = 403 # Forbidden + + +class FunctionJobPatchModelIncompatibleError(FunctionBaseError): + msg_template = "Incompatible patch model for Function '{function_id}' in product '{product_name}'." + status_code: int = 422 diff --git a/packages/models-library/src/models_library/groups.py b/packages/models-library/src/models_library/groups.py index d35b1de7dcca..bc12bfa9ad0d 100644 --- a/packages/models-library/src/models_library/groups.py +++ b/packages/models-library/src/models_library/groups.py @@ -15,12 +15,14 @@ EVERYONE_GROUP_ID: Final[int] = 1 GroupID: TypeAlias = PositiveInt +PrimaryGroupID: TypeAlias = Annotated[GroupID, Field(gt=EVERYONE_GROUP_ID)] +StandardGroupID: TypeAlias = Annotated[GroupID, Field(gt=EVERYONE_GROUP_ID)] __all__: tuple[str, ...] 
= ("GroupType",) class Group(BaseModel): - gid: PositiveInt + gid: GroupID name: str description: str group_type: Annotated[GroupType, Field(alias="type")] @@ -39,38 +41,51 @@ class Group(BaseModel): @staticmethod def _update_json_schema_extra(schema: JsonDict) -> None: + everyone: JsonDict = { + "gid": 1, + "name": "Everyone", + "type": "everyone", + "description": "all users", + "thumbnail": None, + } + user: JsonDict = { + "gid": 2, + "name": "User", + "description": "primary group", + "type": "primary", + "thumbnail": None, + } + organization: JsonDict = { + "gid": 3, + "name": "Organization", + "description": "standard group", + "type": "standard", + "thumbnail": None, + "inclusionRules": {}, + } + product: JsonDict = { + "gid": 4, + "name": "Product", + "description": "standard group for products", + "type": "standard", + "thumbnail": None, + } + support: JsonDict = { + "gid": 5, + "name": "Support", + "description": "support group", + "type": "standard", + "thumbnail": None, + } + schema.update( { "examples": [ - { - "gid": 1, - "name": "Everyone", - "type": "everyone", - "description": "all users", - "thumbnail": None, - }, - { - "gid": 2, - "name": "User", - "description": "primary group", - "type": "primary", - "thumbnail": None, - }, - { - "gid": 3, - "name": "Organization", - "description": "standard group", - "type": "standard", - "thumbnail": None, - "inclusionRules": {}, - }, - { - "gid": 4, - "name": "Product", - "description": "standard group for products", - "type": "standard", - "thumbnail": None, - }, + everyone, + user, + organization, + product, + support, ] } ) diff --git a/packages/models-library/src/models_library/osparc_variable_identifier.py b/packages/models-library/src/models_library/osparc_variable_identifier.py index 80a8e6d0fc07..6928405521fb 100644 --- a/packages/models-library/src/models_library/osparc_variable_identifier.py +++ b/packages/models-library/src/models_library/osparc_variable_identifier.py @@ -1,24 +1,21 @@ +import os from copy import deepcopy -from typing import Any, TypeVar +from typing import Annotated, Any, Final, TypeVar from common_library.errors_classes import OsparcErrorMixin from models_library.basic_types import ConstrainedStr - -from pydantic import BaseModel +from pydantic import BaseModel, Discriminator, PositiveInt, Tag from .utils.string_substitution import OSPARC_IDENTIFIER_PREFIX +from .utils.types import get_types_from_annotated_union T = TypeVar("T") -class OsparcVariableIdentifier(ConstrainedStr): +class _BaseOsparcVariableIdentifier(ConstrainedStr): # NOTE: To allow parametrized value, set the type to Union[OsparcVariableIdentifier, ...] 
# NOTE: When dealing with str types, to avoid unexpected behavior, the following # order is suggested `OsparcVariableIdentifier | str` - # NOTE: in below regex `{`` and `}` are respectively escaped with `{{` and `}}` - pattern = ( - rf"^\${{1,2}}(?:\{{)?{OSPARC_IDENTIFIER_PREFIX}[A-Za-z0-9_]+(?:\}})?(:-.+)?$" - ) def _get_without_template_markers(self) -> str: # $VAR @@ -42,6 +39,40 @@ def default_value(self) -> str | None: parts = self._get_without_template_markers().split(":-") return parts[1] if len(parts) > 1 else None + @staticmethod + def get_pattern(max_dollars: PositiveInt) -> str: + # NOTE: in below regex `{`` and `}` are respectively escaped with `{{` and `}}` + return rf"^\${{1,{max_dollars}}}(?:\{{)?{OSPARC_IDENTIFIER_PREFIX}[A-Za-z0-9_]+(?:\}})?(:-.+)?$" + + +class PlatformOsparcVariableIdentifier(_BaseOsparcVariableIdentifier): + pattern = _BaseOsparcVariableIdentifier.get_pattern(max_dollars=2) + + +class OoilOsparcVariableIdentifier(_BaseOsparcVariableIdentifier): + pattern = _BaseOsparcVariableIdentifier.get_pattern(max_dollars=4) + + +_PLATFORM: Final[str] = "platform" +_OOIL_VERSION: Final[str] = "ooil-version" + + +def _get_discriminator_value(v: Any) -> str: + _ = v + if os.environ.get("ENABLE_OOIL_OSPARC_VARIABLE_IDENTIFIER", None): + return _OOIL_VERSION + + return _PLATFORM + + +OsparcVariableIdentifier = Annotated[ + ( + Annotated[PlatformOsparcVariableIdentifier, Tag(_PLATFORM)] + | Annotated[OoilOsparcVariableIdentifier, Tag(_OOIL_VERSION)] + ), + Discriminator(_get_discriminator_value), +] + class UnresolvedOsparcVariableIdentifierError(OsparcErrorMixin, TypeError): msg_template = "Provided argument is unresolved: value={value}" @@ -59,9 +90,9 @@ def example_func(par: OsparcVariableIdentifier | int) -> None: Raises: TypeError: if the the OsparcVariableIdentifier was unresolved """ - if isinstance(var, OsparcVariableIdentifier): + if isinstance(var, get_types_from_annotated_union(OsparcVariableIdentifier)): raise UnresolvedOsparcVariableIdentifierError(value=var) - return var + return var # type: ignore[return-value] def replace_osparc_variable_identifier( # noqa: C901 @@ -86,11 +117,11 @@ def replace_osparc_variable_identifier( # noqa: C901 ``` """ - if isinstance(obj, OsparcVariableIdentifier): - if obj.name in osparc_variables: - return deepcopy(osparc_variables[obj.name]) # type: ignore - if obj.default_value is not None: - return deepcopy(obj.default_value) # type: ignore + if isinstance(obj, get_types_from_annotated_union(OsparcVariableIdentifier)): + if obj.name in osparc_variables: # type: ignore[attr-defined] + return deepcopy(osparc_variables[obj.name]) # type: ignore[no-any-return,attr-defined] + if obj.default_value is not None: # type: ignore[attr-defined] + return deepcopy(obj.default_value) # type: ignore[no-any-return,attr-defined] elif isinstance(obj, dict): for key, value in obj.items(): obj[key] = replace_osparc_variable_identifier(value, osparc_variables) @@ -124,7 +155,7 @@ def raise_if_unresolved_osparc_variable_identifier_found(obj: Any) -> None: UnresolvedOsparcVariableIdentifierError: if not all instances of `OsparcVariableIdentifier` were replaced """ - if isinstance(obj, OsparcVariableIdentifier): + if isinstance(obj, get_types_from_annotated_union(OsparcVariableIdentifier)): raise_if_unresolved(obj) elif isinstance(obj, dict): for key, value in obj.items(): diff --git a/packages/models-library/src/models_library/progress_bar.py b/packages/models-library/src/models_library/progress_bar.py index ad8130570e50..b6665aa93930 100644 --- 
a/packages/models-library/src/models_library/progress_bar.py +++ b/packages/models-library/src/models_library/progress_bar.py @@ -1,6 +1,8 @@ from typing import Literal, TypeAlias +from models_library.utils.json_schema import GenerateResolvedJsonSchema from pydantic import BaseModel, ConfigDict +from pydantic.config import JsonDict # NOTE: keep a list of possible unit, and please use correct official unit names ProgressUnit: TypeAlias = Literal["Byte"] @@ -13,9 +15,10 @@ class ProgressStructuredMessage(BaseModel): unit: str | None = None sub: "ProgressStructuredMessage | None" = None - model_config = ConfigDict( - json_schema_extra={ - "examples": [ + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + examples=[ { "description": "some description", "current": 12.2, @@ -39,8 +42,9 @@ class ProgressStructuredMessage(BaseModel): }, }, ] - } - ) + ) + + model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) UNITLESS = None @@ -96,7 +100,9 @@ def composed_message(self) -> str: { "actual_value": 0.3, "total": 1.0, - "message": ProgressStructuredMessage.model_config["json_schema_extra"]["examples"][2], # type: ignore [index] + "message": ProgressStructuredMessage.model_json_schema( + schema_generator=GenerateResolvedJsonSchema + )["examples"][2], }, ] }, diff --git a/packages/models-library/src/models_library/projects.py b/packages/models-library/src/models_library/projects.py index 0c4dd0884b9a..e66c5a54543f 100644 --- a/packages/models-library/src/models_library/projects.py +++ b/packages/models-library/src/models_library/projects.py @@ -8,11 +8,9 @@ from uuid import UUID from common_library.basic_types import DEFAULT_FACTORY -from models_library.basic_types import ConstrainedStr -from models_library.folders import FolderID -from models_library.workspaces import WorkspaceID from pydantic import ( BaseModel, + BeforeValidator, ConfigDict, Field, HttpUrl, @@ -21,8 +19,11 @@ ) from .basic_regex import DATE_RE, UUID_RE_BASE +from .basic_types import ConstrainedStr from .emails import LowerCaseEmailStr +from .folders import FolderID from .groups import GroupID +from .products import ProductName from .projects_access import AccessRights, GroupIDStr from .projects_nodes import Node from .projects_nodes_io import NodeIDStr @@ -33,6 +34,7 @@ none_to_empty_str_pre_validator, ) from .utils.enums import StrAutoEnum +from .workspaces import WorkspaceID ProjectID: TypeAlias = UUID CommitID: TypeAlias = int @@ -84,6 +86,7 @@ class BaseProjectModel(BaseModel): ] description: Annotated[ str, + BeforeValidator(none_to_empty_str_pre_validator), Field( description="longer one-line description about the project", examples=["Dabbling in temporal transitions ..."], @@ -91,6 +94,9 @@ class BaseProjectModel(BaseModel): ] thumbnail: Annotated[ HttpUrl | None, + BeforeValidator( + empty_str_to_none_pre_validator, + ), Field( description="url of the project thumbnail", examples=["https://placeimg.com/171/96/tech/grayscale/?0.jpg"], @@ -103,15 +109,6 @@ class BaseProjectModel(BaseModel): # Pipeline of nodes (SEE projects_nodes.py) workbench: Annotated[NodesDict, Field(description="Project's pipeline")] - # validators - _empty_thumbnail_is_none = field_validator("thumbnail", mode="before")( - empty_str_to_none_pre_validator - ) - - _none_description_is_empty = field_validator("description", mode="before")( - none_to_empty_str_pre_validator - ) - class ProjectAtDB(BaseProjectModel): # Model used to READ from database @@ -147,6 +144,25 @@ def 
_convert_sql_alchemy_enum(cls, v): ) +class ProjectListAtDB(BaseProjectModel): + id: int + type: ProjectType + template_type: ProjectTemplateType | None + prj_owner: int | None + ui: dict[str, Any] | None + classifiers: list[ClassifierID] | None + dev: dict[str, Any] | None + quality: dict[str, Any] + published: bool | None + hidden: bool + workspace_id: WorkspaceID | None + trashed: datetime | None + trashed_by: UserID | None + trashed_explicitly: bool + product_name: ProductName + folder_id: FolderID | None + + class Project(BaseProjectModel): # NOTE: This is the pydantic pendant of project-v0.0.1.json used in the API of the webserver/webclient # NOT for usage with DB!! diff --git a/packages/models-library/src/models_library/projects_access.py b/packages/models-library/src/models_library/projects_access.py index a1e4db0cc311..2f34bf1183f8 100644 --- a/packages/models-library/src/models_library/projects_access.py +++ b/packages/models-library/src/models_library/projects_access.py @@ -1,18 +1,17 @@ """ - Ownership and access rights +Ownership and access rights """ from enum import Enum +from typing import Annotated from pydantic import BaseModel, ConfigDict, Field -from pydantic.types import PositiveInt from .basic_types import IDStr -from .users import FirstNameStr, LastNameStr +from .users import UserID -class GroupIDStr(IDStr): - ... +class GroupIDStr(IDStr): ... class AccessEnum(str, Enum): @@ -22,26 +21,23 @@ class AccessEnum(str, Enum): class AccessRights(BaseModel): - read: bool = Field(..., description="has read access") - write: bool = Field(..., description="has write access") - delete: bool = Field(..., description="has deletion rights") + read: Annotated[bool, Field(description="has read access")] + write: Annotated[bool, Field(description="has write access")] + delete: Annotated[bool, Field(description="has deletion rights")] model_config = ConfigDict(extra="forbid") class Owner(BaseModel): - user_id: PositiveInt = Field(..., description="Owner's user id") - first_name: FirstNameStr | None = Field(..., description="Owner's first name") - last_name: LastNameStr | None = Field(..., description="Owner's last name") + user_id: Annotated[UserID, Field(description="Owner's user id")] model_config = ConfigDict( extra="forbid", json_schema_extra={ "examples": [ - # NOTE: None and empty string are both defining an undefined value - {"user_id": 1, "first_name": None, "last_name": None}, - {"user_id": 2, "first_name": "", "last_name": ""}, - {"user_id": 3, "first_name": "John", "last_name": "Smith"}, + {"user_id": 1}, + {"user_id": 42}, + {"user_id": 666}, ] }, ) diff --git a/packages/models-library/src/models_library/projects_nodes.py b/packages/models-library/src/models_library/projects_nodes.py index 66683369f357..1bb91c1ddaac 100644 --- a/packages/models-library/src/models_library/projects_nodes.py +++ b/packages/models-library/src/models_library/projects_nodes.py @@ -2,7 +2,8 @@ Models Node as a central element in a project's pipeline """ -from typing import Annotated, Any, TypeAlias, Union +from enum import auto +from typing import Annotated, Any, Self, TypeAlias, Union from common_library.basic_types import DEFAULT_FACTORY from pydantic import ( @@ -16,10 +17,12 @@ StrictInt, StringConstraints, field_validator, + model_validator, ) from pydantic.config import JsonDict from .basic_types import EnvVarKey, KeyIDStr +from .groups import GroupID from .projects_access import AccessEnum from .projects_nodes_io import ( DatCoreFileLink, @@ -31,8 +34,9 @@ from .projects_nodes_layout import 
Position from .projects_state import RunningState from .services import ServiceKey, ServiceVersion +from .utils.enums import StrAutoEnum -InputTypes = Union[ +InputTypes = Union[ # noqa: UP007 # NOTE: WARNING the order in Union[*] below matters! StrictBool, StrictInt, @@ -44,7 +48,7 @@ DownloadLink, list[Any] | dict[str, Any], # arrays | object ] -OutputTypes = Union[ +OutputTypes = Union[ # noqa: UP007 # NOTE: WARNING the order in Union[*] below matters! StrictBool, StrictInt, @@ -71,6 +75,69 @@ UnitStr: TypeAlias = Annotated[str, StringConstraints(strip_whitespace=True)] +class NodeShareStatus(StrAutoEnum): + OPENING = auto() + OPENED = auto() + CLOSING = auto() + + +class NodeShareState(BaseModel): + locked: Annotated[ + bool, + Field( + description="True if the node is locked, False otherwise", + ), + ] + + current_user_groupids: Annotated[ + list[GroupID] | None, + Field( + description="Group(s) that currently have access to the node (or locked it)" + ), + ] = None + + status: Annotated[ + NodeShareStatus | None, + Field( + description="Reason why the node is locked, None if not locked", + ), + ] = None + + @model_validator(mode="after") + def _validate_lock_state(self) -> Self: + if self.locked and (self.current_user_groupids is None or self.status is None): + msg = "If the node is locked, both 'current_user_groupids' and 'status' must be set" + raise ValueError(msg) + + return self + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + "locked": False, + }, + { + "locked": True, + "current_user_groupids": [666], + "status": "OPENING", + }, + { + "locked": False, + "current_user_groupids": [666, 4563], + "status": "OPENED", + }, + ] + } + ) + + model_config = ConfigDict( + extra="forbid", json_schema_extra=_update_json_schema_extra + ) + + class NodeState(BaseModel): modified: Annotated[ bool, @@ -104,16 +171,28 @@ class NodeState(BaseModel): ), ] = 0 + lock_state: Annotated[ + NodeShareState | None, Field(description="the node's lock state") + ] = None + model_config = ConfigDict( extra="forbid", - populate_by_name=True, + validate_by_alias=True, + validate_by_name=True, json_schema_extra={ "examples": [ + # example with alias name { "modified": True, "dependencies": [], "currentStatus": "NOT_STARTED", }, + # example with field name + { + "modified": True, + "dependencies": [], + "current_status": "NOT_STARTED", + }, { "modified": True, "dependencies": ["42838344-03de-4ce2-8d93-589a5dcdfd05"], @@ -163,8 +242,9 @@ class Node(BaseModel): Field( ge=0, le=100, - description="the node progress value (deprecated in DB, still used for API only)", - deprecated=True, + description="the node progress value", + deprecated=True, # NOTE: still used in the File Picker (frontend nodes) and must be removed first from there before retiring it here + # SEE https://github.com/ITISFoundation/osparc-simcore/issues/8365 ), ] = None @@ -173,6 +253,8 @@ class Node(BaseModel): Field( description="url of the latest screenshot of the node", examples=["https://placeimg.com/171/96/tech/grayscale/?0.jpg"], + deprecated=True, + # SEE https://github.com/ITISFoundation/osparc-simcore/issues/8365 ), ] = None @@ -192,7 +274,7 @@ class Node(BaseModel): ] = DEFAULT_FACTORY inputs_required: Annotated[ - list[InputID], + list[InputID] | None, Field( default_factory=list, description="Defines inputs that are required in order to run the service", @@ -231,15 +313,22 @@ class Node(BaseModel): Field(default_factory=dict, description="values of output 
properties"), ] = DEFAULT_FACTORY - output_node: Annotated[bool | None, Field(deprecated=True, alias="outputNode")] = ( - None - ) + output_node: Annotated[ + bool | None, + Field( + deprecated=True, + alias="outputNode", + # SEE https://github.com/ITISFoundation/osparc-simcore/issues/8365 + ), + ] = None - output_nodes: Annotated[ + output_nodes: Annotated[ # <-- (DEPRECATED) Can be removed list[NodeID] | None, Field( description="Used in group-nodes. Node IDs of those connected to the output", alias="outputNodes", + deprecated=True, + # SEE https://github.com/ITISFoundation/osparc-simcore/issues/8365 ), ] = None @@ -247,6 +336,8 @@ class Node(BaseModel): NodeID | None, Field( description="Parent's (group-nodes') node ID s. Used to group", + deprecated=True, + # SEE https://github.com/ITISFoundation/osparc-simcore/issues/8365 ), ] = None @@ -263,6 +354,9 @@ class Node(BaseModel): Field(default_factory=NodeState, description="The node's state object"), ] = DEFAULT_FACTORY + # NOTE: requested_resources should be here! WARNING: this model is used both in database and rest api! + # Model for project_nodes table should NOT be Node but a different one ! + boot_options: Annotated[ dict[EnvVarKey, str] | None, Field( @@ -286,7 +380,6 @@ def _convert_empty_str_to_none(cls, v): @classmethod def _convert_from_enum(cls, v): if isinstance(v, str): - # the old version of state was a enum of RunningState running_state_value = _convert_old_enum_name(v) return NodeState(current_status=running_state_value) @@ -383,12 +476,14 @@ def _update_json_schema_extra(schema: JsonDict) -> None: model_config = ConfigDict( extra="forbid", - populate_by_name=True, + validate_by_name=True, + validate_by_alias=True, json_schema_extra=_update_json_schema_extra, ) class PartialNode(Node): - key: Annotated[ServiceKey, Field(default=None)] - version: Annotated[ServiceVersion, Field(default=None)] - label: Annotated[str, Field(default=None)] + # NOTE: `type: ignore[assignment]` is needed because mypy gets confused when overriding the types by adding the Union with None + key: ServiceKey | None = None # type: ignore[assignment] + version: ServiceVersion | None = None # type: ignore[assignment] + label: str | None = None # type: ignore[assignment] diff --git a/packages/models-library/src/models_library/projects_nodes_io.py b/packages/models-library/src/models_library/projects_nodes_io.py index 90fdf1412780..d4708582dead 100644 --- a/packages/models-library/src/models_library/projects_nodes_io.py +++ b/packages/models-library/src/models_library/projects_nodes_io.py @@ -30,10 +30,9 @@ UUID_RE, ) -NodeID = UUID - UUIDStr: TypeAlias = Annotated[str, StringConstraints(pattern=UUID_RE)] +NodeID: TypeAlias = UUID NodeIDStr: TypeAlias = UUIDStr LocationID: TypeAlias = int diff --git a/packages/models-library/src/models_library/projects_state.py b/packages/models-library/src/models_library/projects_state.py index cef15bce5b5a..f3cd126d8b21 100644 --- a/packages/models-library/src/models_library/projects_state.py +++ b/packages/models-library/src/models_library/projects_state.py @@ -3,7 +3,7 @@ """ from enum import Enum, unique -from typing import Annotated +from typing import Annotated, Final, Self, TypeAlias from pydantic import ( BaseModel, @@ -13,7 +13,9 @@ field_validator, model_validator, ) +from pydantic.config import JsonDict +from .groups import GroupID from .projects_access import Owner @@ -22,18 +24,32 @@ class RunningState(str, Enum): """State of execution of a project's computational workflow SEE StateType for task state + + 
# Computational backend states explained: + - UNKNOWN - The backend doesn't know about the task anymore, it has disappeared from the system or it was never created (eg. when we are asking for the task) + - NOT_STARTED - Default state when the task is created + - PUBLISHED - The task has been submitted to the computational backend (click on "Run" button in the UI) + - PENDING - Task has been transferred to the Dask scheduler and is waiting for a worker to pick it up (director-v2 --> Dask scheduler) + - But! it is also transition state (ex. PENDING -> WAITING_FOR_CLUSTER -> PENDING -> WAITING_FOR_RESOURCES -> PENDING -> STARTED) + - WAITING_FOR_CLUSTER - No cluster (Dask scheduler) is available to run the task; waiting for one to become available + - WAITING_FOR_RESOURCES - No worker (Dask worker) is available to run the task; waiting for one to become available + - STARTED - A worker has picked up the task and is executing it + - SUCCESS - Task finished successfully + - FAILED - Task finished with an error + - ABORTED - Task was aborted before completion + """ UNKNOWN = "UNKNOWN" - PUBLISHED = "PUBLISHED" NOT_STARTED = "NOT_STARTED" + PUBLISHED = "PUBLISHED" PENDING = "PENDING" + WAITING_FOR_CLUSTER = "WAITING_FOR_CLUSTER" WAITING_FOR_RESOURCES = "WAITING_FOR_RESOURCES" STARTED = "STARTED" SUCCESS = "SUCCESS" FAILED = "FAILED" ABORTED = "ABORTED" - WAITING_FOR_CLUSTER = "WAITING_FOR_CLUSTER" @staticmethod def list_running_states() -> list["RunningState"]: @@ -49,6 +65,13 @@ def is_running(self) -> bool: return self in self.list_running_states() +RUNNING_STATE_COMPLETED_STATES: Final[tuple[RunningState, ...]] = ( + RunningState.ABORTED, + RunningState.FAILED, + RunningState.SUCCESS, +) + + @unique class DataState(str, Enum): UP_TO_DATE = "UPTODATE" @@ -63,7 +86,101 @@ class ProjectStatus(str, Enum): EXPORTING = "EXPORTING" OPENING = "OPENING" OPENED = "OPENED" - MAINTAINING = "MAINTAINING" + MAINTAINING = "MAINTAINING" # used for maintenance tasks, like removing EFS data + + +ProjectShareStatus: TypeAlias = Annotated[ + ProjectStatus, Field(description="The status of the project") +] +ProjectShareLocked: TypeAlias = Annotated[ + bool, Field(description="True if the project is locked") +] +ProjectShareCurrentUserGroupIDs: TypeAlias = Annotated[ + list[GroupID], + Field( + description="Current users in the project (if the project is locked, the list contains only the lock owner)" + ), +] + + +class ProjectShareState(BaseModel): + status: ProjectShareStatus + locked: ProjectShareLocked + current_user_groupids: ProjectShareCurrentUserGroupIDs + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + "status": ProjectStatus.CLOSED, + "locked": False, + "current_user_groupids": [], + }, + { + "status": ProjectStatus.OPENING, + "locked": False, + "current_user_groupids": [ + "7", + "15", + "666", + ], + }, + { + "status": ProjectStatus.OPENED, + "locked": False, + "current_user_groupids": [ + "7", + "15", + "666", + ], + }, + { + "status": ProjectStatus.CLONING, + "locked": True, + "current_user_groupids": [ + "666", + ], + }, + ] + } + ) + + model_config = ConfigDict( + extra="forbid", json_schema_extra=_update_json_schema_extra + ) + + @model_validator(mode="after") + def check_model_valid(self) -> Self: + if ( + self.status + in [ + ProjectStatus.CLONING, + ProjectStatus.EXPORTING, + ProjectStatus.MAINTAINING, + ] + and not self.locked + ): + msg = f"Project is {self.status=}, but it is not locked" + raise ValueError(msg) + if 
self.locked and not self.current_user_groupids: + msg = "If the project is locked, the current_users list must contain at least the lock owner" + raise ValueError(msg) + if self.status is ProjectStatus.CLOSED: + if self.locked: + msg = "If the project is closed, it cannot be locked" + raise ValueError(msg) + if self.current_user_groupids: + msg = "If the project is closed, the current_users list must be empty" + raise ValueError(msg) + elif not self.current_user_groupids and ( + self.status is not ProjectStatus.MAINTAINING + ): + msg = f"If the project is {self.status=}, the current_users list must not be empty" + raise ValueError(msg) + + return self class ProjectLocked(BaseModel): @@ -85,8 +202,6 @@ class ProjectLocked(BaseModel): "status": ProjectStatus.OPENED, "owner": { "user_id": 123, - "first_name": "Johnny", - "last_name": "Cash", }, }, ] @@ -130,8 +245,16 @@ class ProjectRunningState(BaseModel): model_config = ConfigDict(extra="forbid") +ProjectStateShareState: TypeAlias = Annotated[ + ProjectShareState, Field(description="The project share state") +] +ProjectStateRunningState: TypeAlias = Annotated[ + ProjectRunningState, Field(description="The project running state") +] + + class ProjectState(BaseModel): - locked: Annotated[ProjectLocked, Field(..., description="The project lock state")] - state: ProjectRunningState = Field(..., description="The project running state") + share_state: ProjectStateShareState + state: ProjectStateRunningState model_config = ConfigDict(extra="forbid") diff --git a/packages/models-library/src/models_library/rabbitmq_messages.py b/packages/models-library/src/models_library/rabbitmq_messages.py index 44d2b7ddc9a0..d6c8135eb99d 100--- a/packages/models-library/src/models_library/rabbitmq_messages.py +++ b/packages/models-library/src/models_library/rabbitmq_messages.py @@ -316,3 +316,13 @@ class WalletCreditsLimitReachedMessage(RabbitMessageBase): def routing_key(self) -> str | None: return f"{self.wallet_id}.{self.credits_limit}" + + +class ComputationalPipelineStatusMessage(RabbitMessageBase, ProjectMessageBase): + channel_name: Literal["io.simcore.service.computation.pipeline-status"] = ( + "io.simcore.service.computation.pipeline-status" + ) + run_result: RunningState + + def routing_key(self) -> str | None: + return f"{self.project_id}.all_nodes" diff --git a/packages/models-library/src/models_library/rest_error.py b/packages/models-library/src/models_library/rest_error.py index 71cc1b877b66..ce85977c0491 100644 --- a/packages/models-library/src/models_library/rest_error.py +++ b/packages/models-library/src/models_library/rest_error.py @@ -4,6 +4,7 @@ from common_library.basic_types import DEFAULT_FACTORY from models_library.generics import Envelope from pydantic import BaseModel, ConfigDict, Field +from pydantic.config import JsonDict from .basic_types import IDStr, LogLevel @@ -72,43 +73,60 @@ class ErrorGet(BaseModel): description="Message displayed to the user", ), ] + support_id: Annotated[ IDStr | None, Field(description="ID to track the incident during support", alias="supportId"), ] = None - status: int - # NOTE: The fields blow are DEPRECATED. Still here to keep compatibilty with front-end until updated + status: Annotated[ + int, + Field( + description="Redundant HTTP status code of the error. " "Must be the same as in the HTTP response" + ), + ] + + # NOTE: The fields below are DEPRECATED. 
+ # Still here to keep compatibility with front-end until updated errors: Annotated[ list[ErrorItemType], Field(deprecated=True, default_factory=list, json_schema_extra={"default": []}), ] = DEFAULT_FACTORY + logs: Annotated[ list[LogMessageType], Field(deprecated=True, default_factory=list, json_schema_extra={"default": []}), ] = DEFAULT_FACTORY + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + "message": "Sorry you do not have sufficient access rights for product", + "status": 401, + }, + { + "message": "Oops this error was unexpected. We are working on that!", + "supportId": "OEC:12346789", + "status": 500, + }, + ] + } + ) + model_config = ConfigDict( populate_by_name=True, extra="ignore", # Used to prune extra fields from internal data frozen=True, - json_schema_extra={ - "examples": [ - { - "message": "Sorry you do not have sufficient access rights for product", - "status": 401, - }, - { - "message": "Opps this error was unexpected. We are working on that!", - "supportId": "OEC:12346789", - "status": 500, - }, - ] - }, + json_schema_extra=_update_json_schema_extra, ) class EnvelopedError(Envelope[None]): + # SEE https://github.com/ITISFoundation/osparc-simcore/issues/443 error: ErrorGet model_config = ConfigDict( diff --git a/packages/models-library/src/models_library/rpc/webserver/auth/api_keys.py b/packages/models-library/src/models_library/rpc/webserver/auth/api_keys.py index 80d248d90456..3b0da5ed78cd 100644 --- a/packages/models-library/src/models_library/rpc/webserver/auth/api_keys.py +++ b/packages/models-library/src/models_library/rpc/webserver/auth/api_keys.py @@ -16,8 +16,12 @@ _SECRET_LEN: Final = 20 +def generate_api_key_prefix(name: str) -> str: + return _PUNCTUATION_REGEX.sub("_", name[:5]) + + def generate_unique_api_key(name: str, length: int = _KEY_LEN) -> str: - prefix = _PUNCTUATION_REGEX.sub("_", name[:5]) + prefix = generate_api_key_prefix(name) hashed = hashlib.sha256(name.encode()).hexdigest() return f"{prefix}_{hashed[:length]}" diff --git a/packages/models-library/src/models_library/rpc/webserver/projects.py b/packages/models-library/src/models_library/rpc/webserver/projects.py index d1bfff342134..e136b1fd99d4 100644 --- a/packages/models-library/src/models_library/rpc/webserver/projects.py +++ b/packages/models-library/src/models_library/rpc/webserver/projects.py @@ -90,6 +90,7 @@ class ProjectJobRpcGet(BaseModel): # Specific to jobs job_parent_resource_name: str + storage_assets_deleted: bool @staticmethod def _update_json_schema_extra(schema: JsonDict) -> None: @@ -105,6 +106,7 @@ def _update_json_schema_extra(schema: JsonDict) -> None: "created_at": "2023-01-01T00:00:00Z", "modified_at": "2023-01-01T00:00:00Z", "job_parent_resource_name": "solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.2", + "storage_assets_deleted": "false", }, { "uuid": "00000000-1234-5678-1234-123456789012", @@ -114,6 +116,7 @@ def _update_json_schema_extra(schema: JsonDict) -> None: "created_at": "2023-02-01T00:00:00Z", "modified_at": "2023-02-01T00:00:00Z", "job_parent_resource_name": "studies/96642f2a-a72c-11ef-8776-02420a00087d", + "storage_assets_deleted": "true", }, { "uuid": "00000000-0000-5678-1234-123456789012", @@ -123,6 +126,7 @@ def _update_json_schema_extra(schema: JsonDict) -> None: "created_at": "2023-03-01T00:00:00Z", "modified_at": "2023-03-01T00:00:00Z", "job_parent_resource_name": "program/simcore%2Fservices%2Fdynamic%2Fjupyter/releases/5.0.2", + "storage_assets_deleted": "false", }, 
] } diff --git a/packages/models-library/src/models_library/service_settings_labels.py b/packages/models-library/src/models_library/service_settings_labels.py index b3e1956caba4..5b2e3dbc1a1a 100644 --- a/packages/models-library/src/models_library/service_settings_labels.py +++ b/packages/models-library/src/models_library/service_settings_labels.py @@ -372,6 +372,16 @@ class DynamicSidecarServiceLabels(BaseModel): ), ] = None + is_collaborative: Annotated[ + bool, + Field( + alias="simcore.service.is-collaborative", + description=( + "if True, the service is collaborative and will not be locked" + ), + ), + ] = False + compose_spec: Annotated[ Json[ComposeSpecLabelDict | None] | None, Field( @@ -570,8 +580,9 @@ def _not_allowed_in_both_specs(self): "containers_allowed_outgoing_internet", "containers_allowed_outgoing_permit_list", } - if match_keys & set(self.model_fields) != match_keys: - err_msg = f"Expected the following keys {match_keys} to be present {self.model_fields=}" + cls = self.__class__ + if match_keys & set(cls.model_fields) != match_keys: + err_msg = f"Expected the following keys {match_keys} to be present {cls.model_fields=}" raise ValueError(err_msg) if ( @@ -662,6 +673,7 @@ def _update_json_schema_extra(schema: JsonDict) -> None: "simcore.service.user-preferences-path": json_dumps( "/tmp/path_to_preferences" # noqa: S108 ), + "simcore.service.is_collaborative": "False", }, # dynamic-service with compose spec { @@ -701,6 +713,7 @@ def _update_json_schema_extra(schema: JsonDict) -> None: "simcore.service.callbacks-mapping": json_dumps( CallbacksMapping.model_json_schema()["examples"][3] ), + "simcore.service.is_collaborative": "True", }, ] }, diff --git a/packages/models-library/src/models_library/service_settings_nat_rule.py b/packages/models-library/src/models_library/service_settings_nat_rule.py index 1f50b62f5037..9c3bd06d087f 100644 --- a/packages/models-library/src/models_library/service_settings_nat_rule.py +++ b/packages/models-library/src/models_library/service_settings_nat_rule.py @@ -1,10 +1,21 @@ from collections.abc import Generator from typing import Final -from pydantic import BaseModel, ConfigDict, Field, TypeAdapter, ValidationInfo, field_validator +from pydantic import ( + BaseModel, + ConfigDict, + Field, + TypeAdapter, + ValidationInfo, + field_validator, +) from .basic_types import PortInt -from .osparc_variable_identifier import OsparcVariableIdentifier, raise_if_unresolved +from .osparc_variable_identifier import ( + OsparcVariableIdentifier, + raise_if_unresolved, +) +from .utils.types import get_types_from_annotated_union # Cloudflare DNS server address DEFAULT_DNS_SERVER_ADDRESS: Final[str] = "1.1.1.1" # NOSONAR @@ -20,13 +31,15 @@ class _PortRange(BaseModel): @field_validator("upper") @classmethod def lower_less_than_upper(cls, v, info: ValidationInfo) -> PortInt: - if isinstance(v, OsparcVariableIdentifier): + if isinstance(v, get_types_from_annotated_union(OsparcVariableIdentifier)): return v # type: ignore # bypass validation if unresolved upper = v lower: PortInt | OsparcVariableIdentifier | None = info.data.get("lower") - if lower and isinstance(lower, OsparcVariableIdentifier): + if lower and isinstance( + lower, get_types_from_annotated_union(OsparcVariableIdentifier) + ): return v # type: ignore # bypass validation if unresolved if lower is None or lower >= upper: diff --git a/packages/models-library/src/models_library/services_metadata_published.py b/packages/models-library/src/models_library/services_metadata_published.py index 
51fba05b7f42..8163a91c365e 100644 --- a/packages/models-library/src/models_library/services_metadata_published.py +++ b/packages/models-library/src/models_library/services_metadata_published.py @@ -76,8 +76,12 @@ } }, "boot-options": { - "example_service_defined_boot_mode": BootOption.model_config["json_schema_extra"]["examples"][0], # type: ignore [index] - "example_service_defined_theme_selection": BootOption.model_config["json_schema_extra"]["examples"][1], # type: ignore [index] + "example_service_defined_boot_mode": BootOption.model_json_schema()["examples"][ + 0 + ], + "example_service_defined_theme_selection": BootOption.model_json_schema()[ + "examples" + ][1], }, "min-visible-inputs": 2, } diff --git a/packages/models-library/src/models_library/services_metadata_runtime.py b/packages/models-library/src/models_library/services_metadata_runtime.py new file mode 100644 index 000000000000..b2b9cc01e765 --- /dev/null +++ b/packages/models-library/src/models_library/services_metadata_runtime.py @@ -0,0 +1,204 @@ +import contextlib +from typing import Any, Final + +from pydantic import ( + BaseModel, + ByteSize, + ConfigDict, + Field, + TypeAdapter, + ValidationError, + model_validator, +) + +from .docker import DockerLabelKey +from .generated_models.docker_rest_api import Task +from .products import ProductName +from .projects import ProjectID +from .projects_nodes_io import NodeID +from .users import UserID + +DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY: Final[DockerLabelKey] = ( + TypeAdapter(DockerLabelKey).validate_python("ec2-instance-type") +) + +_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX: Final[str] = "io.simcore.runtime." +_BACKWARDS_COMPATIBILITY_SIMCORE_RUNTIME_DOCKER_LABELS_MAP: Final[dict[str, str]] = { + "node_id": f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}node-id", + "product_name": f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}product-name", + "project_id": f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}project-id", + "simcore_user_agent": f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}simcore-user-agent", + "study_id": f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}project-id", + "user_id": f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}user-id", + "uuid": f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}node-id", + "mem_limit": f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}memory-limit", + "swarm_stack_name": f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}swarm-stack-name", +} +_UNDEFINED_LABEL_VALUE_STR: Final[str] = "undefined" +_UNDEFINED_LABEL_VALUE_INT: Final[str] = "0" + + +def to_simcore_runtime_docker_label_key(key: str) -> DockerLabelKey: + return DockerLabelKey( + f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}{key.replace('_', '-').lower()}" + ) + + +class SimcoreContainerLabels(BaseModel): + """ + Represents the standard label on oSparc created containers (not yet services) + In order to create this object in code, please use model_construct() method! 
+ """ + + user_id: UserID = Field(..., alias=f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}user-id") # type: ignore[literal-required] + project_id: ProjectID = Field( # type: ignore[literal-required] + ..., alias=f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}project-id" + ) + node_id: NodeID = Field(..., alias=f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}node-id") # type: ignore[literal-required] + + product_name: ProductName = Field( # type: ignore[literal-required] + ..., alias=f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}product-name" + ) + simcore_user_agent: str = Field( # type: ignore[literal-required] + ..., alias=f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}simcore-user-agent" + ) + + swarm_stack_name: str = Field( # type: ignore[literal-required] + ..., alias=f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}swarm-stack-name" + ) + + memory_limit: ByteSize = Field( # type: ignore[literal-required] + ..., alias=f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}memory-limit" + ) + cpu_limit: float = Field( # type: ignore[literal-required] + ..., alias=f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}cpu-limit" + ) + + @model_validator(mode="before") + @classmethod + def _backwards_compatibility(cls, values: dict[str, Any]) -> dict[str, Any]: + # NOTE: this is necessary for dy-sidecar and legacy service until they are adjusted + if mapped_values := { + _BACKWARDS_COMPATIBILITY_SIMCORE_RUNTIME_DOCKER_LABELS_MAP[k]: v + for k, v in values.items() + if k in _BACKWARDS_COMPATIBILITY_SIMCORE_RUNTIME_DOCKER_LABELS_MAP + }: + # these values were sometimes omitted, so let's provide some defaults + for key in ["product-name", "simcore-user-agent", "swarm-stack-name"]: + mapped_values.setdefault( + f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}{key}", + _UNDEFINED_LABEL_VALUE_STR, + ) + + mapped_values.setdefault( + f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}memory-limit", + values.get("memory_limit", _UNDEFINED_LABEL_VALUE_INT), + ) + + def _convert_nano_cpus_to_cpus(nano_cpu: str) -> str: + with contextlib.suppress(ValidationError): + return f"{TypeAdapter(float).validate_python(nano_cpu) / (1.0 * 10**9):.2f}" + return _UNDEFINED_LABEL_VALUE_INT + + mapped_values.setdefault( + f"{_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX}cpu-limit", + values.get( + "cpu_limit", + _convert_nano_cpus_to_cpus( + values.get( + "nano_cpus_limit", + _UNDEFINED_LABEL_VALUE_INT, + ) + ), + ), + ) + return mapped_values + return values + + def to_simcore_runtime_docker_labels(self) -> dict[DockerLabelKey, str]: + """returns a dictionary of strings as required by docker""" + return { + to_simcore_runtime_docker_label_key(k): f"{v}" + for k, v in sorted(self.model_dump().items()) + } + + @classmethod + def from_docker_task(cls, docker_task: Task) -> "SimcoreContainerLabels": + assert docker_task.spec # nosec + assert docker_task.spec.container_spec # nosec + task_labels = docker_task.spec.container_spec.labels or {} + return cls.model_validate(task_labels) + + model_config = ConfigDict( + populate_by_name=True, + json_schema_extra={ + "examples": [ + # legacy service labels + { + "study_id": "29f393fc-1410-47b3-b4b9-61dfce21a2a6", + "swarm_stack_name": "devel-simcore", + "user_id": "5", + "uuid": "1f963626-66e1-43f1-a777-33955c08b909", + }, + # legacy container labels + { + "mem_limit": "1073741824", + "nano_cpus_limit": "4000000000", + "node_id": "1f963626-66e1-43f1-a777-33955c08b909", + "simcore_user_agent": "puppeteer", + "study_id": "29f393fc-1410-47b3-b4b9-61dfce21a2a6", + "swarm_stack_name": "devel-simcore", + "user_id": "5", + }, + # dy-sidecar service labels + { + "study_id": 
"29f393fc-1410-47b3-b4b9-61dfce21a2a6", + "swarm_stack_name": "devel-simcore", + "user_id": "5", + "uuid": "1f963626-66e1-43f1-a777-33955c08b909", + }, + # dy-sidecar container labels + { + "mem_limit": "1073741824", + "nano_cpus_limit": "4000000000", + "study_id": "29f393fc-1410-47b3-b4b9-61dfce21a2a6", + "user_id": "5", + "uuid": "1f963626-66e1-43f1-a777-33955c08b909", + }, + # dy-proxy service labels + { + "dynamic-type": "dynamic-sidecar", + "study_id": "29f393fc-1410-47b3-b4b9-61dfce21a2a6", + "swarm_stack_name": "devel-simcore", + "type": "dependency-v2", + "user_id": "5", + "uuid": "1f963626-66e1-43f1-a777-33955c08b909", + }, + # dy-proxy container labels + { + "study_id": "29f393fc-1410-47b3-b4b9-61dfce21a2a6", + "user_id": "5", + "uuid": "1f963626-66e1-43f1-a777-33955c08b909", + }, + # dy-sidecar user-services labels + { + "product_name": "osparc", + "simcore_user_agent": "puppeteer", + "study_id": "29f393fc-1410-47b3-b4b9-61dfce21a2a6", + "user_id": "5", + "uuid": "1f963626-66e1-43f1-a777-33955c08b909", + }, + # modern both dynamic-sidecar services and computational services + { + "io.simcore.runtime.cpu-limit": "2.4", + "io.simcore.runtime.memory-limit": "1073741824", + "io.simcore.runtime.node-id": "1f963626-66e1-43f1-a777-33955c08b909", + "io.simcore.runtime.product-name": "osparc", + "io.simcore.runtime.project-id": "29f393fc-1410-47b3-b4b9-61dfce21a2a6", + "io.simcore.runtime.simcore-user-agent": "puppeteer", + "io.simcore.runtime.swarm-stack-name": "devel-osparc", + "io.simcore.runtime.user-id": "5", + }, + ] + }, + ) diff --git a/packages/models-library/src/models_library/users.py b/packages/models-library/src/models_library/users.py index 3b8d4344b4b6..eba810e7df9d 100644 --- a/packages/models-library/src/models_library/users.py +++ b/packages/models-library/src/models_library/users.py @@ -2,7 +2,6 @@ from typing import Annotated, TypeAlias from common_library.users_enums import UserRole -from models_library.basic_types import IDStr from pydantic import BaseModel, ConfigDict, Field, PositiveInt, StringConstraints from pydantic.config import JsonDict from typing_extensions import ( # https://docs.pydantic.dev/latest/api/standard_library_types/#typeddict @@ -12,8 +11,9 @@ from .emails import LowerCaseEmailStr UserID: TypeAlias = PositiveInt -UserNameID: TypeAlias = IDStr - +UserNameID: TypeAlias = Annotated[ + str, StringConstraints(strip_whitespace=True, min_length=1, max_length=100) +] FirstNameStr: TypeAlias = Annotated[ str, StringConstraints(strip_whitespace=True, max_length=255) @@ -38,6 +38,7 @@ class MyProfile(BaseModel): email: LowerCaseEmailStr role: UserRole privacy: PrivacyDict + phone: str | None expiration_date: datetime.date | None = None @staticmethod @@ -50,6 +51,7 @@ def _update_json_schema_extra(schema: JsonDict) -> None: "user_name": "PtN5Ab0uv", "first_name": "PtN5Ab0uv", "last_name": "", + "phone": None, "role": "GUEST", "privacy": { "hide_email": True, @@ -69,8 +71,11 @@ class UserBillingDetails(BaseModel): institution: str | None address: str | None city: str | None - state: str | None = Field(description="State, province, canton, ...") - country: str # Required for taxes + state: Annotated[str | None, Field(description="State, province, canton, ...")] + country: Annotated[ + str, + Field(description="Billing country (with standardize name) required for taxes"), + ] postal_code: str | None phone: str | None diff --git a/packages/models-library/src/models_library/utils/enums.py b/packages/models-library/src/models_library/utils/enums.py index 
7f0ff7eaf486..59e95d61e3da 100644 --- a/packages/models-library/src/models_library/utils/enums.py +++ b/packages/models-library/src/models_library/utils/enums.py @@ -6,7 +6,9 @@ @unique class StrAutoEnum(StrEnum): @staticmethod - def _generate_next_value_(name, start, count, last_values): + def _generate_next_value_( + name: str, start: int, count: int, last_values: list[str] # noqa: ARG004 + ) -> str: return name.upper() diff --git a/packages/models-library/src/models_library/utils/json_schema.py b/packages/models-library/src/models_library/utils/json_schema.py index 1c5afc4ca55d..2c6255a193ea 100644 --- a/packages/models-library/src/models_library/utils/json_schema.py +++ b/packages/models-library/src/models_library/utils/json_schema.py @@ -5,6 +5,7 @@ See how is used to validate input/output content-schemas of service models """ + # SEE possible enhancements in https://github.com/ITISFoundation/osparc-simcore/issues/3008 @@ -13,8 +14,11 @@ from copy import deepcopy from typing import Any +import jsonref # type: ignore[import-untyped] import jsonschema from jsonschema import validators +from pydantic.json_schema import GenerateJsonSchema, JsonSchemaMode, JsonSchemaValue +from pydantic_core import CoreSchema # ERRORS @@ -92,6 +96,19 @@ def any_ref_key(obj): return False +class GenerateResolvedJsonSchema(GenerateJsonSchema): + """Generates a json-schema with all $ref resolved + Usage: pydantic_base_model.model_json_schema(schema_generator=GenerateResolvedJsonSchema) returns a json schema where it is guaranteed that all json references are resolved. + """ + + def generate( + self, schema: CoreSchema, mode: JsonSchemaMode = "validation" + ) -> JsonSchemaValue: + schema_value = super().generate(schema=schema, mode=mode) + schema_value = jsonref.replace_refs(schema_value, jsonschema=True) + return JsonSchemaValue(schema_value) + + __all__: tuple[str, ...] = ( "any_ref_key", "InvalidJsonSchema", diff --git a/packages/models-library/src/models_library/utils/types.py b/packages/models-library/src/models_library/utils/types.py new file mode 100644 index 000000000000..ac310a82b882 --- /dev/null +++ b/packages/models-library/src/models_library/utils/types.py @@ -0,0 +1,34 @@ +from functools import lru_cache +from typing import Annotated, Any, Union, get_args, get_origin + + +@lru_cache +def get_types_from_annotated_union(annotated_alias: Any) -> tuple[type, ...]: + """ + Introspects a complex Annotated alias to extract the base types from its inner Union. + """ + if get_origin(annotated_alias) is not Annotated: + msg = "Expected an Annotated type." + raise TypeError(msg) + + # Get the contents of Annotated, e.g., (Union[...], Discriminator(...)) + annotated_args = get_args(annotated_alias) + union_type = annotated_args[0] + + # The Union can be from typing.Union or the | operator + if get_origin(union_type) is not Union: + msg = "Expected a Union inside the Annotated type." 
+ raise TypeError(msg) + + # Get the members of the Union, e.g., (Annotated[TypeA, ...], Annotated[TypeB, ...]) + union_members = get_args(union_type) + + extracted_types = [] + for member in union_members: + # Each member is also Annotated, so we extract its base type + if get_origin(member) is Annotated: + extracted_types.append(get_args(member)[0]) + else: + extracted_types.append(member) # Handle non-annotated members in the union + + return tuple(extracted_types) diff --git a/packages/models-library/tests/test__pydantic_models.py b/packages/models-library/tests/test__pydantic_models.py index 1d9dc14e3b46..4e2aa2f4a231 100644 --- a/packages/models-library/tests/test__pydantic_models.py +++ b/packages/models-library/tests/test__pydantic_models.py @@ -1,4 +1,4 @@ -""" This test suite does not intend to re-test pydantic but rather +"""This test suite does not intend to re-test pydantic but rather check some "corner cases" or critical setups with pydantic model such that: - we can ensure a given behaviour is preserved through updates @@ -6,13 +6,14 @@ """ -from typing import Any, Union, get_args, get_origin +from typing import Any, Literal, Union, get_args, get_origin import pytest from common_library.json_serialization import json_dumps from models_library.projects_nodes import InputTypes, OutputTypes from models_library.projects_nodes_io import SimCoreFileLink -from pydantic import BaseModel, Field, TypeAdapter, ValidationError +from models_library.utils.change_case import snake_to_camel +from pydantic import BaseModel, ConfigDict, Field, TypeAdapter, ValidationError from pydantic.types import Json from pydantic.version import version_short @@ -120,7 +121,7 @@ class Func(BaseModel): {"$ref": "#/$defs/DatCoreFileLink"}, {"$ref": "#/$defs/DownloadLink"}, {"type": "array", "items": {}}, - {"type": "object"}, + {"type": "object", "additionalProperties": True}, ], } @@ -154,7 +155,7 @@ class Func(BaseModel): MINIMAL = 2 # <--- index of the example with the minimum required fields assert SimCoreFileLink in get_args(OutputTypes) example = SimCoreFileLink.model_validate( - SimCoreFileLink.model_config["json_schema_extra"]["examples"][MINIMAL] + SimCoreFileLink.model_json_schema()["examples"][MINIMAL] ) model = Func.model_validate( { @@ -183,7 +184,9 @@ def test_nullable_fields_from_pydantic_v1(): # SEE https://github.com/ITISFoundation/osparc-simcore/pull/6751 class MyModel(BaseModel): # pydanticv1 would add a default to fields set as nullable - nullable_required: str | None # <--- This was default to =None in pydantic 1 !!! + nullable_required: ( + str | None + ) # <--- This was default to =None in pydantic 1 !!! nullable_required_with_hyphen: str | None = Field(default=...) nullable_optional: str | None = None @@ -209,3 +212,112 @@ class MyModel(BaseModel): data["nullable_required"] = None model = MyModel.model_validate(data) assert model.model_dump(exclude_unset=True) == data + + +# BELOW some tests related to deprecated `populate_by_name` in pydantic v2.11+ !! +# +# https://docs.pydantic.dev/latest/api/config/#pydantic.config.ConfigDict.populate_by_name +# +# `populate_by_name` usage is not recommended in v2.11+ and will be deprecated in v3. Instead, you should use the validate_by_name configuration setting. +# When validate_by_name=True and validate_by_alias=True, this is strictly equivalent to the previous behavior of populate_by_name=True. +# In v2.11, we also introduced a validate_by_alias setting that introduces more fine grained control for validation behavior. 
+# Here's how you might go about using the new settings to achieve the same behavior: +# + + +@pytest.mark.parametrize("extra", ["ignore", "allow", "forbid"]) +@pytest.mark.parametrize( + "validate_by_alias, validate_by_name", + [ + # NOTE: (False, False) is not allowed: at least one has to be True! + # SEE https://docs.pydantic.dev/latest/api/config/#pydantic.config.ConfigDict.validate_by_alias + (False, True), + (True, False), + (True, True), + ], +) +def test_model_config_validate_by_alias_and_name( + validate_by_alias: bool, + validate_by_name: bool, + extra: Literal["ignore", "allow", "forbid"], +): + class TestModel(BaseModel): + snake_case: str | None = None + + model_config = ConfigDict( + validate_by_alias=validate_by_alias, + validate_by_name=validate_by_name, + extra=extra, + alias_generator=snake_to_camel, + ) + + assert TestModel.model_config.get("populate_by_name") is None + assert TestModel.model_config.get("validate_by_alias") is validate_by_alias + assert TestModel.model_config.get("validate_by_name") is validate_by_name + assert TestModel.model_config.get("extra") == extra + + if validate_by_alias is False: + + if extra == "forbid": + with pytest.raises(ValidationError): + TestModel.model_validate({"snakeCase": "foo"}) + + elif extra == "ignore": + model = TestModel.model_validate({"snakeCase": "foo"}) + assert model.snake_case is None + assert model.model_dump() == {"snake_case": None} + + elif extra == "allow": + model = TestModel.model_validate({"snakeCase": "foo"}) + assert model.snake_case is None + assert model.model_dump() == {"snake_case": None, "snakeCase": "foo"} + + else: + assert TestModel.model_validate({"snakeCase": "foo"}).snake_case == "foo" + + if validate_by_name is False: + if extra == "forbid": + with pytest.raises(ValidationError): + TestModel.model_validate({"snake_case": "foo"}) + + elif extra == "ignore": + model = TestModel.model_validate({"snake_case": "foo"}) + assert model.snake_case is None + assert model.model_dump() == {"snake_case": None} + + elif extra == "allow": + model = TestModel.model_validate({"snake_case": "foo"}) + assert model.snake_case is None + assert model.model_dump() == {"snake_case": "foo"} + else: + assert TestModel.model_validate({"snake_case": "foo"}).snake_case == "foo" + + +@pytest.mark.parametrize("populate_by_name", [True, False]) +def test_model_config_populate_by_name(populate_by_name: bool): + # SEE https://docs.pydantic.dev/latest/api/config/#pydantic.config.ConfigDict.populate_by_name + class TestModel(BaseModel): + snake_case: str | None = None + + model_config = ConfigDict( + populate_by_name=populate_by_name, + extra="forbid", # easier to check the effect of populate_by_name! + alias_generator=snake_to_camel, + ) + + # checks how they are set + assert TestModel.model_config.get("populate_by_name") is populate_by_name + assert TestModel.model_config.get("extra") == "forbid" + + # NOTE how defaults work with populate_by_name!! 
+ assert TestModel.model_config.get("validate_by_name") == populate_by_name + assert TestModel.model_config.get("validate_by_alias") is True # Default + + # validate_by_alias BEHAVIUOR defaults to True + TestModel.model_validate({"snakeCase": "foo"}) + + if populate_by_name: + assert TestModel.model_validate({"snake_case": "foo"}).snake_case == "foo" + else: + with pytest.raises(ValidationError): + TestModel.model_validate({"snake_case": "foo"}) diff --git a/packages/models-library/tests/test_api_schemas_long_running_tasks_tasks.py b/packages/models-library/tests/test_api_schemas_long_running_tasks_tasks.py new file mode 100644 index 000000000000..5acdf168ccc5 --- /dev/null +++ b/packages/models-library/tests/test_api_schemas_long_running_tasks_tasks.py @@ -0,0 +1,56 @@ +import pytest +from models_library.api_schemas_long_running_tasks.tasks import TaskGet +from pydantic import TypeAdapter + + +def _get_data_without_task_name(task_id: str) -> dict: + return { + "task_id": task_id, + "status_href": "", + "result_href": "", + "abort_href": "", + } + + +@pytest.mark.parametrize( + "data, expected_task_name", + [ + (_get_data_without_task_name("a.b.c.d"), "b"), + (_get_data_without_task_name("a.b.c"), "b"), + (_get_data_without_task_name("a.b"), "b"), + (_get_data_without_task_name("a"), "a"), + ], +) +def test_try_extract_task_name(data: dict, expected_task_name: str) -> None: + task_get = TaskGet(**data) + assert task_get.task_name == expected_task_name + + task_get = TypeAdapter(TaskGet).validate_python(data) + assert task_get.task_name == expected_task_name + + +def _get_data_with_task_name(task_id: str, task_name: str) -> dict: + return { + "task_id": task_id, + "task_name": task_name, + "status_href": "", + "result_href": "", + "abort_href": "", + } + + +@pytest.mark.parametrize( + "data, expected_task_name", + [ + (_get_data_with_task_name("a.b.c.d", "a_name"), "a_name"), + (_get_data_with_task_name("a.b.c", "a_name"), "a_name"), + (_get_data_with_task_name("a.b", "a_name"), "a_name"), + (_get_data_with_task_name("a", "a_name"), "a_name"), + ], +) +def test_task_name_is_provided(data: dict, expected_task_name: str) -> None: + task_get = TaskGet(**data) + assert task_get.task_name == expected_task_name + + task_get = TypeAdapter(TaskGet).validate_python(data) + assert task_get.task_name == expected_task_name diff --git a/packages/models-library/tests/test_api_schemas_webserver_users.py b/packages/models-library/tests/test_api_schemas_webserver_users.py index afefb91c4811..43375a67e208 100644 --- a/packages/models-library/tests/test_api_schemas_webserver_users.py +++ b/packages/models-library/tests/test_api_schemas_webserver_users.py @@ -8,44 +8,44 @@ import pytest from common_library.users_enums import UserRole from models_library.api_schemas_webserver.users import ( - MyProfileGet, - MyProfilePatch, + MyProfileRestGet, + MyProfileRestPatch, ) from pydantic import ValidationError @pytest.mark.parametrize("user_role", [u.name for u in UserRole]) def test_profile_get_role(user_role: str): - for example in MyProfileGet.model_json_schema()["examples"]: + for example in MyProfileRestGet.model_json_schema()["examples"]: data = deepcopy(example) data["role"] = user_role - m1 = MyProfileGet(**data) + m1 = MyProfileRestGet(**data) data["role"] = UserRole(user_role) - m2 = MyProfileGet(**data) + m2 = MyProfileRestGet(**data) assert m1 == m2 def test_my_profile_patch_username_min_len(): # minimum length username is 4 with pytest.raises(ValidationError) as err_info: - 
MyProfilePatch.model_validate({"userName": "abc"}) + MyProfileRestPatch.model_validate({"userName": "abc"}) assert err_info.value.error_count() == 1 assert err_info.value.errors()[0]["type"] == "too_short" - MyProfilePatch.model_validate({"userName": "abcd"}) # OK + MyProfileRestPatch.model_validate({"userName": "abcd"}) # OK def test_my_profile_patch_username_valid_characters(): # Ensure valid characters (alphanumeric + . _ -) with pytest.raises(ValidationError, match="start with a letter") as err_info: - MyProfilePatch.model_validate({"userName": "1234"}) + MyProfileRestPatch.model_validate({"userName": "1234"}) assert err_info.value.error_count() == 1 assert err_info.value.errors()[0]["type"] == "value_error" - MyProfilePatch.model_validate({"userName": "u1234"}) # OK + MyProfileRestPatch.model_validate({"userName": "u1234"}) # OK def test_my_profile_patch_username_special_characters(): @@ -53,29 +53,29 @@ def test_my_profile_patch_username_special_characters(): with pytest.raises( ValidationError, match="consecutive special characters" ) as err_info: - MyProfilePatch.model_validate({"userName": "u1__234"}) + MyProfileRestPatch.model_validate({"userName": "u1__234"}) assert err_info.value.error_count() == 1 assert err_info.value.errors()[0]["type"] == "value_error" - MyProfilePatch.model_validate({"userName": "u1_234"}) # OK + MyProfileRestPatch.model_validate({"userName": "u1_234"}) # OK # Ensure it doesn't end with a special character with pytest.raises(ValidationError, match="end with") as err_info: - MyProfilePatch.model_validate({"userName": "u1234_"}) + MyProfileRestPatch.model_validate({"userName": "u1234_"}) assert err_info.value.error_count() == 1 assert err_info.value.errors()[0]["type"] == "value_error" - MyProfilePatch.model_validate({"userName": "u1_234"}) # OK + MyProfileRestPatch.model_validate({"userName": "u1_234"}) # OK def test_my_profile_patch_username_reserved_words(): # Check reserved words (example list; extend as needed) with pytest.raises(ValidationError, match="cannot be used") as err_info: - MyProfilePatch.model_validate({"userName": "admin"}) + MyProfileRestPatch.model_validate({"userName": "admin"}) assert err_info.value.error_count() == 1 assert err_info.value.errors()[0]["type"] == "value_error" - MyProfilePatch.model_validate({"userName": "midas"}) # OK + MyProfileRestPatch.model_validate({"userName": "midas"}) # OK diff --git a/packages/models-library/tests/test_docker.py b/packages/models-library/tests/test_docker.py index ae1c636a9e22..3fc8f5ba3df4 100644 --- a/packages/models-library/tests/test_docker.py +++ b/packages/models-library/tests/test_docker.py @@ -9,10 +9,12 @@ import pytest from faker import Faker from models_library.docker import ( - _SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX, DockerGenericTag, DockerLabelKey, - StandardSimcoreDockerLabels, +) +from models_library.services_metadata_runtime import ( + _SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX, + SimcoreContainerLabels, ) from pydantic import ByteSize, TypeAdapter, ValidationError @@ -104,29 +106,27 @@ def test_docker_generic_tag(image_name: str, valid: bool): @pytest.mark.parametrize( "obj_data", - StandardSimcoreDockerLabels.model_config["json_schema_extra"]["examples"], + SimcoreContainerLabels.model_config["json_schema_extra"]["examples"], ids=str, ) def test_simcore_service_docker_label_keys(obj_data: dict[str, Any]): - simcore_service_docker_label_keys = StandardSimcoreDockerLabels.model_validate( - obj_data - ) + simcore_service_docker_label_keys = 
SimcoreContainerLabels.model_validate(obj_data) exported_dict = simcore_service_docker_label_keys.to_simcore_runtime_docker_labels() - assert all( - isinstance(v, str) for v in exported_dict.values() - ), "docker labels must be strings!" + assert all(isinstance(v, str) for v in exported_dict.values()), ( + "docker labels must be strings!" + ) assert all( key.startswith(_SIMCORE_RUNTIME_DOCKER_LABEL_PREFIX) for key in exported_dict ) - re_imported_docker_label_keys = TypeAdapter( - StandardSimcoreDockerLabels - ).validate_python(exported_dict) + re_imported_docker_label_keys = TypeAdapter(SimcoreContainerLabels).validate_python( + exported_dict + ) assert re_imported_docker_label_keys assert simcore_service_docker_label_keys == re_imported_docker_label_keys def test_simcore_service_docker_label_keys_construction(): - simcore_service_docker_label_keys = StandardSimcoreDockerLabels( + simcore_service_docker_label_keys = SimcoreContainerLabels( user_id=8268, project_id=UUID("5ea24ce0-0e4d-4ee6-a3f1-e4799752a684"), node_id=UUID("c17c6279-23c6-412f-8826-867323a7711a"), diff --git a/packages/models-library/tests/test_project_nodes.py b/packages/models-library/tests/test_project_nodes.py index 54a5d14bf241..c25703b6f953 100644 --- a/packages/models-library/tests/test_project_nodes.py +++ b/packages/models-library/tests/test_project_nodes.py @@ -29,6 +29,7 @@ def test_create_minimal_node(minimal_node_data_sample: dict[str, Any]): assert node.state.current_status == RunningState.NOT_STARTED assert node.state.modified is True assert node.state.dependencies == set() + assert node.state.lock_state is None assert node.parent is None assert node.progress is None @@ -37,7 +38,7 @@ def test_create_minimal_node(minimal_node_data_sample: dict[str, Any]): def test_create_minimal_node_with_new_data_type( - minimal_node_data_sample: dict[str, Any] + minimal_node_data_sample: dict[str, Any], ): old_node_data = minimal_node_data_sample # found some old data with this aspect diff --git a/packages/models-library/tests/test_projects_state.py b/packages/models-library/tests/test_projects_state.py index 08493f9f3b13..9405a4d36cb7 100644 --- a/packages/models-library/tests/test_projects_state.py +++ b/packages/models-library/tests/test_projects_state.py @@ -1,9 +1,13 @@ import pytest -from models_library.projects_state import ProjectLocked, ProjectStatus +from models_library.projects_state import ( + ProjectLocked, + ProjectShareState, + ProjectStatus, +) def test_project_locked_with_missing_owner_raises(): - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=r"1 validation error for ProjectLocked"): ProjectLocked(value=True, status=ProjectStatus.OPENED) ProjectLocked.model_validate({"value": False, "status": ProjectStatus.OPENED}) @@ -22,5 +26,38 @@ def test_project_locked_with_missing_owner_ok_during_maintaining(): + [(True, ProjectStatus.CLOSED)], ) def test_project_locked_with_allowed_values(lock: bool, status: ProjectStatus): - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=r"1 validation error for ProjectLocked"): ProjectLocked.model_validate({"value": lock, "status": status}) + + +@pytest.mark.parametrize( + "status,locked,current_users,should_raise", + [ + (ProjectStatus.CLOSED, False, [], False), + (ProjectStatus.OPENING, False, [1, 2], False), + (ProjectStatus.OPENED, False, [1], False), + (ProjectStatus.CLONING, True, [1], False), + (ProjectStatus.EXPORTING, True, [1], False), + (ProjectStatus.MAINTAINING, True, [1], False), + # Invalid: locked but no users + 
(ProjectStatus.CLONING, True, [], True), + # Invalid: closed but has users + (ProjectStatus.CLOSED, False, [1], True), + # Invalid: not closed but no users + (ProjectStatus.OPENED, False, [], True), + ], +) +def test_project_share_state_validations(status, locked, current_users, should_raise): + data = { + "status": status, + "locked": locked, + "current_user_groupids": current_users, + } + if should_raise: + with pytest.raises(ValueError, match=r"If the project is "): + ProjectShareState.model_validate(data) + else: + state = ProjectShareState.model_validate(data) + assert state.status == status + assert state.locked == locked + assert state.current_user_groupids == current_users diff --git a/packages/models-library/tests/test_service_settings_labels.py b/packages/models-library/tests/test_service_settings_labels.py index c056902d8e92..1d10aa4741b4 100644 --- a/packages/models-library/tests/test_service_settings_labels.py +++ b/packages/models-library/tests/test_service_settings_labels.py @@ -52,12 +52,12 @@ class _Parametrization(NamedTuple): ), "dynamic-service": _Parametrization( example=SimcoreServiceLabels.model_json_schema()["examples"][1], - items=5, + items=6, uses_dynamic_sidecar=True, ), "dynamic-service-with-compose-spec": _Parametrization( example=SimcoreServiceLabels.model_json_schema()["examples"][2], - items=6, + items=7, uses_dynamic_sidecar=True, ), } diff --git a/packages/models-library/tests/test_service_settings_nat_rule.py b/packages/models-library/tests/test_service_settings_nat_rule.py index c6f9f05497cb..c931985a27a8 100644 --- a/packages/models-library/tests/test_service_settings_nat_rule.py +++ b/packages/models-library/tests/test_service_settings_nat_rule.py @@ -9,6 +9,7 @@ replace_osparc_variable_identifier, ) from models_library.service_settings_nat_rule import NATRule +from models_library.utils.types import get_types_from_annotated_union from pydantic import TypeAdapter SUPPORTED_TEMPLATES: set[str] = { @@ -111,13 +112,13 @@ def test_______(replace_with_value: Any): a_var = TypeAdapter(OsparcVariableIdentifier).validate_python( "$OSPARC_VARIABLE_some_var" ) - assert isinstance(a_var, OsparcVariableIdentifier) + assert isinstance(a_var, get_types_from_annotated_union(OsparcVariableIdentifier)) replaced_var = replace_osparc_variable_identifier( a_var, {"OSPARC_VARIABLE_some_var": replace_with_value} ) # NOTE: after replacement the original reference still points - assert isinstance(a_var, OsparcVariableIdentifier) + assert isinstance(a_var, get_types_from_annotated_union(OsparcVariableIdentifier)) assert replaced_var == replace_with_value @@ -154,7 +155,7 @@ def test_replace_an_instance_of_osparc_variable_identifier( formatted_template = var_template a_var = TypeAdapter(OsparcVariableIdentifier).validate_python(formatted_template) - assert isinstance(a_var, OsparcVariableIdentifier) + assert isinstance(a_var, get_types_from_annotated_union(OsparcVariableIdentifier)) replace_with_identifier_default = identifier_has_default and replace_with_default replacement_content = ( @@ -162,7 +163,7 @@ def test_replace_an_instance_of_osparc_variable_identifier( ) replaced_var = replace_osparc_variable_identifier(a_var, replacement_content) # NOTE: after replacement the original reference still points - assert isinstance(a_var, OsparcVariableIdentifier) + assert isinstance(a_var, get_types_from_annotated_union(OsparcVariableIdentifier)) if replace_with_identifier_default: assert replaced_var == default_value else: diff --git a/packages/models-library/tests/test_services.py 
b/packages/models-library/tests/test_services.py index c7b7562eaa63..fca51e57e334 100644 --- a/packages/models-library/tests/test_services.py +++ b/packages/models-library/tests/test_services.py @@ -29,7 +29,7 @@ def minimal_service_common_data() -> dict[str, Any]: def test_create_minimal_service_common_data( - minimal_service_common_data: dict[str, Any] + minimal_service_common_data: dict[str, Any], ): service = ServiceBaseDisplay(**minimal_service_common_data) @@ -67,7 +67,7 @@ def test_node_with_thumbnail(minimal_service_common_data: dict[str, Any]): ) -@pytest.mark.parametrize("pattern", (SERVICE_KEY_RE, SERVICE_ENCODED_KEY_RE)) +@pytest.mark.parametrize("pattern", [SERVICE_KEY_RE, SERVICE_ENCODED_KEY_RE]) @pytest.mark.parametrize( "service_key", [ @@ -182,9 +182,9 @@ def _find_pattern_entry(obj: dict[str, Any], key: str) -> Any: def test_boot_option_wrong_default() -> None: - for example in [deepcopy(x) for x in BootOption.model_config["json_schema_extra"]["examples"]]: - with pytest.raises(ValueError): - example["default"] = "__undefined__" + for example in [deepcopy(x) for x in BootOption.model_json_schema()["examples"]]: + example["default"] = "__undefined__" + with pytest.raises(ValueError): # noqa: PT011 assert BootOption(**example) @@ -202,7 +202,8 @@ def test_service_docker_data_labels_convesion(): convension_breaking_fields: set[tuple[str, str]] = set() fields_with_aliases: list[tuple[str, str]] = [ - (name, info.alias) for name, info in ServiceMetaDataPublished.model_fields.items() + (name, info.alias) + for name, info in ServiceMetaDataPublished.model_fields.items() if info.alias is not None ] diff --git a/packages/models-library/tests/test_services_types.py b/packages/models-library/tests/test_services_types.py index 206c531a78fd..e3f0c9d472b2 100644 --- a/packages/models-library/tests/test_services_types.py +++ b/packages/models-library/tests/test_services_types.py @@ -1,9 +1,13 @@ import pytest from models_library.projects import ProjectID from models_library.projects_nodes import NodeID -from models_library.services_types import ServiceRunID +from models_library.services_types import ServiceKey, ServiceRunID, ServiceVersion from models_library.users import UserID -from pydantic import PositiveInt +from pydantic import PositiveInt, TypeAdapter +from pytest_simcore.helpers.faker_factories import ( + random_service_key, + random_service_version, +) @pytest.mark.parametrize( @@ -38,3 +42,14 @@ def test_get_resource_tracking_run_id_for_dynamic(): assert isinstance( ServiceRunID.get_resource_tracking_run_id_for_dynamic(), ServiceRunID ) + + +@pytest.mark.parametrize( + "service_key, service_version", + [(random_service_key(), random_service_version()) for _ in range(10)], +) +def test_faker_factory_service_key_and_version_are_in_sync( + service_key: ServiceKey, service_version: ServiceVersion +): + TypeAdapter(ServiceKey).validate_python(service_key) + TypeAdapter(ServiceVersion).validate_python(service_version) diff --git a/packages/models-library/tests/test_users.py b/packages/models-library/tests/test_users.py index 4c9d2756934e..d1e5dbe4efdc 100644 --- a/packages/models-library/tests/test_users.py +++ b/packages/models-library/tests/test_users.py @@ -1,11 +1,18 @@ -from models_library.api_schemas_webserver.users import MyProfileGet +import pytest +from models_library.api_schemas_webserver.users import ( + MyProfileRestGet, +) from models_library.api_schemas_webserver.users_preferences import Preference from models_library.groups import AccessRightsDict, Group, 
GroupsByTypeTuple from models_library.users import MyProfile from pydantic import TypeAdapter -def test_adapter_from_model_to_schema(): +@pytest.mark.parametrize("with_support_group", [True, False]) +@pytest.mark.parametrize("with_standard_groups", [True, False]) +def test_adapter_from_model_to_schema( + with_support_group: bool, with_standard_groups: bool +): my_profile = MyProfile.model_validate(MyProfile.model_json_schema()["example"]) groups = TypeAdapter(list[Group]).validate_python( @@ -15,13 +22,22 @@ def test_adapter_from_model_to_schema(): ar = AccessRightsDict(read=False, write=False, delete=False) my_groups_by_type = GroupsByTypeTuple( - primary=(groups[1], ar), standard=[(groups[2], ar)], everyone=(groups[0], ar) + primary=(groups[1], ar), + standard=[(groups[2], ar)] if with_standard_groups else [], + everyone=(groups[0], ar), ) - my_product_group = groups[-1], AccessRightsDict( + my_product_group = groups[3], AccessRightsDict( read=False, write=False, delete=False ) + + my_support_group = groups[4] + my_preferences = {"foo": Preference(default_value=3, value=1)} - MyProfileGet.from_domain_model( - my_profile, my_groups_by_type, my_product_group, my_preferences + MyProfileRestGet.from_domain_model( + my_profile, + my_groups_by_type, + my_product_group, + my_preferences, + my_support_group if with_support_group else None, ) diff --git a/packages/notifications-library/requirements/_base.txt b/packages/notifications-library/requirements/_base.txt index 59793cd02df6..8e19a1ebe991 100644 --- a/packages/notifications-library/requirements/_base.txt +++ b/packages/notifications-library/requirements/_base.txt @@ -14,12 +14,8 @@ attrs==25.1.0 # via # jsonschema # referencing -click==8.1.8 +click==8.2.1 # via typer -deprecated==1.2.18 - # via - # opentelemetry-api - # opentelemetry-semantic-conventions dnspython==2.7.0 # via email-validator email-validator==2.2.0 @@ -30,9 +26,7 @@ idna==3.10 # via # email-validator # yarl -importlib-metadata==8.5.0 - # via opentelemetry-api -jinja2==3.1.5 +jinja2==3.1.6 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -43,11 +37,13 @@ jinja2==3.1.5 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in +jsonref==1.1.0 + # via -r requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.23.0 # via -r requirements/../../../packages/models-library/requirements/_base.in jsonschema-specifications==2024.10.1 # via jsonschema -mako==1.3.9 +mako==1.3.10 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -68,19 +64,6 @@ mdurl==0.1.2 # via markdown-it-py multidict==6.1.0 # via yarl -opentelemetry-api==1.30.0 - # via - # opentelemetry-instrumentation - # opentelemetry-instrumentation-asyncpg - # opentelemetry-semantic-conventions -opentelemetry-instrumentation==0.51b0 - # via opentelemetry-instrumentation-asyncpg -opentelemetry-instrumentation-asyncpg==0.51b0 - # via -r requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-semantic-conventions==0.51b0 - # via - # 
opentelemetry-instrumentation - # opentelemetry-instrumentation-asyncpg orjson==3.10.15 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -96,13 +79,11 @@ orjson==3.10.15 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -packaging==24.2 - # via opentelemetry-instrumentation propcache==0.3.0 # via yarl psycopg2-binary==2.9.10 # via sqlalchemy -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -121,9 +102,9 @@ pydantic==2.10.6 # -r requirements/../../../packages/settings-library/requirements/_base.in # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.2 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -160,7 +141,7 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -rich==13.9.4 +rich==14.1.0 # via # -r requirements/../../../packages/settings-library/requirements/_base.in # typer @@ -184,22 +165,19 @@ sqlalchemy==1.4.54 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/postgres-database/requirements/_base.in # alembic -typer==0.15.2 +typer==0.16.1 # via -r requirements/../../../packages/settings-library/requirements/_base.in types-python-dateutil==2.9.0.20241206 # via arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # alembic # pydantic # pydantic-core # pydantic-extra-types # typer -wrapt==1.17.2 - # via - # deprecated - # opentelemetry-instrumentation + # typing-inspection +typing-inspection==0.4.1 + # via pydantic yarl==1.18.3 # via -r requirements/../../../packages/postgres-database/requirements/_base.in -zipp==3.21.0 - # via importlib-metadata diff --git a/packages/notifications-library/requirements/_test.txt b/packages/notifications-library/requirements/_test.txt index b378ea239846..5c89b9907e09 100644 --- a/packages/notifications-library/requirements/_test.txt +++ b/packages/notifications-library/requirements/_test.txt @@ -2,7 +2,7 @@ aiodocker==0.24.0 # via -r requirements/_test.in aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../requirements/constraints.txt # aiodocker @@ -48,17 +48,20 @@ multidict==6.1.0 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.15.0 +mypy==1.16.1 # via sqlalchemy -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via mypy packaging==24.2 # via - # -c requirements/_base.txt # pytest # pytest-sugar +pathspec==0.12.1 + # via mypy pluggy==1.5.0 - # via pytest + # via + # pytest + # pytest-cov pprintpp==0.4.0 # via pytest-icdiff propcache==0.3.0 @@ -66,7 +69,11 @@ propcache==0.3.0 # -c requirements/_base.txt # aiohttp # yarl -pytest==8.3.5 +pygments==2.19.1 + # via + # -c requirements/_base.txt + # pytest +pytest==8.4.1 # via # -r requirements/_test.in # 
pytest-asyncio @@ -75,15 +82,15 @@ pytest==8.3.5 # pytest-instafail # pytest-mock # pytest-sugar -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in pytest-icdiff==0.9 # via -r requirements/_test.in pytest-instafail==0.5.0 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -97,7 +104,7 @@ pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_test.in -requests==2.32.3 +requests==2.32.4 # via docker sqlalchemy==1.4.54 # via @@ -112,14 +119,14 @@ termcolor==2.5.0 # via pytest-sugar types-aiofiles==24.1.0.20241221 # via -r requirements/_test.in -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # mypy # sqlalchemy2-stubs tzdata==2025.1 # via faker -urllib3==2.3.0 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # docker diff --git a/packages/notifications-library/requirements/_tools.txt b/packages/notifications-library/requirements/_tools.txt index b5ed94588ea8..aff37438cc0f 100644 --- a/packages/notifications-library/requirements/_tools.txt +++ b/packages/notifications-library/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # black @@ -27,11 +27,11 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # -c requirements/_test.txt # black @@ -40,12 +40,14 @@ nodeenv==1.9.1 # via pre-commit packaging==24.2 # via - # -c requirements/_base.txt # -c requirements/_test.txt # black # build pathspec==0.12.1 - # via black + # via + # -c requirements/_test.txt + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -70,11 +72,11 @@ pyyaml==6.0.2 # pre-commit ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.2 +setuptools==80.9.0 # via pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/packages/notifications-library/src/notifications_library/_models.py b/packages/notifications-library/src/notifications_library/_models.py index c7087cf7f7eb..1b60d80bcd3c 100644 --- a/packages/notifications-library/src/notifications_library/_models.py +++ b/packages/notifications-library/src/notifications_library/_models.py @@ -30,7 +30,6 @@ class SharerData: @dataclass(frozen=True) class ProductUIData: - project_alias: str logo_url: str | None = ( None # default_logo = "https://raw.githubusercontent.com/ITISFoundation/osparc-simcore/refs/heads/master/services/static-webserver/client/source/resource/osparc/osparc-white.svg" in base.html ) diff --git a/packages/notifications-library/src/notifications_library/templates/on_account_requested.email.content.html b/packages/notifications-library/src/notifications_library/templates/on_account_requested.email.content.html index ae26bed2bf47..5a317574c958 100644 --- a/packages/notifications-library/src/notifications_library/templates/on_account_requested.email.content.html +++ b/packages/notifications-library/src/notifications_library/templates/on_account_requested.email.content.html @@ -4,7 +4,11 @@

Dear Support team

-  We have received the following request form for an account in {{ product.display_name }} from {{ host }}
+  We have received the following request form for an account in :
+  <ol>
+    <li>Product: {{ product.display_name }}</li>
+    <li>Host: {{ host }}</li>
+  </ol>

diff --git a/packages/notifications-library/src/notifications_library/templates/on_account_requested.email.content.txt b/packages/notifications-library/src/notifications_library/templates/on_account_requested.email.content.txt
index 0eb9d7d4a641..67b1801b9123 100644
--- a/packages/notifications-library/src/notifications_library/templates/on_account_requested.email.content.txt
+++ b/packages/notifications-library/src/notifications_library/templates/on_account_requested.email.content.txt
@@ -1,6 +1,8 @@
 Dear Support team,
 
-We have received the following request form for an account in {{ product.display_name }} from **{{ host }}**:
+We have received the following request form for an account in :
+- Product: **{{ product.display_name }}**
+- Host: **{{ host }}**
 
 {{ dumps(request_form) }}
 
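Note on the templates above: they are plain Jinja2 files rendered by notifications-library. The sketch below shows one way to render the new on_account_requested text template; the template directory, the registration of the dumps helper, and the context values are illustrative assumptions for this sketch, not the project's actual wiring.

# Minimal rendering sketch (assumed paths, context values and dumps helper)
import json
from pathlib import Path

from jinja2 import Environment, FileSystemLoader, select_autoescape

templates_dir = Path(
    "packages/notifications-library/src/notifications_library/templates"
)
env = Environment(
    loader=FileSystemLoader(templates_dir),
    autoescape=select_autoescape(["html"]),
)
# the .txt template calls `dumps(request_form)`, so a JSON helper must be exposed
env.globals["dumps"] = lambda data: json.dumps(data, indent=1)

context = {
    "product": {"display_name": "osparc"},  # assumed minimal product stub
    "host": "osparc.io",
    "request_form": {"name": "Jane Doe", "email": "jane@example.com"},
}
print(env.get_template("on_account_requested.email.content.txt").render(**context))
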
diff --git a/packages/notifications-library/src/notifications_library/templates/on_share_project.email.content.html b/packages/notifications-library/src/notifications_library/templates/on_share_project.email.content.html
index 2bfd9404271e..5e9121137d98 100644
--- a/packages/notifications-library/src/notifications_library/templates/on_share_project.email.content.html
+++ b/packages/notifications-library/src/notifications_library/templates/on_share_project.email.content.html
@@ -3,9 +3,9 @@
 {% block content %}
 

 <p>Dear {{ user.first_name or user.user_name }},</p>

-<p>Great news! {{ sharer.user_name }} has shared a {{ product.ui.project_alias }} with you on {{ product.display_name }}.</p>
+<p>Great news! {{ sharer.user_name }} has shared a project with you on {{ product.display_name }}.</p>

-<p>To view the {{ product.ui.project_alias }} and accept the sharing, follow below:</p>
+<p>To view the project and accept the sharing, follow below:</p>
{% if sharer.message %} diff --git a/packages/notifications-library/src/notifications_library/templates/on_share_project.email.content.txt b/packages/notifications-library/src/notifications_library/templates/on_share_project.email.content.txt index 2fae91408f5c..f1ad3335cdef 100644 --- a/packages/notifications-library/src/notifications_library/templates/on_share_project.email.content.txt +++ b/packages/notifications-library/src/notifications_library/templates/on_share_project.email.content.txt @@ -1,8 +1,8 @@ Dear {{ user.first_name or user.user_name }}, -Great news! {{ sharer.user_name }} has shared a {{ product.ui.project_alias }} with you on {{ product.display_name }}. +Great news! {{ sharer.user_name }} has shared a project with you on {{ product.display_name }}. -To view the {{ product.ui.project_alias }} and accept the sharing, follow below: +To view the project and accept the sharing, follow below: {{ sharer.message }} {{ accept_link }} diff --git a/packages/notifications-library/src/notifications_library/templates/on_share_project.email.subject.txt b/packages/notifications-library/src/notifications_library/templates/on_share_project.email.subject.txt index 0a7f2157a39a..59d89b0a8ede 100644 --- a/packages/notifications-library/src/notifications_library/templates/on_share_project.email.subject.txt +++ b/packages/notifications-library/src/notifications_library/templates/on_share_project.email.subject.txt @@ -1 +1 @@ -A {{ product.ui.project_alias }} was shared with you on {{ host }} +A project was shared with you on {{ host }} diff --git a/packages/notifications-library/tests/conftest.py b/packages/notifications-library/tests/conftest.py index 006b7ed1a7b3..c2575aab6d07 100644 --- a/packages/notifications-library/tests/conftest.py +++ b/packages/notifications-library/tests/conftest.py @@ -66,13 +66,12 @@ def product_data( product_ui = ProductUIData( logo_url=vendor_ui.get("logo_url"), strong_color=vendor_ui.get("strong_color"), - project_alias=vendor_ui["project_alias"], ) return ProductData( # type: ignore product_name=product_name, display_name=product["display_name"], - vendor_display_inline=f"{vendor.get('name','')}, {vendor.get('address','')}", + vendor_display_inline=f"{vendor.get('name', '')}, {vendor.get('address', '')}", support_email=product["support_email"], homepage_url=vendor.get("url"), ui=product_ui, diff --git a/packages/notifications-library/tests/with_db/conftest.py b/packages/notifications-library/tests/with_db/conftest.py index 9dda5da676d3..0ddf0d9f464e 100644 --- a/packages/notifications-library/tests/with_db/conftest.py +++ b/packages/notifications-library/tests/with_db/conftest.py @@ -16,11 +16,14 @@ from models_library.users import UserID from notifications_library._templates import get_default_named_templates from pydantic import validate_call +from pytest_simcore.helpers.postgres_tools import insert_and_get_row_lifespan +from pytest_simcore.helpers.postgres_users import ( + insert_and_get_user_and_secrets_lifespan, +) from simcore_postgres_database.models.jinja2_templates import jinja2_templates from simcore_postgres_database.models.payments_transactions import payments_transactions from simcore_postgres_database.models.products import products from simcore_postgres_database.models.products_to_templates import products_to_templates -from simcore_postgres_database.models.users import users from sqlalchemy.engine.row import Row from sqlalchemy.ext.asyncio.engine import AsyncEngine @@ -50,16 +53,11 @@ async def user( and injects a user in db """ assert user_id 
== user["id"] - pk_args = users.c.id, user["id"] - - # NOTE: creation of primary group and setting `groupid`` is automatically triggered after creation of user by postgres - async with sqlalchemy_async_engine.begin() as conn: - row: Row = await _insert_and_get_row(conn, users, user, *pk_args) - - yield row._asdict() - - async with sqlalchemy_async_engine.begin() as conn: - await _delete_row(conn, users, *pk_args) + async with insert_and_get_user_and_secrets_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup + sqlalchemy_async_engine, + **user, + ) as row: + yield row @pytest.fixture @@ -82,15 +80,14 @@ async def product( # NOTE: osparc product is already in db. This is another product assert product["name"] != "osparc" - pk_args = products.c.name, product["name"] - - async with sqlalchemy_async_engine.begin() as conn: - row: Row = await _insert_and_get_row(conn, products, product, *pk_args) - - yield row._asdict() - - async with sqlalchemy_async_engine.begin() as conn: - await _delete_row(conn, products, *pk_args) + async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup + sqlalchemy_async_engine, + table=products, + values=product, + pk_col=products.c.name, + pk_value=product["name"], + ) as row: + yield row @pytest.fixture diff --git a/packages/postgres-database/docker/Dockerfile b/packages/postgres-database/docker/Dockerfile index cc5be3400397..09cb7f30c7bc 100644 --- a/packages/postgres-database/docker/Dockerfile +++ b/packages/postgres-database/docker/Dockerfile @@ -1,6 +1,6 @@ # syntax=docker/dockerfile:1 ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build # we docker image is built based on debian FROM python:${PYTHON_VERSION}-slim-bookworm AS base @@ -32,10 +32,9 @@ COPY --from=uv_build /uv /uvx /bin/ # NOTE: python virtualenv is used here such that installed packages may be moved to production image easily by copying the venv RUN uv venv "${VIRTUAL_ENV}" -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy ARG GIT_BRANCH ARG GIT_REPOSITORY @@ -46,8 +45,6 @@ RUN git clone --single-branch --branch ${GIT_BRANCH} ${GIT_REPOSITORY} osparc-si FROM base AS production ENV PYTHONOPTIMIZE=TRUE -# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode -ENV UV_COMPILE_BYTECODE=1 WORKDIR /home/scu # ensure home folder is read/writable for user scu @@ -58,11 +55,5 @@ COPY entrypoint.bash /home/entrypoint.bash RUN chmod +x /home/entrypoint.bash -ENV POSTGRES_USER=scu \ - POSTGRES_PASSWORD=adminadmin \ - POSTGRES_HOST=postgres \ - POSTGRES_PORT=5432 \ - POSTGRES_DB=simcoredb - ENTRYPOINT [ "/bin/bash", "/home/entrypoint.bash" ] CMD [ "sc-pg", "upgrade" ] diff --git a/packages/postgres-database/requirements/_base.in b/packages/postgres-database/requirements/_base.in index c5aa128b710f..0294edf9114f 100644 --- a/packages/postgres-database/requirements/_base.in +++ b/packages/postgres-database/requirements/_base.in @@ -6,7 +6,6 @@ --requirement ../../../packages/common-library/requirements/_base.in alembic -opentelemetry-instrumentation-asyncpg pydantic sqlalchemy[postgresql_psycopg2binary,postgresql_asyncpg] # SEE extras in https://github.com/sqlalchemy/sqlalchemy/blob/main/setup.cfg#L43 yarl diff --git a/packages/postgres-database/requirements/_base.txt 
b/packages/postgres-database/requirements/_base.txt index b16bdd318cfa..ad96d677f661 100644 --- a/packages/postgres-database/requirements/_base.txt +++ b/packages/postgres-database/requirements/_base.txt @@ -4,17 +4,11 @@ annotated-types==0.7.0 # via pydantic asyncpg==0.30.0 # via sqlalchemy -deprecated==1.2.18 - # via - # opentelemetry-api - # opentelemetry-semantic-conventions greenlet==3.1.1 # via sqlalchemy idna==3.10 # via yarl -importlib-metadata==8.5.0 - # via opentelemetry-api -mako==1.3.9 +mako==1.3.10 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -23,40 +17,25 @@ markupsafe==3.0.2 # via mako multidict==6.1.0 # via yarl -opentelemetry-api==1.30.0 - # via - # opentelemetry-instrumentation - # opentelemetry-instrumentation-asyncpg - # opentelemetry-semantic-conventions -opentelemetry-instrumentation==0.51b0 - # via opentelemetry-instrumentation-asyncpg -opentelemetry-instrumentation-asyncpg==0.51b0 - # via -r requirements/_base.in -opentelemetry-semantic-conventions==0.51b0 - # via - # opentelemetry-instrumentation - # opentelemetry-instrumentation-asyncpg orjson==3.10.15 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/common-library/requirements/_base.in -packaging==24.2 - # via opentelemetry-instrumentation propcache==0.3.0 # via yarl psycopg2-binary==2.9.10 # via sqlalchemy -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/_base.in # pydantic-extra-types -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.2 +pydantic-extra-types==2.10.5 # via -r requirements/../../../packages/common-library/requirements/_base.in sqlalchemy==1.4.54 # via @@ -64,17 +43,14 @@ sqlalchemy==1.4.54 # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in # alembic -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # alembic # pydantic # pydantic-core # pydantic-extra-types -wrapt==1.17.2 - # via - # deprecated - # opentelemetry-instrumentation + # typing-inspection +typing-inspection==0.4.1 + # via pydantic yarl==1.18.3 # via -r requirements/_base.in -zipp==3.21.0 - # via importlib-metadata diff --git a/packages/postgres-database/requirements/_migration.txt b/packages/postgres-database/requirements/_migration.txt index a9f890849e77..c472ad904b09 100644 --- a/packages/postgres-database/requirements/_migration.txt +++ b/packages/postgres-database/requirements/_migration.txt @@ -8,7 +8,7 @@ certifi==2025.1.31 # requests charset-normalizer==3.4.1 # via requests -click==8.1.8 +click==8.2.1 # via -r requirements/_migration.in docker==7.1.0 # via -r requirements/_migration.in @@ -20,7 +20,7 @@ idna==3.10 # via # -c requirements/_base.txt # requests -mako==1.3.9 +mako==1.3.10 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -29,7 +29,7 @@ markupsafe==3.0.2 # via # -c requirements/_base.txt # mako -requests==2.32.3 +requests==2.32.4 # via docker sqlalchemy==1.4.54 # via @@ -38,11 +38,11 @@ sqlalchemy==1.4.54 # alembic tenacity==9.0.0 # via -r requirements/_migration.in 
-typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # alembic -urllib3==2.3.0 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_migration.in diff --git a/packages/postgres-database/requirements/_test.txt b/packages/postgres-database/requirements/_test.txt index da15f704e3e4..23620af98a03 100644 --- a/packages/postgres-database/requirements/_test.txt +++ b/packages/postgres-database/requirements/_test.txt @@ -19,33 +19,37 @@ greenlet==3.1.1 # sqlalchemy iniconfig==2.0.0 # via pytest -mypy==1.15.0 +mypy==1.16.1 # via sqlalchemy -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via mypy packaging==24.2 + # via pytest +pathspec==0.12.1 + # via mypy +pluggy==1.5.0 # via - # -c requirements/_base.txt # pytest -pluggy==1.5.0 - # via pytest + # pytest-cov psycopg2-binary==2.9.10 # via # -c requirements/_base.txt # aiopg # sqlalchemy -pytest==8.3.5 +pygments==2.19.2 + # via pytest +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio # pytest-cov # pytest-docker # pytest-instafail -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in -pytest-docker==3.2.0 +pytest-docker==3.2.3 # via -r requirements/_test.in pytest-instafail==0.5.0 # via -r requirements/_test.in @@ -76,7 +80,7 @@ types-python-dateutil==2.9.0.20241206 # via arrow types-requests==2.32.0.20250301 # via types-docker -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_migration.txt @@ -84,7 +88,7 @@ typing-extensions==4.12.2 # sqlalchemy2-stubs tzdata==2025.1 # via faker -urllib3==2.3.0 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_migration.txt diff --git a/packages/postgres-database/requirements/_tools.txt b/packages/postgres-database/requirements/_tools.txt index f896126c0b04..3f4b1c1e0fe7 100644 --- a/packages/postgres-database/requirements/_tools.txt +++ b/packages/postgres-database/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # black # pip-tools @@ -26,11 +26,11 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # -c requirements/_test.txt # black @@ -39,12 +39,14 @@ nodeenv==1.9.1 # via pre-commit packaging==24.2 # via - # -c requirements/_base.txt # -c requirements/_test.txt # black # build pathspec==0.12.1 - # via black + # via + # -c requirements/_test.txt + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -69,11 +71,11 @@ pyyaml==6.0.2 # pre-commit ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.2 +setuptools==80.9.0 # via pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/packages/postgres-database/scripts/erd/Dockerfile b/packages/postgres-database/scripts/erd/Dockerfile index 1adbe09416aa..2e427c9373b1 100644 --- a/packages/postgres-database/scripts/erd/Dockerfile +++ b/packages/postgres-database/scripts/erd/Dockerfile @@ -2,7 +2,7 @@ # Define arguments in the global scope ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM 
ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build # we docker image is built based on debian FROM python:${PYTHON_VERSION}-slim-bookworm AS base @@ -20,11 +20,6 @@ RUN apt-get update \ && apt-get clean -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools - # devenv COPY requirements.txt requirements.txt diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/06eafd25d004_add_state_type_unknown.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/06eafd25d004_add_state_type_unknown.py new file mode 100644 index 000000000000..449529aff44f --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/06eafd25d004_add_state_type_unknown.py @@ -0,0 +1,53 @@ +"""add state type unknown + +Revision ID: 06eafd25d004 +Revises: ec4f62595e0c +Create Date: 2025-09-01 12:25:25.617790+00:00 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "06eafd25d004" +down_revision = "ec4f62595e0c" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute("ALTER TYPE statetype ADD VALUE 'UNKNOWN'") + + +def downgrade() -> None: + # NOTE: PostgreSQL doesn't support removing enum values directly + # This downgrades only ensure that StateType.UNKNOWN is not used + # + + # Find all tables and columns that use statetype enum + result = op.get_bind().execute( + sa.DDL( + """ + SELECT t.table_name, c.column_name, c.column_default + FROM information_schema.columns c + JOIN information_schema.tables t ON c.table_name = t.table_name + WHERE c.udt_name = 'statetype' + AND t.table_schema = 'public' + """ + ) + ) + + tables_columns = result.fetchall() + + # Update UNKNOWN states to FAILED in all affected tables + for table_name, column_name, _ in tables_columns: + op.execute( + sa.DDL( + f""" + UPDATE {table_name} + SET {column_name} = 'FAILED' + WHERE {column_name} = 'UNKNOWN' + """ + ) + ) diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/42ec7816c0b4_computational_collection_runs.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/42ec7816c0b4_computational_collection_runs.py new file mode 100644 index 000000000000..75bc05590371 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/42ec7816c0b4_computational_collection_runs.py @@ -0,0 +1,121 @@ +"""computational collection runs + +Revision ID: 42ec7816c0b4 +Revises: d159ac30983c +Create Date: 2025-07-01 13:30:02.736058+00:00 + +""" + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = "42ec7816c0b4" +down_revision = "d159ac30983c" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + "comp_runs_collections", + sa.Column( + "collection_run_id", + postgresql.UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + nullable=False, + ), + sa.Column("client_or_system_generated_id", sa.String(), nullable=False), + sa.Column( + "client_or_system_generated_display_name", sa.String(), nullable=False + ), + sa.Column("is_generated_by_system", sa.Boolean(), nullable=False), + sa.Column( + "created", + sa.DateTime(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.Column( + "modified", + sa.DateTime(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.PrimaryKeyConstraint("collection_run_id"), + ) + op.create_index( + "ix_comp_runs_collections_client_or_system_generated_id", + "comp_runs_collections", + ["client_or_system_generated_id"], + unique=False, + ) + op.add_column( + "comp_runs", sa.Column("collection_run_id", sa.String(), nullable=True) + ) + op.create_unique_constraint( + "comp_runs_project_collection_run_id_unique_constraint", + "comp_runs", + ["project_uuid", "collection_run_id"], + ) + + # Data migration: Create collection run records for existing comp_runs + op.execute( + """ + INSERT INTO comp_runs_collections ( + collection_run_id, + client_or_system_generated_id, + client_or_system_generated_display_name, + is_generated_by_system + ) + SELECT DISTINCT + gen_random_uuid(), + 'migration-generated-' || run_id::text, + 'Migration Generated Collection Run', + TRUE + FROM comp_runs + WHERE collection_run_id IS NULL + """ + ) + + # Update comp_runs to reference the newly created collection runs + op.execute( + """ + UPDATE comp_runs + SET collection_run_id = ( + SELECT collection_run_id::text + FROM comp_runs_collections + WHERE client_or_system_generated_id = 'migration-generated-' || comp_runs.run_id::text + ) + WHERE collection_run_id IS NULL + """ + ) + + op.alter_column( + "comp_runs", + "collection_run_id", + existing_type=sa.String(), + nullable=False, + ) + + op.create_index( + "ix_comp_runs_collection_run_id", + "comp_runs", + ["collection_run_id"], + unique=False, + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index("ix_comp_runs_collection_run_id", table_name="comp_runs") + op.drop_column("comp_runs", "collection_run_id") + op.drop_index( + "ix_comp_runs_collections_client_or_system_generated_id", + table_name="comp_runs_collections", + ) + op.drop_table("comp_runs_collections") + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/4f6fd2586491_add_functions_api_access_rights.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/4f6fd2586491_add_functions_api_access_rights.py new file mode 100644 index 000000000000..8dede1708eb5 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/4f6fd2586491_add_functions_api_access_rights.py @@ -0,0 +1,72 @@ +"""Add functions api access rights + +Revision ID: 4f6fd2586491 +Revises: afb1ba08f3c2 +Create Date: 2025-06-13 12:14:59.317685+00:00 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "4f6fd2586491" +down_revision = "afb1ba08f3c2" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + "funcapi_group_api_access_rights", + sa.Column("group_id", sa.BigInteger(), nullable=False), + sa.Column("product_name", sa.String(), nullable=False), + sa.Column("read_functions", sa.Boolean(), nullable=True), + sa.Column("write_functions", sa.Boolean(), nullable=True), + sa.Column("execute_functions", sa.Boolean(), nullable=True), + sa.Column("read_function_jobs", sa.Boolean(), nullable=True), + sa.Column("write_function_jobs", sa.Boolean(), nullable=True), + sa.Column("execute_function_jobs", sa.Boolean(), nullable=True), + sa.Column("read_function_job_collections", sa.Boolean(), nullable=True), + sa.Column("write_function_job_collections", sa.Boolean(), nullable=True), + sa.Column("execute_function_job_collections", sa.Boolean(), nullable=True), + sa.Column( + "created", + sa.DateTime(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.Column( + "modified", + sa.DateTime(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.ForeignKeyConstraint( + ["group_id"], + ["groups.gid"], + name="fk_func_access_to_groups_group_id", + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.ForeignKeyConstraint( + ["product_name"], + ["products.name"], + name="fk_func_access_to_products_product_name", + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint( + "group_id", + "product_name", + name="pk_func_group_product_name_to_api_access_rights", + ), + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table("funcapi_group_api_access_rights") + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/5679165336c8_new_users_secrets.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/5679165336c8_new_users_secrets.py new file mode 100644 index 000000000000..1187c800a65c --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/5679165336c8_new_users_secrets.py @@ -0,0 +1,77 @@ +"""new users secrets + +Revision ID: 5679165336c8 +Revises: 61b98a60e934 +Create Date: 2025-07-17 17:07:20.200038+00:00 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "5679165336c8" +down_revision = "61b98a60e934" +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_table( + "users_secrets", + sa.Column("user_id", sa.BigInteger(), nullable=False), + sa.Column("password_hash", sa.String(), nullable=False), + sa.Column( + "modified", + sa.DateTime(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.ForeignKeyConstraint( + ["user_id"], + ["users.id"], + name="fk_users_secrets_user_id_users", + onupdate="CASCADE", + ondelete="CASCADE", + ), + sa.PrimaryKeyConstraint("user_id", name="users_secrets_pkey"), + ) + + # Copy password data from users table to users_secrets table + op.execute( + sa.DDL( + """ + INSERT INTO users_secrets (user_id, password_hash, modified) + SELECT id, password_hash, created_at + FROM users + WHERE password_hash IS NOT NULL + """ + ) + ) + + op.drop_column("users", "password_hash") + + +def downgrade(): + # Add column as nullable first + op.add_column( + "users", + sa.Column("password_hash", sa.VARCHAR(), autoincrement=False, nullable=True), + ) + + # Copy password data back from users_secrets table to users table + op.execute( + sa.DDL( + """ + UPDATE users + SET password_hash = us.password_hash + FROM users_secrets us + WHERE users.id = us.user_id + """ + ) + ) + + # Now make the column NOT NULL + op.alter_column("users", "password_hash", nullable=False) + + op.drop_table("users_secrets") diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/5b998370916a_introduce_data_deleted_in_projects_to_.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/5b998370916a_introduce_data_deleted_in_projects_to_.py new file mode 100644 index 000000000000..95457ee0c4cd --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/5b998370916a_introduce_data_deleted_in_projects_to_.py @@ -0,0 +1,40 @@ +"""introduce data_deleted in projects_to_jobs table + +Revision ID: 5b998370916a +Revises: 5679165336c8 +Create Date: 2025-08-11 13:58:38.424398+00:00 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "5b998370916a" +down_revision = "5679165336c8" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "projects_to_jobs", + sa.Column("storage_assets_deleted", sa.Boolean(), nullable=True), + ) + + op.execute("UPDATE projects_to_jobs SET storage_assets_deleted = false") + + op.alter_column( + "projects_to_jobs", + "storage_assets_deleted", + existing_type=sa.BOOLEAN(), + nullable=False, + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column("projects_to_jobs", "storage_assets_deleted") + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/61b98a60e934_computational_collection_uniquencess.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/61b98a60e934_computational_collection_uniquencess.py new file mode 100644 index 000000000000..1de7d9da7f89 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/61b98a60e934_computational_collection_uniquencess.py @@ -0,0 +1,35 @@ +"""computational collection uniquencess + +Revision ID: 61b98a60e934 +Revises: df61d1b2b967 +Create Date: 2025-07-08 15:40:12.714684+00:00 + +""" + +from alembic import op + +# revision identifiers, used by Alembic. +revision = "61b98a60e934" +down_revision = "df61d1b2b967" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_unique_constraint( + "client_or_system_generated_id_uniqueness", + "comp_runs_collections", + ["client_or_system_generated_id"], + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_constraint( + "client_or_system_generated_id_uniqueness", + "comp_runs_collections", + type_="unique", + ) + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/7e92447558e0_update_api_keys_uniqueness_constraint.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/7e92447558e0_update_api_keys_uniqueness_constraint.py new file mode 100644 index 000000000000..e3a42a641256 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/7e92447558e0_update_api_keys_uniqueness_constraint.py @@ -0,0 +1,37 @@ +"""Update api-keys uniqueness constraint + +Revision ID: 7e92447558e0 +Revises: 06eafd25d004 +Create Date: 2025-09-12 09:56:45.164921+00:00 + +""" + +from alembic import op + +# revision identifiers, used by Alembic. +revision = "7e92447558e0" +down_revision = "06eafd25d004" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_constraint("display_name_userid_uniqueness", "api_keys", type_="unique") + op.create_unique_constraint( + "display_name_userid_product_name_uniqueness", + "api_keys", + ["display_name", "user_id", "product_name"], + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
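+    # Restores the previous (display_name, user_id) uniqueness constraint on api_keys.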
### + op.drop_constraint( + "display_name_userid_product_name_uniqueness", "api_keys", type_="unique" + ) + op.create_unique_constraint( + "display_name_userid_uniqueness", "api_keys", ["display_name", "user_id"] + ) + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/b566f1b29012_modify_conversations.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/b566f1b29012_modify_conversations.py new file mode 100644 index 000000000000..8d211f483c28 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/b566f1b29012_modify_conversations.py @@ -0,0 +1,61 @@ +"""modify conversations + +Revision ID: b566f1b29012 +Revises: 5b998370916a +Create Date: 2025-08-14 15:02:54.784186+00:00 + +""" + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = "b566f1b29012" +down_revision = "5b998370916a" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "conversations", + sa.Column( + "extra_context", + postgresql.JSONB(astext_type=sa.Text()), + server_default=sa.text("'{}'::jsonb"), + nullable=False, + ), + ) + op.add_column( + "products", + sa.Column("support_standard_group_id", sa.BigInteger(), nullable=True), + ) + op.create_foreign_key( + "fk_products_support_standard_group_id", + "products", + "groups", + ["support_standard_group_id"], + ["gid"], + onupdate="CASCADE", + ondelete="SET NULL", + ) + + op.execute( + """ + ALTER TYPE conversationtype ADD VALUE 'SUPPORT'; + """ + ) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_constraint( + "fk_products_support_standard_group_id", "products", type_="foreignkey" + ) + op.drop_column("products", "support_standard_group_id") + op.drop_column("conversations", "extra_context") + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/d159ac30983c_make_func_api_access_non_nullable.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/d159ac30983c_make_func_api_access_non_nullable.py new file mode 100644 index 000000000000..788945d77944 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/d159ac30983c_make_func_api_access_non_nullable.py @@ -0,0 +1,191 @@ +"""Make func api access non-nullable + +Revision ID: d159ac30983c +Revises: 4f6fd2586491 +Create Date: 2025-07-01 08:50:29.095068+00:00 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "d159ac30983c" +down_revision = "4f6fd2586491" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
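+    # Backfills NULL access-right flags to false before tightening the columns to NOT NULL.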
### + op.execute( + """ + UPDATE funcapi_group_api_access_rights + SET read_functions = false + WHERE read_functions IS NULL + """ + ) + op.execute( + """ + UPDATE funcapi_group_api_access_rights + SET write_functions = false + WHERE write_functions IS NULL + """ + ) + op.execute( + """ + UPDATE funcapi_group_api_access_rights + SET execute_functions = false + WHERE execute_functions IS NULL + """ + ) + op.execute( + """ + UPDATE funcapi_group_api_access_rights + SET read_function_jobs = false + WHERE read_function_jobs IS NULL + """ + ) + op.execute( + """ + UPDATE funcapi_group_api_access_rights + SET write_function_jobs = false + WHERE write_function_jobs IS NULL + """ + ) + op.execute( + """ + UPDATE funcapi_group_api_access_rights + SET execute_function_jobs = false + WHERE execute_function_jobs IS NULL + """ + ) + op.execute( + """ + UPDATE funcapi_group_api_access_rights + SET read_function_job_collections = false + WHERE read_function_job_collections IS NULL + """ + ) + op.execute( + """ + UPDATE funcapi_group_api_access_rights + SET write_function_job_collections = false + WHERE write_function_job_collections IS NULL + """ + ) + op.execute( + """ + UPDATE funcapi_group_api_access_rights + SET execute_function_job_collections = false + WHERE execute_function_job_collections IS NULL + """ + ) + op.alter_column( + "funcapi_group_api_access_rights", + "write_functions", + existing_type=sa.BOOLEAN(), + nullable=False, + ) + op.alter_column( + "funcapi_group_api_access_rights", + "execute_functions", + existing_type=sa.BOOLEAN(), + nullable=False, + ) + op.alter_column( + "funcapi_group_api_access_rights", + "read_function_jobs", + existing_type=sa.BOOLEAN(), + nullable=False, + ) + op.alter_column( + "funcapi_group_api_access_rights", + "write_function_jobs", + existing_type=sa.BOOLEAN(), + nullable=False, + ) + op.alter_column( + "funcapi_group_api_access_rights", + "execute_function_jobs", + existing_type=sa.BOOLEAN(), + nullable=False, + ) + op.alter_column( + "funcapi_group_api_access_rights", + "read_function_job_collections", + existing_type=sa.BOOLEAN(), + nullable=False, + ) + op.alter_column( + "funcapi_group_api_access_rights", + "write_function_job_collections", + existing_type=sa.BOOLEAN(), + nullable=False, + ) + op.alter_column( + "funcapi_group_api_access_rights", + "execute_function_job_collections", + existing_type=sa.BOOLEAN(), + nullable=False, + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
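+    # Relaxes the access-right columns back to nullable.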
### + op.alter_column( + "funcapi_group_api_access_rights", + "execute_function_job_collections", + existing_type=sa.BOOLEAN(), + nullable=True, + ) + op.alter_column( + "funcapi_group_api_access_rights", + "write_function_job_collections", + existing_type=sa.BOOLEAN(), + nullable=True, + ) + op.alter_column( + "funcapi_group_api_access_rights", + "read_function_job_collections", + existing_type=sa.BOOLEAN(), + nullable=True, + ) + op.alter_column( + "funcapi_group_api_access_rights", + "execute_function_jobs", + existing_type=sa.BOOLEAN(), + nullable=True, + ) + op.alter_column( + "funcapi_group_api_access_rights", + "write_function_jobs", + existing_type=sa.BOOLEAN(), + nullable=True, + ) + op.alter_column( + "funcapi_group_api_access_rights", + "read_function_jobs", + existing_type=sa.BOOLEAN(), + nullable=True, + ) + op.alter_column( + "funcapi_group_api_access_rights", + "execute_functions", + existing_type=sa.BOOLEAN(), + nullable=True, + ) + op.alter_column( + "funcapi_group_api_access_rights", + "write_functions", + existing_type=sa.BOOLEAN(), + nullable=True, + ) + op.alter_column( + "funcapi_group_api_access_rights", + "read_functions", + existing_type=sa.BOOLEAN(), + nullable=True, + ) + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/df61d1b2b967_computational_collection_runs_2.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/df61d1b2b967_computational_collection_runs_2.py new file mode 100644 index 000000000000..e4b986921b9f --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/df61d1b2b967_computational_collection_runs_2.py @@ -0,0 +1,38 @@ +"""computational collection runs 2 + +Revision ID: df61d1b2b967 +Revises: 42ec7816c0b4 +Create Date: 2025-07-02 16:04:02.458800+00:00 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "df61d1b2b967" +down_revision = "42ec7816c0b4" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column( + "funcapi_group_api_access_rights", + "read_functions", + existing_type=sa.BOOLEAN(), + nullable=False, + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column( + "funcapi_group_api_access_rights", + "read_functions", + existing_type=sa.BOOLEAN(), + nullable=True, + ) + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/ec4f62595e0c_add_support_fogbugz_fields.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/ec4f62595e0c_add_support_fogbugz_fields.py new file mode 100644 index 000000000000..f5b22003fce4 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/ec4f62595e0c_add_support_fogbugz_fields.py @@ -0,0 +1,38 @@ +"""add support fogbugz fields + +Revision ID: ec4f62595e0c +Revises: b566f1b29012 +Create Date: 2025-08-26 13:06:10.879081+00:00 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "ec4f62595e0c" +down_revision = "b566f1b29012" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
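+    # Adds optional Fogbugz person/project id columns to products for support-case assignment.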
### + op.add_column( + "products", + sa.Column("support_assigned_fogbugz_person_id", sa.BigInteger(), nullable=True), + ) + op.add_column( + "products", + sa.Column( + "support_assigned_fogbugz_project_id", sa.BigInteger(), nullable=True + ), + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("products", "support_assigned_fogbugz_project_id") + op.drop_column("products", "support_assigned_fogbugz_person_id") + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/models/_common.py b/packages/postgres-database/src/simcore_postgres_database/models/_common.py index 47bfeb6ebf08..6b2405548547 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/_common.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/_common.py @@ -16,24 +16,28 @@ class RefActions: NO_ACTION: Final[str] = "NO ACTION" -def column_created_datetime(*, timezone: bool = True) -> sa.Column: +def column_created_datetime( + *, timezone: bool = True, doc="Timestamp auto-generated upon creation" +) -> sa.Column: return sa.Column( "created", sa.DateTime(timezone=timezone), nullable=False, server_default=sa.sql.func.now(), - doc="Timestamp auto-generated upon creation", + doc=doc, ) -def column_modified_datetime(*, timezone: bool = True) -> sa.Column: +def column_modified_datetime( + *, timezone: bool = True, doc="Timestamp with last row update" +) -> sa.Column: return sa.Column( "modified", sa.DateTime(timezone=timezone), nullable=False, server_default=sa.sql.func.now(), onupdate=sa.sql.func.now(), - doc="Timestamp with last row update", + doc=doc, ) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/api_keys.py b/packages/postgres-database/src/simcore_postgres_database/models/api_keys.py index 2c3f12eca3ab..02a2fc58bbc2 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/api_keys.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/api_keys.py @@ -75,7 +75,10 @@ "If set to NULL then the key does not expire.", ), sa.UniqueConstraint( - "display_name", "user_id", name="display_name_userid_uniqueness" + "display_name", + "user_id", + "product_name", + name="display_name_userid_product_name_uniqueness", ), ) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/comp_pipeline.py b/packages/postgres-database/src/simcore_postgres_database/models/comp_pipeline.py index a4e5645860c0..ac2387084e13 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/comp_pipeline.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/comp_pipeline.py @@ -1,6 +1,5 @@ -""" Computational Pipeline Table +"""Computational Pipeline Table""" -""" import enum import uuid @@ -24,6 +23,7 @@ class StateType(enum.Enum): ABORTED = "ABORTED" WAITING_FOR_RESOURCES = "WAITING_FOR_RESOURCES" WAITING_FOR_CLUSTER = "WAITING_FOR_CLUSTER" + UNKNOWN = "UNKNOWN" def _new_uuid(): diff --git a/packages/postgres-database/src/simcore_postgres_database/models/comp_runs.py b/packages/postgres-database/src/simcore_postgres_database/models/comp_runs.py index efc1716cf106..13157505041c 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/comp_runs.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/comp_runs.py @@ -105,6 +105,13 @@ server_default=sa.text("'{}'::jsonb"), nullable=False, ), + sa.Column( + "collection_run_id", + sa.String, 
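+        # NOTE: stored as plain text and backfilled from comp_runs_collections.collection_run_id
+        # by the corresponding migration; no foreign-key constraint is defined.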
+ nullable=False, + ), sa.UniqueConstraint("project_uuid", "user_id", "iteration"), sa.Index("ix_comp_runs_user_id", "user_id"), + sa.Index("ix_comp_runs_collection_run_id", "collection_run_id"), + sa.UniqueConstraint("project_uuid", "collection_run_id"), ) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/comp_runs_collections.py b/packages/postgres-database/src/simcore_postgres_database/models/comp_runs_collections.py new file mode 100644 index 000000000000..31439acc458d --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/models/comp_runs_collections.py @@ -0,0 +1,41 @@ +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import UUID + +from ._common import column_created_datetime, column_modified_datetime +from .base import metadata + +comp_runs_collections = sa.Table( + "comp_runs_collections", + metadata, + sa.Column( + "collection_run_id", + UUID(as_uuid=True), + server_default=sa.text("gen_random_uuid()"), + primary_key=True, + ), + sa.Column( + "client_or_system_generated_id", + sa.String, + nullable=False, + doc="Unique identifier for the collection run, generated by the client (ex. Third party using our public api) or system (ex. osparc webserver)", + ), + sa.Column( + "client_or_system_generated_display_name", + sa.String, + nullable=False, + ), + sa.Column( + "is_generated_by_system", + sa.Boolean, + nullable=False, + ), + column_created_datetime(timezone=True), + column_modified_datetime(timezone=True), + sa.Index( + "ix_comp_runs_collections_client_or_system_generated_id", + "client_or_system_generated_id", + ), + sa.UniqueConstraint( + "client_or_system_generated_id", name="client_or_system_generated_id_uniqueness" + ), +) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/conversations.py b/packages/postgres-database/src/simcore_postgres_database/models/conversations.py index 3072d91dda55..a301a7ea70a8 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/conversations.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/conversations.py @@ -1,7 +1,7 @@ import enum import sqlalchemy as sa -from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.dialects.postgresql import JSONB, UUID from ._common import RefActions, column_created_datetime, column_modified_datetime from .base import metadata @@ -12,6 +12,7 @@ class ConversationType(enum.Enum): PROJECT_STATIC = "PROJECT_STATIC" # Static conversation for the project PROJECT_ANNOTATION = "PROJECT_ANNOTATION" # Something like sticky note, can be located anywhere in the pipeline UI + SUPPORT = "SUPPORT" # Support conversation conversations = sa.Table( @@ -70,6 +71,13 @@ class ConversationType(enum.Enum): nullable=False, doc="Product name identifier. 
If None, then the item is not exposed", ), + sa.Column( + "extra_context", + JSONB, + nullable=False, + server_default=sa.text("'{}'::jsonb"), + doc="Free JSON to store extra context", + ), column_created_datetime(timezone=True), column_modified_datetime(timezone=True), ) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/funcapi_api_access_rights_table.py b/packages/postgres-database/src/simcore_postgres_database/models/funcapi_api_access_rights_table.py new file mode 100644 index 000000000000..04957c1c9b26 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/models/funcapi_api_access_rights_table.py @@ -0,0 +1,96 @@ +"""Function api access rights of groups (read, write, execute)""" + +import sqlalchemy as sa +from simcore_postgres_database.models._common import ( + RefActions, + column_created_datetime, + column_modified_datetime, +) + +from .base import metadata + +funcapi_api_access_rights_table = sa.Table( + "funcapi_group_api_access_rights", + metadata, + sa.Column( + "group_id", + sa.ForeignKey( + "groups.gid", + name="fk_func_access_to_groups_group_id", + onupdate=RefActions.CASCADE, + ondelete=RefActions.CASCADE, + ), + nullable=False, + ), + sa.Column( + "product_name", + sa.ForeignKey( + "products.name", + name="fk_func_access_to_products_product_name", + onupdate=RefActions.CASCADE, + ondelete=RefActions.CASCADE, + ), + nullable=False, + ), + sa.Column( + "read_functions", + sa.Boolean, + default=False, + nullable=False, + ), + sa.Column( + "write_functions", + sa.Boolean, + default=False, + nullable=False, + ), + sa.Column( + "execute_functions", + sa.Boolean, + default=False, + nullable=False, + ), + sa.Column( + "read_function_jobs", + sa.Boolean, + default=False, + nullable=False, + ), + sa.Column( + "write_function_jobs", + sa.Boolean, + default=False, + nullable=False, + ), + sa.Column( + "execute_function_jobs", + sa.Boolean, + default=False, + nullable=False, + ), + sa.Column( + "read_function_job_collections", + sa.Boolean, + default=False, + nullable=False, + ), + sa.Column( + "write_function_job_collections", + sa.Boolean, + default=False, + nullable=False, + ), + sa.Column( + "execute_function_job_collections", + sa.Boolean, + default=False, + nullable=False, + ), + column_created_datetime(), + column_modified_datetime(), + sa.PrimaryKeyConstraint( + "group_id", + "product_name", + name="pk_func_group_product_name_to_api_access_rights", + ), +) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/funcapi_functions_access_rights_table.py b/packages/postgres-database/src/simcore_postgres_database/models/funcapi_functions_access_rights_table.py index b01f8d287790..a4a60b2dffb0 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/funcapi_functions_access_rights_table.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/funcapi_functions_access_rights_table.py @@ -22,7 +22,6 @@ ondelete=RefActions.CASCADE, ), nullable=False, - doc="Unique identifier of the function", ), sa.Column( "group_id", @@ -33,7 +32,6 @@ ondelete=RefActions.CASCADE, ), nullable=False, - doc="Group id", ), sa.Column( "product_name", @@ -44,25 +42,21 @@ ondelete=RefActions.CASCADE, ), nullable=False, - doc="Name of the product", ), sa.Column( "read", sa.Boolean, default=False, - doc="Read access right for the function", ), sa.Column( "write", sa.Boolean, default=False, - doc="Write access right for the function", ), sa.Column( "execute", sa.Boolean, default=False, - doc="Execute 
access right for the function", ), column_created_datetime(), column_modified_datetime(), diff --git a/packages/postgres-database/src/simcore_postgres_database/models/products.py b/packages/postgres-database/src/simcore_postgres_database/models/products.py index bdb8e080d23e..414e7e4b2c06 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/products.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/products.py @@ -32,7 +32,6 @@ class VendorUI(TypedDict, total=True): logo_url: str # vendor logo url strong_color: str # vendor main color - project_alias: str # project alias for the product (e.g. "project" or "study") class Vendor(TypedDict, total=False): @@ -269,5 +268,32 @@ class ProductLoginSettingsDict(TypedDict, total=False): nullable=True, doc="Group associated to this product", ), + sa.Column( + "support_standard_group_id", + sa.BigInteger, + sa.ForeignKey( + groups.c.gid, + name="fk_products_support_standard_group_id", + ondelete=RefActions.SET_NULL, + onupdate=RefActions.CASCADE, + ), + unique=False, + nullable=True, + doc="Group associated to this product support", + ), + sa.Column( + "support_assigned_fogbugz_person_id", + sa.BigInteger, + unique=False, + nullable=True, + doc="Fogbugz person ID to assign support case", + ), + sa.Column( + "support_assigned_fogbugz_project_id", + sa.BigInteger, + unique=False, + nullable=True, + doc="Fogbugz project ID to assign support case", + ), sa.PrimaryKeyConstraint("name", name="products_pk"), ) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/projects_to_jobs.py b/packages/postgres-database/src/simcore_postgres_database/models/projects_to_jobs.py index 4f3859fb36e5..4013cf9e4355 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/projects_to_jobs.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/projects_to_jobs.py @@ -28,6 +28,12 @@ "the relative resource name is shelves/shelf1/jobs/job2, " "the parent resource name is shelves/shelf1.", ), + sa.Column( + "storage_assets_deleted", + sa.Boolean, + nullable=False, + doc="Indicates whether the job's S3 assets have been actively deleted.", + ), # Composite key (project_uuid, job_parent_resource_name) uniquely identifies very row sa.UniqueConstraint( "project_uuid", diff --git a/packages/postgres-database/src/simcore_postgres_database/models/users.py b/packages/postgres-database/src/simcore_postgres_database/models/users.py index 7be2161ff864..62dffd58c66d 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/users.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/users.py @@ -67,15 +67,6 @@ "NOTE: new policy (NK) is that the same phone can be reused therefore it does not has to be unique", ), # - # User Secrets ------------------ - # - sa.Column( - "password_hash", - sa.String(), - nullable=False, - doc="Hashed password", - ), - # # User Account ------------------ # sa.Column( diff --git a/packages/postgres-database/src/simcore_postgres_database/models/users_secrets.py b/packages/postgres-database/src/simcore_postgres_database/models/users_secrets.py new file mode 100644 index 000000000000..1a1ae04ec637 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/models/users_secrets.py @@ -0,0 +1,34 @@ +import sqlalchemy as sa + +from ._common import RefActions, column_modified_datetime +from .base import metadata + +__all__: tuple[str, ...] 
= ("users_secrets",) + +users_secrets = sa.Table( + "users_secrets", + metadata, + # + # User Secrets ------------------ + # + sa.Column( + "user_id", + sa.BigInteger(), + sa.ForeignKey( + "users.id", + name="fk_users_secrets_user_id_users", + onupdate=RefActions.CASCADE, + ondelete=RefActions.CASCADE, + ), + nullable=False, + ), + sa.Column( + "password_hash", + sa.String(), + nullable=False, + doc="Hashed password", + ), + column_modified_datetime(timezone=True, doc="Last password modification timestamp"), + # --------------------------- + sa.PrimaryKeyConstraint("user_id", name="users_secrets_pkey"), +) diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_projects.py b/packages/postgres-database/src/simcore_postgres_database/utils_projects.py index 577f9441004b..ee6a8a132e89 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_projects.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_projects.py @@ -7,7 +7,7 @@ from sqlalchemy.ext.asyncio import AsyncConnection from .models.projects import projects -from .utils_repos import transaction_context +from .utils_repos import pass_or_acquire_connection, transaction_context class DBBaseProjectError(OsparcErrorMixin, Exception): @@ -22,6 +22,23 @@ class ProjectsRepo: def __init__(self, engine): self.engine = engine + async def exists( + self, + project_uuid: uuid.UUID, + *, + connection: AsyncConnection | None = None, + ) -> bool: + async with pass_or_acquire_connection(self.engine, connection) as conn: + return ( + await conn.scalar( + sa.select(1) + .select_from(projects) + .where(projects.c.uuid == f"{project_uuid}") + .limit(1) + ) + is not None + ) + async def get_project_last_change_date( self, project_uuid: uuid.UUID, diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py index 6fc72990b30e..4bb6855b0bff 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_projects_nodes.py @@ -1,21 +1,21 @@ import datetime import uuid from dataclasses import dataclass -from typing import Any +from typing import Annotated, Any import asyncpg.exceptions # type: ignore[import-untyped] -import sqlalchemy import sqlalchemy.exc from common_library.async_tools import maybe_await +from common_library.basic_types import DEFAULT_FACTORY from common_library.errors_classes import OsparcErrorMixin from pydantic import BaseModel, ConfigDict, Field -from simcore_postgres_database.utils_aiosqlalchemy import map_db_exception from sqlalchemy.dialects.postgresql import insert as pg_insert from ._protocols import DBConnection from .aiopg_errors import ForeignKeyViolation, UniqueViolation from .models.projects_node_to_pricing_unit import projects_node_to_pricing_unit from .models.projects_nodes import projects_nodes +from .utils_aiosqlalchemy import map_db_exception # @@ -47,7 +47,9 @@ class ProjectNodesDuplicateNodeError(BaseProjectNodesError): class ProjectNodeCreate(BaseModel): node_id: uuid.UUID - required_resources: dict[str, Any] = Field(default_factory=dict) + required_resources: Annotated[dict[str, Any], Field(default_factory=dict)] = ( + DEFAULT_FACTORY + ) key: str version: str label: str @@ -56,6 +58,7 @@ class ProjectNodeCreate(BaseModel): input_access: dict[str, Any] | None = None input_nodes: list[str] | None = None inputs: dict[str, Any] | None = None + 
inputs_required: list[str] | None = None inputs_units: dict[str, Any] | None = None output_nodes: list[str] | None = None outputs: dict[str, Any] | None = None @@ -100,17 +103,18 @@ async def add( """ if not nodes: return [] + + values = [ + { + "project_uuid": f"{self.project_uuid}", + **node.model_dump(mode="json"), + } + for node in nodes + ] + insert_stmt = ( projects_nodes.insert() - .values( - [ - { - "project_uuid": f"{self.project_uuid}", - **node.model_dump(exclude_unset=True, mode="json"), - } - for node in nodes - ] - ) + .values(values) .returning( *[ c @@ -126,14 +130,17 @@ async def add( rows = await maybe_await(result.fetchall()) assert isinstance(rows, list) # nosec return [ProjectNode.model_validate(r) for r in rows] + except ForeignKeyViolation as exc: # this happens when the project does not exist, as we first check the node exists raise ProjectNodesProjectNotFoundError( project_uuid=self.project_uuid ) from exc + except UniqueViolation as exc: # this happens if the node already exists on creation raise ProjectNodesDuplicateNodeError from exc + except sqlalchemy.exc.IntegrityError as exc: raise map_db_exception( exc, diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_users.py b/packages/postgres-database/src/simcore_postgres_database/utils_users.py index 587f90ee504b..baf0b0e16cb2 100644 --- a/packages/postgres-database/src/simcore_postgres_database/utils_users.py +++ b/packages/postgres-database/src/simcore_postgres_database/utils_users.py @@ -5,17 +5,21 @@ import re import secrets import string +from dataclasses import dataclass, fields from datetime import datetime from typing import Any, Final import sqlalchemy as sa -from common_library.async_tools import maybe_await from sqlalchemy import Column +from sqlalchemy.engine.result import Row +from sqlalchemy.exc import IntegrityError +from sqlalchemy.ext.asyncio.engine import AsyncConnection, AsyncEngine +from sqlalchemy.sql import Select -from ._protocols import DBConnection -from .aiopg_errors import UniqueViolation from .models.users import UserRole, UserStatus, users from .models.users_details import users_pre_registration_details +from .models.users_secrets import users_secrets +from .utils_repos import pass_or_acquire_connection, transaction_context class BaseUserRepoError(Exception): @@ -52,74 +56,126 @@ def generate_alternative_username(username: str) -> str: return f"{username}_{_generate_random_chars()}" +@dataclass(frozen=True) +class UserRow: + id: int + name: str + email: str + role: UserRole + status: UserStatus + first_name: str | None = None + last_name: str | None = None + phone: str | None = None + + @classmethod + def from_row(cls, row: Row) -> "UserRow": + return cls(**{f.name: getattr(row, f.name) for f in fields(cls)}) + + class UsersRepo: - @staticmethod + _user_columns = ( + users.c.id, + users.c.name, + users.c.email, + users.c.role, + users.c.status, + users.c.first_name, + users.c.last_name, + users.c.phone, + ) + + def __init__(self, engine: AsyncEngine): + self._engine = engine + + async def _get_scalar_or_raise( + self, + query: Select, + connection: AsyncConnection | None = None, + ) -> Any: + """Execute a scalar query and raise UserNotFoundInRepoError if no value found.""" + async with pass_or_acquire_connection(self._engine, connection) as conn: + value = await conn.scalar(query) + if value is not None: + return value + raise UserNotFoundInRepoError + async def new_user( - conn: DBConnection, + self, + connection: AsyncConnection | None = None, + *, email: str, 
password_hash: str, status: UserStatus, expires_at: datetime | None, - ) -> Any: - data: dict[str, Any] = { + role: UserRole = UserRole.USER, + ) -> UserRow: + user_data: dict[str, Any] = { "name": _generate_username_from_email(email), "email": email, - "password_hash": password_hash, "status": status, - "role": UserRole.USER, + "role": role, "expires_at": expires_at, } user_id = None while user_id is None: try: - user_id = await conn.scalar( - users.insert().values(**data).returning(users.c.id) - ) - except UniqueViolation: - data["name"] = generate_alternative_username(data["name"]) - - result = await conn.execute( - sa.select( - users.c.id, - users.c.name, - users.c.email, - users.c.role, - users.c.status, - ).where(users.c.id == user_id) - ) - return await maybe_await(result.first()) + async with transaction_context(self._engine, connection) as conn: + # Insert user record + user_id = await conn.scalar( + users.insert().values(**user_data).returning(users.c.id) + ) + + # Insert password hash into users_secrets table + await conn.execute( + users_secrets.insert().values( + user_id=user_id, + password_hash=password_hash, + ) + ) + except IntegrityError: + user_data["name"] = generate_alternative_username(user_data["name"]) + user_id = None # Reset to retry with new username + + async with pass_or_acquire_connection(self._engine, connection) as conn: + result = await conn.execute( + sa.select(*self._user_columns).where(users.c.id == user_id) + ) + return UserRow.from_row(result.one()) - @staticmethod async def link_and_update_user_from_pre_registration( - conn: DBConnection, + self, + connection: AsyncConnection | None = None, *, new_user_id: int, new_user_email: str, - update_user: bool = True, ) -> None: """After a user is created, it can be associated with information provided during invitation - WARNING: Use ONLY upon new user creation. It might override user_details.user_id, users.first_name, users.last_name etc if already applied - or changes happen in users table + Links ALL pre-registrations for the given email to the user, regardless of product_name. + + WARNING: Use ONLY upon new user creation. It might override user_details.user_id, + users.first_name, users.last_name etc if already applied or changes happen in users table """ assert new_user_email # nosec assert new_user_id > 0 # nosec - # link both tables first - result = await conn.execute( - users_pre_registration_details.update() - .where(users_pre_registration_details.c.pre_email == new_user_email) - .values(user_id=new_user_id) - ) + async with transaction_context(self._engine, connection) as conn: + # Link ALL pre-registrations for this email to the user + result = await conn.execute( + users_pre_registration_details.update() + .where(users_pre_registration_details.c.pre_email == new_user_email) + .values(user_id=new_user_id) + ) - if update_user: # COPIES some pre-registration details to the users table pre_columns = ( users_pre_registration_details.c.pre_first_name, users_pre_registration_details.c.pre_last_name, - # NOTE: pre_phone is not copied since it has to be validated. Otherwise, if - # phone is wrong, currently user won't be able to login! + # NOTE: pre_phone is NOT copied since it has to be validated. + # It remains here as informative. In the future it might be given + # as a hint to the front end? + # Otherwise, if phone is wrong, currently user won't be able to login! 
) assert {c.name for c in pre_columns} == { # nosec @@ -133,103 +189,177 @@ async def link_and_update_user_from_pre_registration( and c.name.startswith("pre_") }, "Different pre-cols detected. This code might need an update update" + # Get the most recent pre-registration data to copy to users table result = await conn.execute( - sa.select(*pre_columns).where( - users_pre_registration_details.c.pre_email == new_user_email - ) + sa.select(*pre_columns) + .where(users_pre_registration_details.c.pre_email == new_user_email) + .order_by(users_pre_registration_details.c.created.desc()) + .limit(1) ) - if pre_registration_details_data := result.first(): - # NOTE: could have many products! which to use? + if pre_registration_details_data := result.one_or_none(): await conn.execute( users.update() .where(users.c.id == new_user_id) .values( - first_name=pre_registration_details_data.pre_first_name, # type: ignore[union-attr] - last_name=pre_registration_details_data.pre_last_name, # type: ignore[union-attr] + first_name=pre_registration_details_data.pre_first_name, + last_name=pre_registration_details_data.pre_last_name, ) ) - @staticmethod - def get_billing_details_query(user_id: int): - return ( - sa.select( - users.c.first_name, - users.c.last_name, - users_pre_registration_details.c.institution, - users_pre_registration_details.c.address, - users_pre_registration_details.c.city, - users_pre_registration_details.c.state, - users_pre_registration_details.c.country, - users_pre_registration_details.c.postal_code, - users.c.phone, - ) - .select_from( - users.join( - users_pre_registration_details, - users.c.id == users_pre_registration_details.c.user_id, - ) - ) - .where(users.c.id == user_id) + async def get_role( + self, connection: AsyncConnection | None = None, *, user_id: int + ) -> UserRole: + value = await self._get_scalar_or_raise( + sa.select(users.c.role).where(users.c.id == user_id), + connection=connection, ) - - @staticmethod - async def get_billing_details(conn: DBConnection, user_id: int) -> Any | None: - result = await conn.execute( - UsersRepo.get_billing_details_query(user_id=user_id) + assert isinstance(value, UserRole) # nosec + return UserRole(value) + + async def get_email( + self, connection: AsyncConnection | None = None, *, user_id: int + ) -> str: + value = await self._get_scalar_or_raise( + sa.select(users.c.email).where(users.c.id == user_id), + connection=connection, ) - return await maybe_await(result.fetchone()) + assert isinstance(value, str) # nosec + return value - @staticmethod - async def get_role(conn: DBConnection, user_id: int) -> UserRole: - value: UserRole | None = await conn.scalar( - sa.select(users.c.role).where(users.c.id == user_id) + async def get_active_user_email( + self, connection: AsyncConnection | None = None, *, user_id: int + ) -> str: + value = await self._get_scalar_or_raise( + sa.select(users.c.email).where( + (users.c.status == UserStatus.ACTIVE) & (users.c.id == user_id) + ), + connection=connection, ) - if value: - assert isinstance(value, UserRole) # nosec - return UserRole(value) + assert isinstance(value, str) # nosec + return value + + async def get_password_hash( + self, connection: AsyncConnection | None = None, *, user_id: int + ) -> str: + value = await self._get_scalar_or_raise( + sa.select(users_secrets.c.password_hash).where( + users_secrets.c.user_id == user_id + ), + connection=connection, + ) + assert isinstance(value, str) # nosec + return value - raise UserNotFoundInRepoError + async def get_user_by_email_or_none( + self, 
connection: AsyncConnection | None = None, *, email: str + ) -> UserRow | None: + async with pass_or_acquire_connection(self._engine, connection) as conn: + result = await conn.execute( + sa.select(*self._user_columns).where(users.c.email == email.lower()) + ) + row = result.one_or_none() + return UserRow.from_row(row) if row else None - @staticmethod - async def get_email(conn: DBConnection, user_id: int) -> str: - value: str | None = await conn.scalar( - sa.select(users.c.email).where(users.c.id == user_id) - ) - if value: - assert isinstance(value, str) # nosec - return value + async def get_user_by_id_or_none( + self, connection: AsyncConnection | None = None, *, user_id: int + ) -> UserRow | None: + async with pass_or_acquire_connection(self._engine, connection) as conn: + result = await conn.execute( + sa.select(*self._user_columns).where(users.c.id == user_id) + ) + row = result.one_or_none() + return UserRow.from_row(row) if row else None - raise UserNotFoundInRepoError + async def update_user_phone( + self, connection: AsyncConnection | None = None, *, user_id: int, phone: str + ) -> None: + async with transaction_context(self._engine, connection) as conn: + await conn.execute( + users.update().where(users.c.id == user_id).values(phone=phone) + ) - @staticmethod - async def get_active_user_email(conn: DBConnection, user_id: int) -> str: - value: str | None = await conn.scalar( - sa.select(users.c.email).where( - (users.c.status == UserStatus.ACTIVE) & (users.c.id == user_id) + async def update_user_password_hash( + self, + connection: AsyncConnection | None = None, + *, + user_id: int, + password_hash: str, + ) -> None: + async with transaction_context(self._engine, connection) as conn: + await self.get_password_hash( + connection=conn, user_id=user_id + ) # ensure user exists + await conn.execute( + users_secrets.update() + .where(users_secrets.c.user_id == user_id) + .values(password_hash=password_hash) ) - ) - if value is not None: - assert isinstance(value, str) # nosec - return value - raise UserNotFoundInRepoError + async def is_email_used( + self, connection: AsyncConnection | None = None, *, email: str + ) -> bool: - @staticmethod - async def is_email_used(conn: DBConnection, email: str) -> bool: - email = email.lower() + async with pass_or_acquire_connection(self._engine, connection) as conn: - registered = await conn.scalar( - sa.select(users.c.id).where(users.c.email == email) - ) - if registered: - return True + email = email.lower() - pre_registered = await conn.scalar( - sa.select(users_pre_registration_details.c.user_id).where( - users_pre_registration_details.c.pre_email == email + registered = await conn.scalar( + sa.select(users.c.id).where(users.c.email == email) ) - ) - return bool(pre_registered) + if registered: + return True + + # Check if email exists in pre-registration, regardless of user_id status + pre_registered = await conn.scalar( + sa.select(users_pre_registration_details.c.id).where( + users_pre_registration_details.c.pre_email == email + ) + ) + return bool(pre_registered) + + async def get_billing_details( + self, + connection: AsyncConnection | None = None, + *, + product_name: str, + user_id: int, + ) -> Any | None: + """Returns billing details for the specified user and product. + + - If the user is registered without a product, returns details for that registration. + - Returns None if no billing details are found. 
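+        - When several pre-registrations match, the most recent one (by creation date) is used.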
+ """ + async with pass_or_acquire_connection(self._engine, connection) as conn: + result = await conn.execute( + sa.select( + users.c.first_name, + users.c.last_name, + users_pre_registration_details.c.institution, + users_pre_registration_details.c.address, + users_pre_registration_details.c.city, + users_pre_registration_details.c.state, + users_pre_registration_details.c.country, + users_pre_registration_details.c.postal_code, + users.c.phone, + ) + .select_from( + users.join( + users_pre_registration_details, + users.c.id == users_pre_registration_details.c.user_id, + ) + ) + .where( + (users.c.id == user_id) + & ( + (users_pre_registration_details.c.product_name == product_name) + | (users_pre_registration_details.c.product_name.is_(None)) + ) + ) + .order_by(users_pre_registration_details.c.created.desc()) + .limit(1) + # NOTE: might want to copy billing details to users table?? + ) + return result.one_or_none() # diff --git a/packages/postgres-database/tests/conftest.py b/packages/postgres-database/tests/conftest.py index fdac39729b6b..9016cff0d322 100644 --- a/packages/postgres-database/tests/conftest.py +++ b/packages/postgres-database/tests/conftest.py @@ -18,11 +18,10 @@ from aiopg.sa.engine import Engine from aiopg.sa.result import ResultProxy, RowProxy from faker import Faker -from pytest_simcore.helpers import postgres_tools +from pytest_simcore.helpers import postgres_tools, postgres_users from pytest_simcore.helpers.faker_factories import ( random_group, random_project, - random_user, ) from simcore_postgres_database.models.products import products from simcore_postgres_database.models.projects import projects @@ -82,7 +81,7 @@ def sync_engine(postgres_service: str) -> Iterable[sqlalchemy.engine.Engine]: def _make_asyncpg_engine(postgres_service: str) -> Callable[[bool], AsyncEngine]: # NOTE: users is responsible of `await engine.dispose()` dsn = postgres_service.replace("postgresql://", "postgresql+asyncpg://") - minsize = 1 + minsize = 2 maxsize = 50 def _(echo: bool): @@ -268,10 +267,11 @@ def create_fake_user(sync_engine: sqlalchemy.engine.Engine) -> Iterator[Callable async def _creator( conn: SAConnection, group: RowProxy | None = None, **overrides ) -> RowProxy: - user_id = await conn.scalar( - users.insert().values(**random_user(**overrides)).returning(users.c.id) + + user_id = await postgres_users.insert_user_and_secrets( + conn, + **overrides, ) - assert user_id is not None # This is done in two executions instead of one (e.g. 
returning(literal_column("*")) ) # to allow triggering function in db that diff --git a/packages/postgres-database/tests/test_models_api_keys.py b/packages/postgres-database/tests/test_models_api_keys.py index d8863f9ac748..d4852d199d6c 100644 --- a/packages/postgres-database/tests/test_models_api_keys.py +++ b/packages/postgres-database/tests/test_models_api_keys.py @@ -9,10 +9,10 @@ import sqlalchemy as sa from aiopg.sa.connection import SAConnection from aiopg.sa.result import RowProxy +from pytest_simcore.helpers import postgres_users from pytest_simcore.helpers.faker_factories import ( random_api_auth, random_product, - random_user, ) from simcore_postgres_database.models.api_keys import api_keys from simcore_postgres_database.models.products import products @@ -21,13 +21,12 @@ @pytest.fixture async def user_id(connection: SAConnection) -> AsyncIterable[int]: - uid = await connection.scalar( - users.insert().values(random_user()).returning(users.c.id) - ) - assert uid - yield uid + user_id = await postgres_users.insert_user_and_secrets(connection) + + assert user_id + yield user_id - await connection.execute(users.delete().where(users.c.id == uid)) + await connection.execute(users.delete().where(users.c.id == user_id)) @pytest.fixture @@ -84,7 +83,10 @@ async def test_get_session_identity_for_api_server( # authorize a session # result = await connection.execute( - sa.select(api_keys.c.user_id, api_keys.c.product_name,).where( + sa.select( + api_keys.c.user_id, + api_keys.c.product_name, + ).where( (api_keys.c.api_key == session_auth.api_key) & (api_keys.c.api_secret == session_auth.api_secret), ) diff --git a/packages/postgres-database/tests/test_models_groups.py b/packages/postgres-database/tests/test_models_groups.py index 6ce8a77c4cc3..a3c5ad154a30 100644 --- a/packages/postgres-database/tests/test_models_groups.py +++ b/packages/postgres-database/tests/test_models_groups.py @@ -10,7 +10,7 @@ from aiopg.sa.connection import SAConnection from aiopg.sa.result import ResultProxy, RowProxy from psycopg2.errors import ForeignKeyViolation, RaiseException, UniqueViolation -from pytest_simcore.helpers.faker_factories import random_user +from pytest_simcore.helpers import postgres_users from simcore_postgres_database.webserver_models import ( GroupType, groups, @@ -64,9 +64,8 @@ async def test_all_group( await connection.execute(groups.delete().where(groups.c.gid == all_group_gid)) # check adding a user is automatically added to the all group - result = await connection.execute( - users.insert().values(**random_user()).returning(literal_column("*")) - ) + user_id = await postgres_users.insert_user_and_secrets(connection) + result = await connection.execute(users.select().where(users.c.id == user_id)) user: RowProxy = await result.fetchone() result = await connection.execute( @@ -98,14 +97,10 @@ async def test_all_group( async def test_own_group( connection: SAConnection, ): - result = await connection.execute( - users.insert().values(**random_user()).returning(literal_column("*")) - ) - user: RowProxy = await result.fetchone() - assert not user.primary_gid + user_id = await postgres_users.insert_user_and_secrets(connection) # now fetch the same user that shall have a primary group set by the db - result = await connection.execute(users.select().where(users.c.id == user.id)) + result = await connection.execute(users.select().where(users.c.id == user_id)) user: RowProxy = await result.fetchone() assert user.primary_gid diff --git 
a/packages/postgres-database/tests/test_models_projects_to_jobs.py b/packages/postgres-database/tests/test_models_projects_to_jobs.py index d6f2879694d4..5b6436023d28 100644 --- a/packages/postgres-database/tests/test_models_projects_to_jobs.py +++ b/packages/postgres-database/tests/test_models_projects_to_jobs.py @@ -3,6 +3,7 @@ # pylint: disable=unused-variable # pylint: disable=too-many-arguments +import json from collections.abc import Iterator import pytest @@ -10,12 +11,12 @@ import sqlalchemy as sa import sqlalchemy.engine import sqlalchemy.exc +from common_library.users_enums import UserRole from faker import Faker from pytest_simcore.helpers import postgres_tools from pytest_simcore.helpers.faker_factories import random_project, random_user -from simcore_postgres_database.models.projects import projects +from simcore_postgres_database.models.projects import ProjectType, projects from simcore_postgres_database.models.projects_to_jobs import projects_to_jobs -from simcore_postgres_database.models.users import users @pytest.fixture @@ -66,9 +67,24 @@ def test_populate_projects_to_jobs_during_migration( # INSERT data (emulates data in-place) user_data = random_user( - faker, name="test_populate_projects_to_jobs_during_migration" + faker, + name="test_populate_projects_to_jobs_during_migration", + role=UserRole.USER.value, ) - stmt = users.insert().values(**user_data).returning(users.c.id) + user_data["password_hash"] = ( + "password_hash_was_still_here_at_this_migration_commit" # noqa: S105 + ) + + columns = list(user_data.keys()) + values_clause = ", ".join(f":{col}" for col in columns) + columns_clause = ", ".join(columns) + stmt = sa.text( + f""" + INSERT INTO users ({columns_clause}) + VALUES ({values_clause}) + RETURNING id + """ # noqa: S608 + ).bindparams(**user_data) result = conn.execute(stmt) user_id = result.scalar() @@ -82,7 +98,7 @@ def test_populate_projects_to_jobs_during_migration( "Study associated to solver job:" """{ "id": "cd03450c-4c17-4c2c-85fd-0d951d7dcd5a", - "name": "solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.2.1/jobs/cd03450c-4c2c-85fd-0d951d7dcd5a", + "name": "solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.2.1/jobs/cd03450c-4c17-4c2c-85fd-0d951d7dcd5a", "inputs_checksum": "015ba4cd5cf00c511a8217deb65c242e3b15dc6ae4b1ecf94982d693887d9e8a", "created_at": "2025-01-27T13:12:58.676564Z" } @@ -105,8 +121,37 @@ def test_populate_projects_to_jobs_during_migration( prj_owner=user_id, ), ] + + client_default_column_values = { + # NOTE: columns with `server_default values` must not be added here + "type": ProjectType.STANDARD.value, + "workbench": {}, + "access_rights": {}, + "published": False, + "hidden": False, + "workspace_id": None, + } + + # NOTE: cannot use `projects` table directly here because it changes + # throughout time for prj in projects_data: - conn.execute(sa.insert(projects).values(prj)) + for key, value in client_default_column_values.items(): + prj.setdefault(key, value) + + for key, value in prj.items(): + if isinstance(value, dict): + prj[key] = json.dumps(value) + + columns = list(prj.keys()) + values_clause = ", ".join(f":{col}" for col in columns) + columns_clause = ", ".join(columns) + stmt = sa.text( + f""" + INSERT INTO projects ({columns_clause}) + VALUES ({values_clause}) + """ # noqa: S608 + ).bindparams(**prj) + conn.execute(stmt) # MIGRATE UPGRADE: this should populate simcore_postgres_database.cli.upgrade.callback("head") diff --git a/packages/postgres-database/tests/test_users.py 
b/packages/postgres-database/tests/test_users.py index 8bfe2814ada1..038f9a53fa64 100644 --- a/packages/postgres-database/tests/test_users.py +++ b/packages/postgres-database/tests/test_users.py @@ -3,35 +3,41 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable +from collections.abc import Iterator from datetime import datetime, timedelta import pytest +import simcore_postgres_database.cli import sqlalchemy as sa -from aiopg.sa.connection import SAConnection -from aiopg.sa.result import ResultProxy, RowProxy +import sqlalchemy.engine +import sqlalchemy.exc from faker import Faker +from pytest_simcore.helpers import postgres_tools from pytest_simcore.helpers.faker_factories import random_user -from simcore_postgres_database.aiopg_errors import ( - InvalidTextRepresentation, - UniqueViolation, -) from simcore_postgres_database.models.users import UserRole, UserStatus, users +from simcore_postgres_database.utils_repos import ( + pass_or_acquire_connection, + transaction_context, +) from simcore_postgres_database.utils_users import ( UsersRepo, _generate_username_from_email, generate_alternative_username, ) +from sqlalchemy.exc import DBAPIError, IntegrityError +from sqlalchemy.ext.asyncio import AsyncEngine from sqlalchemy.sql import func @pytest.fixture -async def clean_users_db_table(connection: SAConnection): +async def clean_users_db_table(asyncpg_engine: AsyncEngine): yield - await connection.execute(users.delete()) + async with transaction_context(asyncpg_engine) as connection: + await connection.execute(users.delete()) async def test_user_status_as_pending( - connection: SAConnection, faker: Faker, clean_users_db_table: None + asyncpg_engine: AsyncEngine, faker: Faker, clean_users_db_table: None ): """Checks a bug where the expression @@ -51,10 +57,13 @@ async def test_user_status_as_pending( # tests that the database never stores the word "PENDING" data = random_user(faker, status="PENDING") assert data["status"] == "PENDING" - with pytest.raises(InvalidTextRepresentation) as err_info: - await connection.execute(users.insert().values(data)) + async with transaction_context(asyncpg_engine) as connection: + with pytest.raises(DBAPIError) as err_info: + await connection.execute(users.insert().values(data)) - assert 'invalid input value for enum userstatus: "PENDING"' in f"{err_info.value}" + assert ( + 'invalid input value for enum userstatus: "PENDING"' in f"{err_info.value}" + ) @pytest.mark.parametrize( @@ -66,27 +75,30 @@ async def test_user_status_as_pending( ) async def test_user_status_inserted_as_enum_or_int( status_value: UserStatus | str, - connection: SAConnection, + asyncpg_engine: AsyncEngine, faker: Faker, clean_users_db_table: None, ): # insert as `status_value` data = random_user(faker, status=status_value) assert data["status"] == status_value - user_id = await connection.scalar(users.insert().values(data).returning(users.c.id)) - # get as UserStatus.CONFIRMATION_PENDING - user = await ( - await connection.execute(users.select().where(users.c.id == user_id)) - ).first() - assert user + async with transaction_context(asyncpg_engine) as connection: + user_id = await connection.scalar( + users.insert().values(data).returning(users.c.id) + ) - assert UserStatus(user.status) == UserStatus.CONFIRMATION_PENDING - assert user.status == UserStatus.CONFIRMATION_PENDING + # get as UserStatus.CONFIRMATION_PENDING + result = await connection.execute(users.select().where(users.c.id == user_id)) + user = result.one_or_none() + assert user + + assert 
UserStatus(user.status) == UserStatus.CONFIRMATION_PENDING + assert user.status == UserStatus.CONFIRMATION_PENDING async def test_unique_username( - connection: SAConnection, faker: Faker, clean_users_db_table: None + asyncpg_engine: AsyncEngine, faker: Faker, clean_users_db_table: None ): data = random_user( faker, @@ -96,33 +108,39 @@ async def test_unique_username( first_name="Pedro", last_name="Crespo Valero", ) - user_id = await connection.scalar(users.insert().values(data).returning(users.c.id)) - user = await ( - await connection.execute(users.select().where(users.c.id == user_id)) - ).first() - assert user - - assert user.id == user_id - assert user.name == "pcrespov" - - # same name fails - data["email"] = faker.email() - with pytest.raises(UniqueViolation): + async with transaction_context(asyncpg_engine) as connection: + user_id = await connection.scalar( + users.insert().values(data).returning(users.c.id) + ) + result = await connection.execute(users.select().where(users.c.id == user_id)) + user = result.one_or_none() + assert user + + assert user.id == user_id + assert user.name == "pcrespov" + + async with transaction_context(asyncpg_engine) as connection: + # same name fails + data["email"] = faker.email() + with pytest.raises(IntegrityError): + await connection.scalar(users.insert().values(data).returning(users.c.id)) + + async with transaction_context(asyncpg_engine) as connection: + # generate new name + data["name"] = _generate_username_from_email(user.email) + data["email"] = faker.email() await connection.scalar(users.insert().values(data).returning(users.c.id)) - # generate new name - data["name"] = _generate_username_from_email(user.email) - data["email"] = faker.email() - await connection.scalar(users.insert().values(data).returning(users.c.id)) + async with transaction_context(asyncpg_engine) as connection: - # and another one - data["name"] = generate_alternative_username(data["name"]) - data["email"] = faker.email() - await connection.scalar(users.insert().values(data).returning(users.c.id)) + # and another one + data["name"] = generate_alternative_username(data["name"]) + data["email"] = faker.email() + await connection.scalar(users.insert().values(data).returning(users.c.id)) async def test_new_user( - connection: SAConnection, faker: Faker, clean_users_db_table: None + asyncpg_engine: AsyncEngine, faker: Faker, clean_users_db_table: None ): data = { "email": faker.email(), @@ -130,7 +148,8 @@ async def test_new_user( "status": UserStatus.ACTIVE, "expires_at": datetime.utcnow(), } - new_user = await UsersRepo.new_user(connection, **data) + repo = UsersRepo(asyncpg_engine) + new_user = await repo.new_user(**data) assert new_user.email == data["email"] assert new_user.status == data["status"] @@ -140,51 +159,205 @@ async def test_new_user( assert _generate_username_from_email(other_email) == new_user.name other_data = {**data, "email": other_email} - other_user = await UsersRepo.new_user(connection, **other_data) + other_user = await repo.new_user(**other_data) assert other_user.email != new_user.email assert other_user.name != new_user.name - assert await UsersRepo.get_email(connection, other_user.id) == other_user.email - assert await UsersRepo.get_role(connection, other_user.id) == other_user.role - assert ( - await UsersRepo.get_active_user_email(connection, other_user.id) - == other_user.email - ) + async with pass_or_acquire_connection(asyncpg_engine) as connection: + assert ( + await repo.get_email(connection, user_id=other_user.id) == other_user.email + ) 
+ assert await repo.get_role(connection, user_id=other_user.id) == other_user.role + assert ( + await repo.get_active_user_email(connection, user_id=other_user.id) + == other_user.email + ) -async def test_trial_accounts(connection: SAConnection, clean_users_db_table: None): +async def test_trial_accounts(asyncpg_engine: AsyncEngine, clean_users_db_table: None): EXPIRATION_INTERVAL = timedelta(minutes=5) # creates trial user client_now = datetime.utcnow() - user_id: int | None = await connection.scalar( - users.insert() - .values( - **random_user( - status=UserStatus.ACTIVE, - # Using some magic from sqlachemy ... - expires_at=func.now() + EXPIRATION_INTERVAL, + async with transaction_context(asyncpg_engine) as connection: + user_id: int | None = await connection.scalar( + users.insert() + .values( + **random_user( + status=UserStatus.ACTIVE, + # Using some magic from sqlachemy ... + expires_at=func.now() + EXPIRATION_INTERVAL, + ) ) + .returning(users.c.id) ) - .returning(users.c.id) - ) - assert user_id + assert user_id - # check expiration date - result: ResultProxy = await connection.execute( - sa.select(users.c.status, users.c.created_at, users.c.expires_at).where( - users.c.id == user_id + # check expiration date + result = await connection.execute( + sa.select(users.c.status, users.c.created_at, users.c.expires_at).where( + users.c.id == user_id + ) ) + row = result.one_or_none() + assert row + assert row.created_at - client_now < timedelta( + minutes=1 + ), "Difference between server and client now should not differ much" + assert row.expires_at - row.created_at == EXPIRATION_INTERVAL + assert row.status == UserStatus.ACTIVE + + # sets user as expired + await connection.execute( + users.update() + .values(status=UserStatus.EXPIRED) + .where(users.c.id == user_id) + ) + + +@pytest.fixture +def sync_engine_with_migration( + sync_engine: sqlalchemy.engine.Engine, db_metadata: sa.MetaData +) -> Iterator[sqlalchemy.engine.Engine]: + # EXTENDS sync_engine fixture to include cleanup and prepare migration + + # cleanup tables + db_metadata.drop_all(sync_engine) + + # prepare migration upgrade + assert simcore_postgres_database.cli.discover.callback + assert simcore_postgres_database.cli.upgrade.callback + + dsn = sync_engine.url + simcore_postgres_database.cli.discover.callback( + user=dsn.username, + password=dsn.password, + host=dsn.host, + database=dsn.database, + port=dsn.port, ) - row: RowProxy | None = await result.first() - assert row - assert row.created_at - client_now < timedelta( - minutes=1 - ), "Difference between server and client now should not differ much" - assert row.expires_at - row.created_at == EXPIRATION_INTERVAL - assert row.status == UserStatus.ACTIVE - - # sets user as expired - await connection.execute( - users.update().values(status=UserStatus.EXPIRED).where(users.c.id == user_id) - ) + + yield sync_engine + + # cleanup tables + postgres_tools.force_drop_all_tables(sync_engine) + + +def test_users_secrets_migration_upgrade_downgrade( + sync_engine_with_migration: sqlalchemy.engine.Engine, faker: Faker +): + """Tests the migration script that moves password_hash from users to users_secrets table. + + + testing + packages/postgres-database/src/simcore_postgres_database/migration/versions/5679165336c8_new_users_secrets.py + + revision = "5679165336c8" + down_revision = "61b98a60e934" + + + NOTE: all statements in conn.execute(...) must be sa.text(...) 
since at that migration point the schemas of the + code models might not be the same + """ + assert simcore_postgres_database.cli.discover.callback + assert simcore_postgres_database.cli.upgrade.callback + assert simcore_postgres_database.cli.downgrade.callback + + # UPGRADE just one before 5679165336c8_new_users_secrets.py + simcore_postgres_database.cli.upgrade.callback("61b98a60e934") + + with sync_engine_with_migration.connect() as conn: + # Ensure the users_secrets table does NOT exist yet + with pytest.raises(sqlalchemy.exc.ProgrammingError) as exc_info: + conn.execute( + sa.select(sa.func.count()).select_from(sa.table("users_secrets")) + ).scalar() + assert "psycopg2.errors.UndefinedTable" in f"{exc_info.value}" + + # INSERT users with password hashes (emulates data in-place before migration) + users_data_with_hashed_password = [ + { + **random_user( + faker, + name="user_with_password_1", + email="user1@example.com", + role=UserRole.USER.value, + status=UserStatus.ACTIVE, + ), + "password_hash": "hashed_password_1", # noqa: S106 + }, + { + **random_user( + faker, + name="user_with_password_2", + email="user2@example.com", + role=UserRole.USER.value, + status=UserStatus.ACTIVE, + ), + "password_hash": "hashed_password_2", # noqa: S106 + }, + ] + + inserted_user_ids = [] + for user_data in users_data_with_hashed_password: + columns = ", ".join(user_data.keys()) + values_placeholders = ", ".join(f":{key}" for key in user_data) + user_id = conn.execute( + sa.text( + f"INSERT INTO users ({columns}) VALUES ({values_placeholders}) RETURNING id" # noqa: S608 + ), + user_data, + ).scalar() + inserted_user_ids.append(user_id) + + # Verify password hashes are in users table + result = conn.execute( + sa.text("SELECT id, password_hash FROM users WHERE id = ANY(:user_ids)"), + {"user_ids": inserted_user_ids}, + ).fetchall() + + password_hashes_before = {row.id: row.password_hash for row in result} + assert len(password_hashes_before) == 2 + assert password_hashes_before[inserted_user_ids[0]] == "hashed_password_1" + assert password_hashes_before[inserted_user_ids[1]] == "hashed_password_2" + + # MIGRATE UPGRADE: this should move password hashes to users_secrets + # packages/postgres-database/src/simcore_postgres_database/migration/versions/5679165336c8_new_users_secrets.py + simcore_postgres_database.cli.upgrade.callback("5679165336c8") + + with sync_engine_with_migration.connect() as conn: + # Verify users_secrets table exists and contains the password hashes + result = conn.execute( + sa.text("SELECT user_id, password_hash FROM users_secrets ORDER BY user_id") + ).fetchall() + + # Only users with non-null password hashes should be in users_secrets + assert len(result) == 2 + secrets_data = {row.user_id: row.password_hash for row in result} + assert secrets_data[inserted_user_ids[0]] == "hashed_password_1" + assert secrets_data[inserted_user_ids[1]] == "hashed_password_2" + + # Verify password_hash column is removed from users table + with pytest.raises(sqlalchemy.exc.ProgrammingError) as exc_info: + conn.execute(sa.text("SELECT password_hash FROM users")) + assert "psycopg2.errors.UndefinedColumn" in f"{exc_info.value}" + + # MIGRATE DOWNGRADE: this should move password hashes back to users + simcore_postgres_database.cli.downgrade.callback("61b98a60e934") + + with sync_engine_with_migration.connect() as conn: + # Verify users_secrets table no longer exists + with pytest.raises(sqlalchemy.exc.ProgrammingError) as exc_info: + conn.execute(sa.text("SELECT COUNT(*) FROM users_secrets")).scalar() 
+ assert "psycopg2.errors.UndefinedTable" in f"{exc_info.value}" + + # Verify password hashes are back in users table + result = conn.execute( + sa.text("SELECT id, password_hash FROM users WHERE id = ANY(:user_ids)"), + {"user_ids": inserted_user_ids}, + ).fetchall() + + password_hashes_after = {row.id: row.password_hash for row in result} + assert len(password_hashes_after) == 2 + assert password_hashes_after[inserted_user_ids[0]] == "hashed_password_1" + assert password_hashes_after[inserted_user_ids[1]] == "hashed_password_2" diff --git a/packages/postgres-database/tests/test_users_details.py b/packages/postgres-database/tests/test_users_details.py index e4b6bfeb70fc..8c0f84d1b33e 100644 --- a/packages/postgres-database/tests/test_users_details.py +++ b/packages/postgres-database/tests/test_users_details.py @@ -257,15 +257,18 @@ async def test_create_and_link_user_from_pre_registration( # Invitation link is clicked and the user is created and linked to the pre-registration async with transaction_context(asyncpg_engine) as connection: # user gets created - new_user = await UsersRepo.new_user( + repo = UsersRepo(asyncpg_engine) + new_user = await repo.new_user( connection, email=pre_email, password_hash="123456", # noqa: S106 status=UserStatus.ACTIVE, expires_at=None, ) - await UsersRepo.link_and_update_user_from_pre_registration( - connection, new_user_id=new_user.id, new_user_email=new_user.email + await repo.link_and_update_user_from_pre_registration( + connection, + new_user_id=new_user.id, + new_user_email=new_user.email, ) # Verify the user was created and linked @@ -285,29 +288,32 @@ async def test_create_and_link_user_from_pre_registration( async def test_get_billing_details_from_pre_registration( asyncpg_engine: AsyncEngine, pre_registered_user: tuple[str, dict[str, Any]], + product: dict[str, Any], ): """Test that billing details can be retrieved from pre-registration data.""" pre_email, fake_pre_registration_data = pre_registered_user # Create the user async with transaction_context(asyncpg_engine) as connection: - new_user = await UsersRepo.new_user( + repo = UsersRepo(asyncpg_engine) + new_user = await repo.new_user( connection, email=pre_email, password_hash="123456", # noqa: S106 status=UserStatus.ACTIVE, expires_at=None, ) - await UsersRepo.link_and_update_user_from_pre_registration( - connection, new_user_id=new_user.id, new_user_email=new_user.email + await repo.link_and_update_user_from_pre_registration( + connection, + new_user_id=new_user.id, + new_user_email=new_user.email, ) # Get billing details - async with pass_or_acquire_connection(asyncpg_engine) as connection: - invoice_data = await UsersRepo.get_billing_details( - connection, user_id=new_user.id - ) - assert invoice_data is not None + invoice_data = await repo.get_billing_details( + user_id=new_user.id, product_name=product["name"] + ) + assert invoice_data is not None # Test UserAddress model conversion user_address = UserAddress.create_from_db(invoice_data) @@ -331,15 +337,18 @@ async def test_update_user_from_pre_registration( # Create the user and link to pre-registration async with transaction_context(asyncpg_engine) as connection: - new_user = await UsersRepo.new_user( + repo = UsersRepo(asyncpg_engine) + new_user = await repo.new_user( connection, email=pre_email, password_hash="123456", # noqa: S106 status=UserStatus.ACTIVE, expires_at=None, ) - await UsersRepo.link_and_update_user_from_pre_registration( - connection, new_user_id=new_user.id, new_user_email=new_user.email + await 
repo.link_and_update_user_from_pre_registration( + connection, + new_user_id=new_user.id, + new_user_email=new_user.email, ) # Update the user manually @@ -358,8 +367,11 @@ async def test_update_user_from_pre_registration( # Re-link the user to pre-registration, which should override manual updates async with transaction_context(asyncpg_engine) as connection: - await UsersRepo.link_and_update_user_from_pre_registration( - connection, new_user_id=new_user.id, new_user_email=new_user.email + repo = UsersRepo(asyncpg_engine) + await repo.link_and_update_user_from_pre_registration( + connection, + new_user_id=new_user.id, + new_user_email=new_user.email, ) result = await connection.execute( @@ -487,20 +499,24 @@ async def test_user_preregisters_for_multiple_products_with_different_outcomes( assert registrations[1].account_request_reviewed_by == product_owner_user["id"] assert registrations[1].account_request_reviewed_at is not None - # 3.Now create a user account with the approved pre-registration + # 3. Now create a user account and link ALL pre-registrations for this email async with transaction_context(asyncpg_engine) as connection: - new_user = await UsersRepo.new_user( + repo = UsersRepo(asyncpg_engine) + new_user = await repo.new_user( connection, email=user_email, password_hash="123456", # noqa: S106 status=UserStatus.ACTIVE, expires_at=None, ) - await UsersRepo.link_and_update_user_from_pre_registration( - connection, new_user_id=new_user.id, new_user_email=new_user.email + # Link all pre-registrations for this email, regardless of approval status or product + await repo.link_and_update_user_from_pre_registration( + connection, + new_user_id=new_user.id, + new_user_email=new_user.email, ) - # Verify both pre-registrations are linked to the new user + # Verify ALL pre-registrations for this email are linked to the user async with pass_or_acquire_connection(asyncpg_engine) as connection: result = await connection.execute( sa.select( @@ -515,5 +531,17 @@ async def test_user_preregisters_for_multiple_products_with_different_outcomes( registrations = result.fetchall() assert len(registrations) == 2 - # Both registrations should be linked to the same user, regardless of approval status - assert all(reg.user_id == new_user.id for reg in registrations) + # Both pre-registrations should be linked to the user, regardless of approval status + product1_reg = next( + reg for reg in registrations if reg.product_name == product1["name"] + ) + product2_reg = next( + reg for reg in registrations if reg.product_name == product2["name"] + ) + + assert product1_reg.user_id == new_user.id # Linked + assert product2_reg.user_id == new_user.id # Linked + + # Verify approval status is preserved independently of linking + assert product1_reg.account_request_status == AccountRequestStatus.APPROVED + assert product2_reg.account_request_status == AccountRequestStatus.REJECTED diff --git a/packages/postgres-database/tests/test_utils_comp_runs.py b/packages/postgres-database/tests/test_utils_comp_runs.py index dc18abd8395e..97f33bbdc18d 100644 --- a/packages/postgres-database/tests/test_utils_comp_runs.py +++ b/packages/postgres-database/tests/test_utils_comp_runs.py @@ -4,13 +4,14 @@ import pytest import sqlalchemy as sa +from faker import Faker from simcore_postgres_database.models.comp_runs import comp_runs from simcore_postgres_database.utils_comp_runs import get_latest_run_id_for_project from sqlalchemy.ext.asyncio import AsyncEngine @pytest.fixture -async def sample_comp_runs(asyncpg_engine: AsyncEngine): 
+async def sample_comp_runs(asyncpg_engine: AsyncEngine, faker: Faker): async with asyncpg_engine.begin() as conn: await conn.execute(sa.text("SET session_replication_role = replica;")) await conn.execute(sa.delete(comp_runs)) @@ -37,6 +38,7 @@ async def sample_comp_runs(asyncpg_engine: AsyncEngine): "metadata": None, "use_on_demand_clusters": False, "dag_adjacency_list": {}, + "collection_run_id": faker.uuid4(), }, { "run_id": 2, @@ -58,6 +60,7 @@ async def sample_comp_runs(asyncpg_engine: AsyncEngine): "metadata": None, "use_on_demand_clusters": False, "dag_adjacency_list": {}, + "collection_run_id": faker.uuid4(), }, { "run_id": 3, @@ -79,6 +82,7 @@ async def sample_comp_runs(asyncpg_engine: AsyncEngine): "metadata": None, "use_on_demand_clusters": False, "dag_adjacency_list": {}, + "collection_run_id": faker.uuid4(), }, { "run_id": 4, @@ -100,6 +104,7 @@ async def sample_comp_runs(asyncpg_engine: AsyncEngine): "metadata": None, "use_on_demand_clusters": False, "dag_adjacency_list": {}, + "collection_run_id": faker.uuid4(), }, ], ) diff --git a/packages/postgres-database/tests/test_utils_users.py b/packages/postgres-database/tests/test_utils_users.py index d4a7039f1f3e..0f61ba27ed9d 100644 --- a/packages/postgres-database/tests/test_utils_users.py +++ b/packages/postgres-database/tests/test_utils_users.py @@ -8,14 +8,13 @@ from typing import Any import pytest +import sqlalchemy as sa from faker import Faker -from pytest_simcore.helpers.faker_factories import ( - random_user, +from pytest_simcore.helpers.postgres_users import ( + insert_and_get_user_and_secrets_lifespan, ) -from pytest_simcore.helpers.postgres_tools import ( - insert_and_get_row_lifespan, -) -from simcore_postgres_database.models.users import UserRole, users +from simcore_postgres_database.models.users import UserRole +from simcore_postgres_database.models.users_secrets import users_secrets from simcore_postgres_database.utils_repos import ( pass_or_acquire_connection, ) @@ -28,24 +27,71 @@ async def user( faker: Faker, asyncpg_engine: AsyncEngine, ) -> AsyncIterable[dict[str, Any]]: - async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup + async with insert_and_get_user_and_secrets_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup asyncpg_engine, - table=users, - values=random_user( - faker, - role=faker.random_element(elements=UserRole), - ), - pk_col=users.c.id, - ) as row: - yield row + role=faker.random_element(elements=UserRole), + ) as user_and_secrets_row: + yield user_and_secrets_row async def test_users_repo_get(asyncpg_engine: AsyncEngine, user: dict[str, Any]): - repo = UsersRepo() + repo = UsersRepo(asyncpg_engine) async with pass_or_acquire_connection(asyncpg_engine) as connection: assert await repo.get_email(connection, user_id=user["id"]) == user["email"] assert await repo.get_role(connection, user_id=user["id"]) == user["role"] + assert ( + await repo.get_password_hash(connection, user_id=user["id"]) + == user["password_hash"] + ) + assert ( + await repo.get_active_user_email(connection, user_id=user["id"]) + == user["email"] + ) with pytest.raises(UserNotFoundInRepoError): await repo.get_role(connection, user_id=55) + with pytest.raises(UserNotFoundInRepoError): + await repo.get_email(connection, user_id=55) + with pytest.raises(UserNotFoundInRepoError): + await repo.get_password_hash(connection, user_id=55) + with pytest.raises(UserNotFoundInRepoError): + await repo.get_active_user_email(connection, user_id=55) + + +async def 
test_update_user_password_hash_updates_modified_column( + asyncpg_engine: AsyncEngine, user: dict[str, Any], faker: Faker +): + repo = UsersRepo(asyncpg_engine) + + async with pass_or_acquire_connection(asyncpg_engine) as connection: + # Get initial modified timestamp + result = await connection.execute( + sa.select(users_secrets.c.modified).where( + users_secrets.c.user_id == user["id"] + ) + ) + initial_modified = result.scalar_one() + + # Update password hash + new_password_hash = faker.password() + await repo.update_user_password_hash( + connection, user_id=user["id"], password_hash=new_password_hash + ) + + # Get updated modified timestamp + result = await connection.execute( + sa.select(users_secrets.c.modified).where( + users_secrets.c.user_id == user["id"] + ) + ) + updated_modified = result.scalar_one() + + # Verify modified timestamp changed + assert updated_modified > initial_modified + + # Verify password hash was actually updated + assert ( + await repo.get_password_hash(connection, user_id=user["id"]) + == new_password_hash + ) diff --git a/packages/pytest-simcore/pyproject.toml b/packages/pytest-simcore/pyproject.toml deleted file mode 100644 index 4bffe2cb6a8f..000000000000 --- a/packages/pytest-simcore/pyproject.toml +++ /dev/null @@ -1,9 +0,0 @@ -[project] -name = "pytest-simcore" -version = "0.1.0" -requires-python = ">=3.11" -dependencies = [ - "fastapi[standard]>=0.115.12", - "python-socketio>=5.12.1", - "uvicorn>=0.34.0", -] diff --git a/packages/pytest-simcore/src/pytest_simcore/__init__.py b/packages/pytest-simcore/src/pytest_simcore/__init__.py index 8716d997ef21..7a7da935aadc 100644 --- a/packages/pytest-simcore/src/pytest_simcore/__init__.py +++ b/packages/pytest-simcore/src/pytest_simcore/__init__.py @@ -25,7 +25,7 @@ def keep_docker_up(request: pytest.FixtureRequest) -> bool: return flag -@pytest.fixture +@pytest.fixture(scope="session") def is_pdb_enabled(request: pytest.FixtureRequest): """Returns true if tests are set to use interactive debugger, i.e. --pdb""" options = request.config.option diff --git a/packages/pytest-simcore/src/pytest_simcore/asyncio_event_loops.py b/packages/pytest-simcore/src/pytest_simcore/asyncio_event_loops.py new file mode 100644 index 000000000000..a08a48f0b106 --- /dev/null +++ b/packages/pytest-simcore/src/pytest_simcore/asyncio_event_loops.py @@ -0,0 +1,63 @@ +""" +Our choice of plugin to test asyncio functionality is pytest-asyncio + +Some other pytest plugins, e.g. pytest-aiohttp, define their own event loop +policies and event loops, which can conflict with pytest-asyncio. + +This files unifies the event loop policy and event loop used by pytest-asyncio throughout +all the tests in this repository. + +""" + +import asyncio + +import pytest +import uvloop + + +@pytest.fixture(scope="session") +def event_loop_policy(): + """Override the event loop policy to use uvloop which is the one we use in production + + SEE https://pytest-asyncio.readthedocs.io/en/stable/how-to-guides/uvloop.html + """ + return uvloop.EventLoopPolicy() + + +async def test_using_uvloop_event_loop(): + """Tests that `pytest_simcore.asyncio_event_loops` plugin is used and has an effect + + Manually import and add it your test-suite to run this test. + """ + assert isinstance(asyncio.get_event_loop_policy(), uvloop.EventLoopPolicy) + + +@pytest.fixture +async def loop() -> asyncio.AbstractEventLoop: + """Override the event loop inside `aiohttp.pytest_plugin` with the one from `pytest-asyncio`. 
+ + This provides the necessary fixtures to use pytest-asyncio with aiohttp!!! + + USAGE: + + pytest_plugins = [ + "aiohttp.pytest_plugin", # No need to install pytest-aiohttp separately + ] + + + ERRORS: + Otherwise error like this will be raised: + + > if connector._loop is not loop: + > raise RuntimeError("Session and connector has to use same event loop") + E RuntimeError: Session and connector has to use same event loop + + .venv/lib/python3.11/site-packages/aiohttp/client.py:375: RuntimeError + + > if connector._loop is not loop: + > raise RuntimeError("Session and connector has to use same event loop") + >E RuntimeError: Session and connector has to use same event loop + + .venv/lib/python3.11/site-packages/aiohttp/client.py:375: RuntimeError + """ + return asyncio.get_running_loop() diff --git a/packages/pytest-simcore/src/pytest_simcore/aws_ec2_service.py b/packages/pytest-simcore/src/pytest_simcore/aws_ec2_service.py index f971ef9b8f7d..efca123aaa30 100644 --- a/packages/pytest-simcore/src/pytest_simcore/aws_ec2_service.py +++ b/packages/pytest-simcore/src/pytest_simcore/aws_ec2_service.py @@ -5,7 +5,7 @@ import contextlib import datetime import random -from collections.abc import AsyncIterator, Callable +from collections.abc import AsyncIterator, Awaitable, Callable from typing import cast import aioboto3 @@ -60,45 +60,79 @@ async def aws_vpc_id( print(f"<-- Deleted Vpc in AWS with {vpc_id=}") -@pytest.fixture(scope="session") -def subnet_cidr_block() -> str: - return "10.0.1.0/24" +@pytest.fixture +def create_subnet_cidr_block(faker: Faker) -> Callable[[], str]: + # Keep track of used subnet numbers to avoid overlaps + used_subnets: set[int] = set() + + def _() -> str: + # Generate subnet CIDR blocks within the VPC range 10.0.0.0/16 + # Using /24 subnets (10.0.X.0/24) where X is between 1-255 + while True: + subnet_number = faker.random_int(min=1, max=255) + if subnet_number not in used_subnets: + used_subnets.add(subnet_number) + return f"10.0.{subnet_number}.0/24" + + return _ @pytest.fixture -async def aws_subnet_id( +def subnet_cidr_block(create_subnet_cidr_block: Callable[[], str]) -> str: + return create_subnet_cidr_block() + + +@pytest.fixture +async def create_aws_subnet_id( aws_vpc_id: str, ec2_client: EC2Client, - subnet_cidr_block: str, -) -> AsyncIterator[str]: - subnet = await ec2_client.create_subnet( - CidrBlock=subnet_cidr_block, VpcId=aws_vpc_id - ) - assert "Subnet" in subnet - assert "SubnetId" in subnet["Subnet"] - subnet_id = subnet["Subnet"]["SubnetId"] - print(f"--> Created Subnet in AWS with {subnet_id=}") + create_subnet_cidr_block: Callable[[], str], +) -> AsyncIterator[Callable[..., Awaitable[str]]]: + created_subnet_ids: set[str] = set() - yield subnet_id + async def _(cidr_override: str | None = None) -> str: + subnet = await ec2_client.create_subnet( + CidrBlock=cidr_override or create_subnet_cidr_block(), VpcId=aws_vpc_id + ) + assert "Subnet" in subnet + assert "SubnetId" in subnet["Subnet"] + subnet_id = subnet["Subnet"]["SubnetId"] + print(f"--> Created Subnet in AWS with {subnet_id=}") + created_subnet_ids.add(subnet_id) + return subnet_id + yield _ + + # cleanup # all the instances in the subnet must be terminated before that works - instances_in_subnet = await ec2_client.describe_instances( - Filters=[{"Name": "subnet-id", "Values": [subnet_id]}] - ) - if instances_in_subnet["Reservations"]: - print(f"--> terminating {len(instances_in_subnet)} instances in subnet") - await ec2_client.terminate_instances( - InstanceIds=[ - 
instance["Instances"][0]["InstanceId"] # type: ignore - for instance in instances_in_subnet["Reservations"] - ] + for subnet_id in created_subnet_ids: + instances_in_subnet = await ec2_client.describe_instances( + Filters=[{"Name": "subnet-id", "Values": [subnet_id]}] ) - print(f"<-- terminated {len(instances_in_subnet)} instances in subnet") + if instances_in_subnet["Reservations"]: + print(f"--> terminating {len(instances_in_subnet)} instances in subnet") + await ec2_client.terminate_instances( + InstanceIds=[ + instance["Instances"][0]["InstanceId"] # type: ignore + for instance in instances_in_subnet["Reservations"] + ] + ) + print(f"<-- terminated {len(instances_in_subnet)} instances in subnet") - await ec2_client.delete_subnet(SubnetId=subnet_id) - subnets = await ec2_client.describe_subnets() - print(f"<-- Deleted Subnet in AWS with {subnet_id=}") - print(f"current {subnets=}") + await ec2_client.delete_subnet(SubnetId=subnet_id) + subnets = await ec2_client.describe_subnets() + print(f"<-- Deleted Subnet in AWS with {subnet_id=}") + print(f"current {subnets=}") + + +@pytest.fixture +async def aws_subnet_id( + aws_vpc_id: str, + ec2_client: EC2Client, + subnet_cidr_block: str, + create_aws_subnet_id: Callable[[], Awaitable[str]], +) -> str: + return await create_aws_subnet_id() @pytest.fixture @@ -133,7 +167,7 @@ def _creator(**overrides) -> EC2InstanceData: return EC2InstanceData( **( { - "launch_time": faker.date_time(tzinfo=datetime.timezone.utc), + "launch_time": faker.date_time(tzinfo=datetime.UTC), "id": faker.uuid4(), "aws_private_dns": f"ip-{faker.ipv4().replace('.', '-')}.ec2.internal", "aws_public_ip": faker.ipv4(), diff --git a/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py b/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py new file mode 100644 index 000000000000..c027bc0cbd46 --- /dev/null +++ b/packages/pytest-simcore/src/pytest_simcore/celery_library_mocks.py @@ -0,0 +1,96 @@ +# pylint: disable=redefined-outer-name + +from collections.abc import Callable + +import pytest +from faker import Faker +from pytest_mock import MockerFixture, MockType +from servicelib.celery.models import TaskStatus, TaskUUID +from servicelib.celery.task_manager import Task, TaskManager + +_faker = Faker() + + +@pytest.fixture +def submit_task_return_value() -> TaskUUID: + return TaskUUID(_faker.uuid4()) + + +@pytest.fixture +def cancel_task_return_value() -> None: + return None + + +@pytest.fixture +def get_task_result_return_value() -> dict: + return {"result": "example"} + + +@pytest.fixture +def get_task_status_return_value() -> TaskStatus: + example = TaskStatus.model_json_schema()["examples"][0] + return TaskStatus.model_validate(example) + + +@pytest.fixture +def list_tasks_return_value() -> list[Task]: + examples = Task.model_json_schema()["examples"] + assert len(examples) > 0 + return [Task.model_validate(example) for example in examples] + + +@pytest.fixture +def set_task_progress_return_value() -> None: + return None + + +@pytest.fixture +def mock_task_manager_object( + mocker: MockerFixture, + submit_task_return_value: TaskUUID, + cancel_task_return_value: None, + get_task_result_return_value: dict, + get_task_status_return_value: TaskStatus, + list_tasks_return_value: list[Task], + set_task_progress_return_value: None, +) -> MockType: + """ + Returns a TaskManager mock with overridable return values for each method. + If a return value is an Exception, the method will raise it. 
+ """ + mock = mocker.Mock(spec=TaskManager) + + def _set_return_or_raise(method, value): + if isinstance(value, Exception): + method.side_effect = lambda *a, **kw: (_ for _ in ()).throw(value) + else: + method.return_value = value + + _set_return_or_raise(mock.submit_task, submit_task_return_value) + _set_return_or_raise(mock.cancel_task, cancel_task_return_value) + _set_return_or_raise(mock.get_task_result, get_task_result_return_value) + _set_return_or_raise(mock.get_task_status, get_task_status_return_value) + _set_return_or_raise(mock.list_tasks, list_tasks_return_value) + _set_return_or_raise(mock.set_task_progress, set_task_progress_return_value) + return mock + + +@pytest.fixture +def mock_task_manager_object_raising_factory( + mocker: MockerFixture, +) -> Callable[[Exception], MockType]: + def _factory(task_manager_exception: Exception) -> MockType: + mock = mocker.Mock(spec=TaskManager) + + def _raise_exc(*args, **kwargs): + raise task_manager_exception + + mock.submit_task.side_effect = _raise_exc + mock.cancel_task.side_effect = _raise_exc + mock.get_task_result.side_effect = _raise_exc + mock.get_task_status.side_effect = _raise_exc + mock.list_tasks.side_effect = _raise_exc + mock.set_task_progress.side_effect = _raise_exc + return mock + + return _factory diff --git a/packages/pytest-simcore/src/pytest_simcore/db_entries_mocks.py b/packages/pytest-simcore/src/pytest_simcore/db_entries_mocks.py index 15f28daf3162..1b1ec0ff762e 100644 --- a/packages/pytest-simcore/src/pytest_simcore/db_entries_mocks.py +++ b/packages/pytest-simcore/src/pytest_simcore/db_entries_mocks.py @@ -3,6 +3,7 @@ # pylint:disable=redefined-outer-name # pylint:disable=no-value-for-parameter +import contextlib from collections.abc import AsyncIterator, Awaitable, Callable, Iterator from typing import Any from uuid import uuid4 @@ -10,65 +11,77 @@ import pytest import sqlalchemy as sa from faker import Faker +from models_library.products import ProductName from models_library.projects import ProjectAtDB, ProjectID from models_library.projects_nodes_io import NodeID from simcore_postgres_database.models.comp_pipeline import StateType, comp_pipeline from simcore_postgres_database.models.comp_tasks import comp_tasks +from simcore_postgres_database.models.products import products from simcore_postgres_database.models.projects import ProjectType, projects -from simcore_postgres_database.models.users import UserRole, UserStatus, users +from simcore_postgres_database.models.projects_to_products import projects_to_products +from simcore_postgres_database.models.services import services_access_rights +from simcore_postgres_database.models.users import UserRole, UserStatus from simcore_postgres_database.utils_projects_nodes import ( ProjectNodeCreate, ProjectNodesRepo, ) from sqlalchemy.ext.asyncio import AsyncEngine +from .helpers.postgres_tools import insert_and_get_row_lifespan +from .helpers.postgres_users import sync_insert_and_get_user_and_secrets_lifespan + @pytest.fixture() def create_registered_user( - postgres_db: sa.engine.Engine, faker: Faker + postgres_db: sa.engine.Engine, ) -> Iterator[Callable[..., dict]]: + """Fixture to create a registered user with secrets in the database.""" created_user_ids = [] - def creator(**user_kwargs) -> dict[str, Any]: - with postgres_db.connect() as con: - # removes all users before continuing - user_config = { - "id": len(created_user_ids) + 1, - "name": faker.name(), - "email": faker.email(), - "password_hash": faker.password(), - "status": UserStatus.ACTIVE, - "role": 
UserRole.USER, - } - user_config.update(user_kwargs) + with contextlib.ExitStack() as stack: - con.execute( - users.insert().values(user_config).returning(sa.literal_column("*")) + def _(**user_kwargs) -> dict[str, Any]: + user_id = len(created_user_ids) + 1 + user = stack.enter_context( + sync_insert_and_get_user_and_secrets_lifespan( + postgres_db, + status=UserStatus.ACTIVE, + role=UserRole.USER, + id=user_id, + **user_kwargs, + ) ) - # this is needed to get the primary_gid correctly - result = con.execute( - sa.select(users).where(users.c.id == user_config["id"]) - ) - user = result.first() - assert user + print(f"--> created {user=}") + assert user["id"] == user_id created_user_ids.append(user["id"]) - return dict(user._asdict()) + return user - yield creator + yield _ - with postgres_db.connect() as con: - con.execute(users.delete().where(users.c.id.in_(created_user_ids))) print(f"<-- deleted users {created_user_ids=}") @pytest.fixture -async def project( - sqlalchemy_async_engine: AsyncEngine, faker: Faker +async def with_product( + sqlalchemy_async_engine: AsyncEngine, product: dict[str, Any] +) -> AsyncIterator[dict[str, Any]]: + async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup + sqlalchemy_async_engine, + table=products, + values=product, + pk_col=products.c.name, + ) as created_product: + yield created_product + + +@pytest.fixture +async def create_project( + sqlalchemy_async_engine: AsyncEngine, faker: Faker, product_name: ProductName ) -> AsyncIterator[Callable[..., Awaitable[ProjectAtDB]]]: created_project_ids: list[str] = [] - async def creator( + async def _( user: dict[str, Any], *, project_nodes_overrides: dict[str, Any] | None = None, @@ -108,15 +121,24 @@ async def creator( await project_nodes_repo.add( con, nodes=[ - ProjectNodeCreate(node_id=NodeID(node_id), **default_node_config) - for node_id in inserted_project.workbench + ProjectNodeCreate( + node_id=NodeID(node_id), + **(default_node_config | node_data.model_dump(mode="json")), + ) + for node_id, node_data in inserted_project.workbench.items() ], ) + await con.execute( + projects_to_products.insert().values( + project_uuid=f"{inserted_project.uuid}", + product_name=product_name, + ) + ) print(f"--> created {inserted_project=}") created_project_ids.append(f"{inserted_project.uuid}") return inserted_project - yield creator + yield _ # cleanup async with sqlalchemy_async_engine.begin() as con: @@ -127,18 +149,20 @@ async def creator( @pytest.fixture -def pipeline(postgres_db: sa.engine.Engine) -> Iterator[Callable[..., dict[str, Any]]]: +async def create_pipeline( + sqlalchemy_async_engine: AsyncEngine, +) -> AsyncIterator[Callable[..., Awaitable[dict[str, Any]]]]: created_pipeline_ids: list[str] = [] - def creator(**pipeline_kwargs) -> dict[str, Any]: + async def _(**pipeline_kwargs) -> dict[str, Any]: pipeline_config = { "project_id": f"{uuid4()}", "dag_adjacency_list": {}, "state": StateType.NOT_STARTED, } pipeline_config.update(**pipeline_kwargs) - with postgres_db.connect() as conn: - result = conn.execute( + async with sqlalchemy_async_engine.begin() as conn: + result = await conn.execute( comp_pipeline.insert() .values(**pipeline_config) .returning(sa.literal_column("*")) @@ -148,11 +172,11 @@ def creator(**pipeline_kwargs) -> dict[str, Any]: created_pipeline_ids.append(new_pipeline["project_id"]) return new_pipeline - yield creator + yield _ # cleanup - with postgres_db.connect() as conn: - conn.execute( + async with sqlalchemy_async_engine.begin() as conn: + 
await conn.execute( comp_pipeline.delete().where( comp_pipeline.c.project_id.in_(created_pipeline_ids) ) @@ -160,13 +184,15 @@ def creator(**pipeline_kwargs) -> dict[str, Any]: @pytest.fixture -def comp_task(postgres_db: sa.engine.Engine) -> Iterator[Callable[..., dict[str, Any]]]: +async def create_comp_task( + sqlalchemy_async_engine: AsyncEngine, +) -> AsyncIterator[Callable[..., Awaitable[dict[str, Any]]]]: created_task_ids: list[int] = [] - def creator(project_id: ProjectID, **task_kwargs) -> dict[str, Any]: + async def _(project_id: ProjectID, **task_kwargs) -> dict[str, Any]: task_config = {"project_id": f"{project_id}"} | task_kwargs - with postgres_db.connect() as conn: - result = conn.execute( + async with sqlalchemy_async_engine.begin() as conn: + result = await conn.execute( comp_tasks.insert() .values(**task_config) .returning(sa.literal_column("*")) @@ -176,10 +202,81 @@ def creator(project_id: ProjectID, **task_kwargs) -> dict[str, Any]: created_task_ids.append(new_task["task_id"]) return new_task - yield creator + yield _ # cleanup - with postgres_db.connect() as conn: - conn.execute( + async with sqlalchemy_async_engine.begin() as conn: + await conn.execute( comp_tasks.delete().where(comp_tasks.c.task_id.in_(created_task_ids)) ) + + +@pytest.fixture +def grant_service_access_rights( + postgres_db: sa.engine.Engine, +) -> Iterator[Callable[..., dict[str, Any]]]: + """Fixture to grant access rights on a service for a given group. + + Creates a row in the services_access_rights table with the provided parameters and cleans up after the test. + """ + created_entries: list[tuple[str, str, int, str]] = [] + + def _( + *, + service_key: str, + service_version: str, + group_id: int = 1, + product_name: str = "osparc", + execute_access: bool = True, + write_access: bool = False, + ) -> dict[str, Any]: + values = { + "key": service_key, + "version": service_version, + "gid": group_id, + "product_name": product_name, + "execute_access": execute_access, + "write_access": write_access, + } + + # Directly use SQLAlchemy to insert and retrieve the row + with postgres_db.begin() as conn: + # Insert the row + conn.execute(services_access_rights.insert().values(**values)) + + # Retrieve the inserted row + result = conn.execute( + sa.select(services_access_rights).where( + sa.and_( + services_access_rights.c.key == service_key, + services_access_rights.c.version == service_version, + services_access_rights.c.gid == group_id, + services_access_rights.c.product_name == product_name, + ) + ) + ) + row = result.one() + + # Track the entry for cleanup + created_entries.append( + (service_key, service_version, group_id, product_name) + ) + + # Convert row to dict + return dict(row._asdict()) + + yield _ + + # Cleanup all created entries + with postgres_db.begin() as conn: + for key, version, gid, product in created_entries: + conn.execute( + services_access_rights.delete().where( + sa.and_( + services_access_rights.c.key == key, + services_access_rights.c.version == version, + services_access_rights.c.gid == gid, + services_access_rights.c.product_name == product, + ) + ) + ) diff --git a/packages/pytest-simcore/src/pytest_simcore/docker_registry.py b/packages/pytest-simcore/src/pytest_simcore/docker_registry.py index 84b4d1e4b24a..a5b411947f12 100644 --- a/packages/pytest-simcore/src/pytest_simcore/docker_registry.py +++ b/packages/pytest-simcore/src/pytest_simcore/docker_registry.py @@ -15,6 +15,7 @@ import docker import jsonschema import pytest +import pytest_asyncio import tenacity from 
pytest_simcore.helpers.logging_tools import log_context from pytest_simcore.helpers.typing_env import EnvVarsDict @@ -22,35 +23,45 @@ from .helpers.host import get_localhost_ip -log = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) @pytest.fixture(scope="session") def docker_registry(keep_docker_up: bool) -> Iterator[str]: + """sets up and runs a docker registry container locally and returns its URL""" + yield from _docker_registry_impl(keep_docker_up, registry_version="3") + + +@pytest.fixture(scope="session") +def docker_registry_v2() -> Iterator[str]: + """sets up and runs a docker registry v2 container locally and returns its URL""" + yield from _docker_registry_impl(keep_docker_up=False, registry_version="2") + + +def _docker_registry_impl(keep_docker_up: bool, registry_version: str) -> Iterator[str]: """sets up and runs a docker registry container locally and returns its URL""" # run the registry outside of the stack docker_client = docker.from_env() # try to login to private registry host = "127.0.0.1" - port = 5000 + port = 5000 if registry_version == "3" else 5001 url = f"{host}:{port}" + container_name = f"pytest_registry_v{registry_version}" + volume_name = f"pytest_registry_v{registry_version}_data" + container = None try: docker_client.login(registry=url, username="simcore") - container = docker_client.containers.list(filters={"name": "pytest_registry"})[ - 0 - ] + container = docker_client.containers.list(filters={"name": container_name})[0] print("Warning: docker registry is already up!") except Exception: # pylint: disable=broad-except container = docker_client.containers.run( - "registry:2", - ports={"5000": "5000"}, - name="pytest_registry", + f"registry:{registry_version}", + ports={"5000": port}, + name=container_name, environment=["REGISTRY_STORAGE_DELETE_ENABLED=true"], restart_policy={"Name": "always"}, - volumes={ - "pytest_registry_data": {"bind": "/var/lib/registry", "mode": "rw"} - }, + volumes={volume_name: {"bind": "/var/lib/registry", "mode": "rw"}}, detach=True, ) @@ -63,7 +74,7 @@ def docker_registry(keep_docker_up: bool) -> Iterator[str]: docker_client.login(registry=url, username="simcore") # tag the image repo = url + "/hello-world:dev" - assert hello_world_image.tag(repo) == True + assert hello_world_image.tag(repo) # push the image to the private registry docker_client.images.push(repo) # wipe the images @@ -79,9 +90,9 @@ def docker_registry(keep_docker_up: bool) -> Iterator[str]: os.environ["REGISTRY_SSL"] = "False" os.environ["REGISTRY_AUTH"] = "False" # the registry URL is how to access from the container (e.g. 
for accessing the API) - os.environ["REGISTRY_URL"] = f"{get_localhost_ip()}:5000" + os.environ["REGISTRY_URL"] = f"{get_localhost_ip()}:{port}" # the registry PATH is how the docker engine shall access the images (usually same as REGISTRY_URL but for testing) - os.environ["REGISTRY_PATH"] = "127.0.0.1:5000" + os.environ["REGISTRY_PATH"] = f"127.0.0.1:{port}" os.environ["REGISTRY_USER"] = "simcore" os.environ["REGISTRY_PW"] = "" @@ -124,7 +135,7 @@ def registry_settings( @tenacity.retry( wait=tenacity.wait_fixed(2), stop=tenacity.stop_after_delay(20), - before_sleep=tenacity.before_sleep_log(log, logging.INFO), + before_sleep=tenacity.before_sleep_log(_logger, logging.INFO), reraise=True, ) def wait_till_registry_is_responsive(url: str) -> bool: @@ -136,7 +147,7 @@ def wait_till_registry_is_responsive(url: str) -> bool: # ********************************************************* Services *************************************** -def _pull_push_service( +async def _pull_push_service( pull_key: str, tag: str, new_registry: str, @@ -145,40 +156,45 @@ def _pull_push_service( ) -> dict[str, Any]: client = docker.from_env() # pull image from original location - print(f"Pulling {pull_key}:{tag} ...") - image = client.images.pull(pull_key, tag=tag) - assert image, f"image {pull_key}:{tag} could NOT be pulled!" + with log_context(logging.INFO, msg=f"Pulling {pull_key}:{tag} ..."): + image = client.images.pull(pull_key, tag=tag) + assert image, f"image {pull_key}:{tag} could NOT be pulled!" # get io.simcore.* labels image_labels: dict = dict(image.labels) if owner_email: - print(f"Overriding labels to take ownership as {owner_email} ...") - # By overriding these labels, user owner_email gets ownership of the service - # and the catalog service automatically gives full access rights for testing it - # otherwise it does not even get read rights - - image_labels.update({"io.simcore.contact": f'{{"contact": "{owner_email}"}}'}) - image_labels.update( - { - "io.simcore.authors": f'{{"authors": [{{"name": "Tester", "email": "{owner_email}", "affiliation": "IT\'IS Foundation"}}] }}' - } - ) - image_labels.update({"maintainer": f"{owner_email}"}) - - df_path = Path("Dockerfile").resolve() - df_path.write_text(f"FROM {pull_key}:{tag}") + with log_context( + logging.INFO, + msg=f"Overriding labels to take ownership as {owner_email} ...", + ): + # By overriding these labels, user owner_email gets ownership of the service + # and the catalog service automatically gives full access rights for testing it + # otherwise it does not even get read rights - try: - # Rebuild to override image labels AND re-tag - image2, _ = client.images.build( - path=str(df_path.parent), labels=image_labels, tag=f"{tag}-owned" + image_labels.update( + {"io.simcore.contact": f'{{"contact": "{owner_email}"}}'} + ) + image_labels.update( + { + "io.simcore.authors": f'{{"authors": [{{"name": "Tester", "email": "{owner_email}", "affiliation": "IT\'IS Foundation"}}] }}' + } ) - print(json.dumps(image2.labels, indent=2)) - image = image2 + image_labels.update({"maintainer": f"{owner_email}"}) - finally: - df_path.unlink() + df_path = Path("Dockerfile").resolve() + df_path.write_text(f"FROM {pull_key}:{tag}") + + try: + # Rebuild to override image labels AND re-tag + image2, _ = client.images.build( + path=str(df_path.parent), labels=image_labels, tag=f"{tag}-owned" + ) + print(json.dumps(image2.labels, indent=2)) + image = image2 + + finally: + df_path.unlink() assert image_labels io_simcore_labels = { @@ -195,11 +211,16 @@ def 
_pull_push_service( new_image_tag = ( f"{new_registry}/{io_simcore_labels['key']}:{io_simcore_labels['version']}" ) - assert image.tag(new_image_tag) == True + assert image.tag(new_image_tag) # push the image to the new location - print(f"Pushing {pull_key}:{tag} -> {new_image_tag}...") - client.images.push(new_image_tag) + async with aiodocker.Docker() as client: + await client.images.push(new_image_tag) + # with log_context( + # logging.INFO, + # msg=f"Pushing {pull_key}:{tag} -> {new_image_tag} ...", + # ): + # client.images.push(new_image_tag) # return image io.simcore.* labels image_labels = dict(image.labels) @@ -212,10 +233,10 @@ def _pull_push_service( } -@pytest.fixture(scope="session") +@pytest_asyncio.fixture(scope="session", loop_scope="session") def docker_registry_image_injector( docker_registry: str, node_meta_schema: dict -) -> Callable[..., dict[str, Any]]: +) -> Callable[[str, str, str | None], Awaitable[dict[str, Any]]]: def inject_image( source_image_repo: str, source_image_tag: str, owner_email: str | None = None ): @@ -231,29 +252,33 @@ def inject_image( @pytest.fixture -def osparc_service( +async def osparc_service( docker_registry: str, node_meta_schema: dict, service_repo: str, service_tag: str ) -> dict[str, Any]: """pulls the service from service_repo:service_tag and pushes to docker_registry using the oSparc node meta schema NOTE: 'service_repo' and 'service_tag' defined as parametrization """ - return _pull_push_service( + return await _pull_push_service( service_repo, service_tag, docker_registry, node_meta_schema ) -@pytest.fixture(scope="session") -def sleeper_service(docker_registry: str, node_meta_schema: dict) -> dict[str, Any]: +@pytest_asyncio.fixture(scope="session", loop_scope="session") +async def sleeper_service( + docker_registry: str, node_meta_schema: dict +) -> dict[str, Any]: """Adds a itisfoundation/sleeper in docker registry""" - return _pull_push_service( + return await _pull_push_service( "itisfoundation/sleeper", "1.0.0", docker_registry, node_meta_schema ) -@pytest.fixture(scope="session") -def jupyter_service(docker_registry: str, node_meta_schema: dict) -> dict[str, Any]: +@pytest_asyncio.fixture(scope="session", loop_scope="session") +async def jupyter_service( + docker_registry: str, node_meta_schema: dict +) -> dict[str, Any]: """Adds a itisfoundation/jupyter-base-notebook in docker registry""" - return _pull_push_service( + return await _pull_push_service( "itisfoundation/jupyter-base-notebook", "2.13.0", docker_registry, @@ -261,20 +286,20 @@ def jupyter_service(docker_registry: str, node_meta_schema: dict) -> dict[str, A ) -@pytest.fixture(scope="session", params=["2.0.7"]) +@pytest_asyncio.fixture(scope="session", loop_scope="session", params=["2.0.7"]) def dy_static_file_server_version(request: pytest.FixtureRequest): return request.param -@pytest.fixture(scope="session") -def dy_static_file_server_service( +@pytest_asyncio.fixture(scope="session", loop_scope="session") +async def dy_static_file_server_service( docker_registry: str, node_meta_schema: dict, dy_static_file_server_version: str ) -> dict[str, Any]: """ Adds the below service in docker registry itisfoundation/dy-static-file-server """ - return _pull_push_service( + return await _pull_push_service( "itisfoundation/dy-static-file-server", dy_static_file_server_version, docker_registry, @@ -282,15 +307,15 @@ def dy_static_file_server_service( ) -@pytest.fixture(scope="session") -def dy_static_file_server_dynamic_sidecar_service( +@pytest_asyncio.fixture(scope="session", 
loop_scope="session") +async def dy_static_file_server_dynamic_sidecar_service( docker_registry: str, node_meta_schema: dict, dy_static_file_server_version: str ) -> dict[str, Any]: """ Adds the below service in docker registry itisfoundation/dy-static-file-server-dynamic-sidecar """ - return _pull_push_service( + return await _pull_push_service( "itisfoundation/dy-static-file-server-dynamic-sidecar", dy_static_file_server_version, docker_registry, @@ -298,15 +323,15 @@ def dy_static_file_server_dynamic_sidecar_service( ) -@pytest.fixture(scope="session") -def dy_static_file_server_dynamic_sidecar_compose_spec_service( +@pytest_asyncio.fixture(scope="session", loop_scope="session") +async def dy_static_file_server_dynamic_sidecar_compose_spec_service( docker_registry: str, node_meta_schema: dict, dy_static_file_server_version: str ) -> dict[str, Any]: """ Adds the below service in docker registry itisfoundation/dy-static-file-server-dynamic-sidecar-compose-spec """ - return _pull_push_service( + return await _pull_push_service( "itisfoundation/dy-static-file-server-dynamic-sidecar-compose-spec", dy_static_file_server_version, docker_registry, diff --git a/packages/pytest-simcore/src/pytest_simcore/environment_configs.py b/packages/pytest-simcore/src/pytest_simcore/environment_configs.py index 6495f1f7cc1a..6a87e3536e95 100644 --- a/packages/pytest-simcore/src/pytest_simcore/environment_configs.py +++ b/packages/pytest-simcore/src/pytest_simcore/environment_configs.py @@ -3,15 +3,19 @@ # pylint: disable=unused-variable +import logging import re from pathlib import Path from typing import Any import pytest +from faker import Faker from .helpers.monkeypatch_envs import load_dotenv, setenvs_from_dict from .helpers.typing_env import EnvVarsDict +_logger = logging.getLogger(__name__) + def pytest_addoption(parser: pytest.Parser): simcore_group = parser.getgroup("simcore") @@ -20,12 +24,17 @@ def pytest_addoption(parser: pytest.Parser): action="store", type=Path, default=None, - help="Path to an env file. Consider passing a link to repo configs, i.e. `ln -s /path/to/osparc-ops-config/repo.config`", + help="Path to an env file. Replaces .env-devel in the tests by an external envfile." + "e.g. consider " + " `ln -s /path/to/osparc-ops-config/repo.config .secrets` and then " + " `pytest --external-envfile=.secrets --pdb tests/unit/test_core_settings.py`", ) @pytest.fixture(scope="session") -def external_envfile_dict(request: pytest.FixtureRequest) -> EnvVarsDict: +def external_envfile_dict( + request: pytest.FixtureRequest, osparc_simcore_root_dir: Path +) -> EnvVarsDict: """ If a file under test folder prefixed with `.env-secret` is present, then this fixture captures it. @@ -35,19 +44,43 @@ def external_envfile_dict(request: pytest.FixtureRequest) -> EnvVarsDict: """ envs = {} if envfile := request.config.getoption("--external-envfile"): - print("🚨 EXTERNAL `envfile` option detected. Loading", envfile, "...") + _logger.warning( + "🚨 EXTERNAL `envfile` option detected. Loading '%s' ...", envfile + ) assert isinstance(envfile, Path) assert envfile.exists() assert envfile.is_file() + envfile = envfile.resolve() + osparc_simcore_root_dir = osparc_simcore_root_dir.resolve() + + if osparc_simcore_root_dir in envfile.parents and not any( + term in envfile.name.lower() for term in ("ignore", "secret") + ): + _logger.warning( + "🚨 CAUTION: The external envfile '%s' may contain sensitive data and could be accidentally versioned. 
" + "To prevent this, include the words 'secret' or 'ignore' in the filename.", + envfile.name, + ) + envs = load_dotenv(envfile) + if envs: + response = input( + f"🚨 CAUTION: You are about to run tests using environment variables loaded from '{envfile}'.\n" + "This may cause tests to interact with or modify real external systems (e.g., production or staging environments).\n" + "Proceeding could result in data loss or unintended side effects.\n" + "Are you sure you want to continue? [y/N]: " + ) + if response.strip().lower() not in ("y", "yes"): + pytest.exit("Aborted by user due to external envfile usage.") + return envs @pytest.fixture(scope="session") -def skip_if_external_envfile_dict(external_envfile_dict: EnvVarsDict) -> None: +def skip_if_no_external_envfile(external_envfile_dict: EnvVarsDict) -> None: if not external_envfile_dict: pytest.skip(reason="Skipping test since external-envfile is not set") @@ -80,7 +113,7 @@ def service_name(project_slug_dir: Path) -> str: @pytest.fixture(scope="session") -def services_docker_compose_dict(services_docker_compose_file: Path) -> EnvVarsDict: +def docker_compose_services_dict(services_docker_compose_file: Path) -> EnvVarsDict: # NOTE: By keeping import here, this library is ONLY required when the fixture is used import yaml @@ -89,11 +122,30 @@ def services_docker_compose_dict(services_docker_compose_file: Path) -> EnvVarsD return content +@pytest.fixture +def docker_compose_service_hostname( + faker: Faker, service_name: str, docker_compose_services_dict: dict[str, Any] +) -> str: + """Evaluates `hostname` from docker-compose service""" + hostname_template = docker_compose_services_dict["services"][service_name][ + "hostname" + ] + + # Generate fake values to replace Docker Swarm template variables + node_hostname = faker.hostname(levels=1) + task_slot = faker.random_int(min=0, max=10) + + # Replace the Docker Swarm template variables with faker values + return hostname_template.replace("{{.Node.Hostname}}", node_hostname).replace( + "{{.Task.Slot}}", str(task_slot) + ) + + @pytest.fixture def docker_compose_service_environment_dict( - services_docker_compose_dict: dict[str, Any], - env_devel_dict: EnvVarsDict, + docker_compose_services_dict: dict[str, Any], service_name: str, + env_devel_dict: EnvVarsDict, env_devel_file: Path, ) -> EnvVarsDict: """Returns env vars dict from the docker-compose `environment` section @@ -101,10 +153,10 @@ def docker_compose_service_environment_dict( - env_devel_dict in environment_configs plugin - service_name needs to be defined """ - service = services_docker_compose_dict["services"][service_name] + service = docker_compose_services_dict["services"][service_name] def _substitute(key, value) -> tuple[str, str]: - if m := re.match(r"\${([^{}:-]\w+)", value): + if m := re.match(r"\${([^{}:-]\w+)", f"{value}"): expected_env_var = m.group(1) try: # NOTE: if this raises, then the RHS env-vars in the docker-compose are diff --git a/packages/pytest-simcore/src/pytest_simcore/faker_products_data.py b/packages/pytest-simcore/src/pytest_simcore/faker_products_data.py index e55c1e489f09..e91f21ca7e6b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/faker_products_data.py +++ b/packages/pytest-simcore/src/pytest_simcore/faker_products_data.py @@ -4,9 +4,9 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable """ - Fixtures to produce fake data for a product: - - it is self-consistent - - granular customization by overriding fixtures +Fixtures to produce fake data for a product: + - it is 
self-consistent + - granular customization by overriding fixtures """ from typing import Any @@ -65,11 +65,25 @@ def bcc_email(request: pytest.FixtureRequest, product_name: ProductName) -> Emai ) +@pytest.fixture +def support_standard_group_id(faker: Faker) -> int | None: + # NOTE: override to change + return None + + @pytest.fixture def product( - faker: Faker, product_name: ProductName, support_email: EmailStr + faker: Faker, + product_name: ProductName, + support_email: EmailStr, + support_standard_group_id: int | None, ) -> dict[str, Any]: - return random_product(name=product_name, support_email=support_email, fake=faker) + return random_product( + name=product_name, + support_email=support_email, + support_standard_group_id=support_standard_group_id, + fake=faker, + ) @pytest.fixture diff --git a/packages/pytest-simcore/src/pytest_simcore/faker_users_data.py b/packages/pytest-simcore/src/pytest_simcore/faker_users_data.py index 4e59b6db93a4..070087982e7d 100644 --- a/packages/pytest-simcore/src/pytest_simcore/faker_users_data.py +++ b/packages/pytest-simcore/src/pytest_simcore/faker_users_data.py @@ -3,9 +3,9 @@ # pylint: disable=unused-variable # pylint: disable=too-many-arguments """ - Fixtures to produce fake data for a user: - - it is self-consistent - - granular customization by overriding fixtures +Fixtures to produce fake data for a user: + - it is self-consistent + - granular customization by overriding fixtures """ from typing import Any @@ -16,7 +16,11 @@ from models_library.users import UserID from pydantic import EmailStr, TypeAdapter -from .helpers.faker_factories import DEFAULT_TEST_PASSWORD, random_user +from .helpers.faker_factories import ( + DEFAULT_TEST_PASSWORD, + random_user, + random_user_secrets, +) _MESSAGE = ( "If set, it overrides the fake value of `{}` fixture." 
@@ -125,12 +129,17 @@ def user(
     user_name: IDStr,
     user_password: str,
 ) -> dict[str, Any]:
-    return random_user(
-        id=user_id,
-        email=user_email,
-        name=user_name,
-        first_name=user_first_name,
-        last_name=user_last_name,
-        password=user_password,
-        fake=faker,
-    )
+    """NOTE: it returns user data including password and password_hash"""
+    secrets = random_user_secrets(fake=faker, user_id=user_id, password=user_password)
+    assert secrets["user_id"] == user_id
+    return {
+        **random_user(
+            id=user_id,
+            email=user_email,
+            name=user_name,
+            first_name=user_first_name,
+            last_name=user_last_name,
+            fake=faker,
+        ),
+        "password_hash": secrets["password_hash"],
+    }
diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/assert_checks.py b/packages/pytest-simcore/src/pytest_simcore/helpers/assert_checks.py
index fc931cbebd59..afc6cdf15a3f 100644
--- a/packages/pytest-simcore/src/pytest_simcore/helpers/assert_checks.py
+++ b/packages/pytest-simcore/src/pytest_simcore/helpers/assert_checks.py
@@ -97,3 +97,17 @@ def _do_assert_error(
         assert expected_error_code in codes

     return data, error
+
+
+def assert_equal_ignoring_none(expected: dict, actual: dict):
+    for key, exp_value in expected.items():
+        if exp_value is None:
+            continue
+        assert key in actual, f"Missing key {key}"
+        act_value = actual[key]
+        if isinstance(exp_value, dict) and isinstance(act_value, dict):
+            assert_equal_ignoring_none(exp_value, act_value)
+        else:
+            assert (
+                act_value == exp_value
+            ), f"Mismatch in {key}: {act_value} != {exp_value}"
diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/async_jobs_server.py b/packages/pytest-simcore/src/pytest_simcore/helpers/async_jobs_server.py
new file mode 100644
index 000000000000..eba9867b529d
--- /dev/null
+++ b/packages/pytest-simcore/src/pytest_simcore/helpers/async_jobs_server.py
@@ -0,0 +1,88 @@
+# pylint: disable=unused-argument
+
+from dataclasses import dataclass
+
+from models_library.api_schemas_rpc_async_jobs.async_jobs import (
+    AsyncJobGet,
+    AsyncJobId,
+    AsyncJobResult,
+    AsyncJobStatus,
+)
+from models_library.api_schemas_rpc_async_jobs.exceptions import BaseAsyncjobRpcError
+from models_library.progress_bar import ProgressReport
+from models_library.rabbitmq_basic_types import RPCNamespace
+from pydantic import validate_call
+from pytest_mock import MockType
+from servicelib.celery.models import OwnerMetadata
+from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient
+
+
+@dataclass
+class AsyncJobSideEffects:
+    exception: BaseAsyncjobRpcError | None = None
+
+    @validate_call(config={"arbitrary_types_allowed": True})
+    async def cancel(
+        self,
+        rabbitmq_rpc_client: RabbitMQRPCClient | MockType,
+        *,
+        rpc_namespace: RPCNamespace,
+        job_id: AsyncJobId,
+        owner_metadata: OwnerMetadata,
+    ) -> None:
+        if self.exception is not None:
+            raise self.exception
+        return None
+
+    @validate_call(config={"arbitrary_types_allowed": True})
+    async def status(
+        self,
+        rabbitmq_rpc_client: RabbitMQRPCClient | MockType,
+        *,
+        rpc_namespace: RPCNamespace,
+        job_id: AsyncJobId,
+        owner_metadata: OwnerMetadata,
+    ) -> AsyncJobStatus:
+        if self.exception is not None:
+            raise self.exception
+
+        return AsyncJobStatus(
+            job_id=job_id,
+            progress=ProgressReport(
+                actual_value=50.0,
+                total=100.0,
+                attempt=1,
+            ),
+            done=False,
+        )
+
+    @validate_call(config={"arbitrary_types_allowed": True})
+    async def result(
+        self,
+        rabbitmq_rpc_client: RabbitMQRPCClient | MockType,
+        *,
+        rpc_namespace: RPCNamespace,
+        job_id: AsyncJobId,
+        owner_metadata: OwnerMetadata,
) -> AsyncJobResult: + if self.exception is not None: + raise self.exception + return AsyncJobResult(result="Success") + + @validate_call(config={"arbitrary_types_allowed": True}) + async def list_jobs( + self, + rabbitmq_rpc_client: RabbitMQRPCClient | MockType, + *, + rpc_namespace: RPCNamespace, + owner_metadata: OwnerMetadata, + filter_: str = "", + ) -> list[AsyncJobGet]: + if self.exception is not None: + raise self.exception + return [ + AsyncJobGet( + job_id=AsyncJobId("123e4567-e89b-12d3-a456-426614174000"), + job_name="Example Job", + ) + ] diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/autoscaling.py b/packages/pytest-simcore/src/pytest_simcore/helpers/autoscaling.py index 2d6c278d92c9..3648284faa75 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/autoscaling.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/autoscaling.py @@ -39,7 +39,7 @@ def create_fake_association( ): fake_node_to_instance_map = {} - async def _fake_node_creator( + def _fake_node_creator( _nodes: list[Node], ec2_instances: list[EC2InstanceData] ) -> tuple[list[AssociatedInstance], list[EC2InstanceData]]: def _create_fake_node_with_labels(instance: EC2InstanceData) -> Node: diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/director_v2_rpc_server.py b/packages/pytest-simcore/src/pytest_simcore/helpers/director_v2_rpc_server.py new file mode 100644 index 000000000000..8b8d9b6cc4b6 --- /dev/null +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/director_v2_rpc_server.py @@ -0,0 +1,30 @@ +# pylint: disable=no-self-use +# pylint: disable=not-context-manager +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable + + +from models_library.api_schemas_directorv2.computations import TaskLogFileIdGet +from models_library.projects import ProjectID +from pydantic import TypeAdapter, validate_call +from pytest_mock import MockType +from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient + + +class DirectorV2SideEffects: + # pylint: disable=no-self-use + @validate_call(config={"arbitrary_types_allowed": True}) + async def get_computation_task_log_file_ids( + self, + rpc_client: RabbitMQRPCClient | MockType, + *, + project_id: ProjectID, + ) -> list[TaskLogFileIdGet]: + assert rpc_client + assert project_id + + return TypeAdapter(list[TaskLogFileIdGet]).validate_python( + TaskLogFileIdGet.model_json_schema()["examples"], + ) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py b/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py index 5aeb6c3be667..b03d29c67005 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/faker_factories.py @@ -20,10 +20,18 @@ from uuid import uuid4 import arrow -import faker from faker import Faker -DEFAULT_FAKER: Final = faker.Faker() +DEFAULT_FAKER: Final = Faker() + + +def random_service_key(fake: Faker = DEFAULT_FAKER, *, name: str | None = None) -> str: + suffix = fake.unique.pystr(min_chars=2) if name is None else name + return f"simcore/services/{fake.random_element(['dynamic', 'comp', 'frontend'])}/{suffix.lower()}" + + +def random_service_version(fake: Faker = DEFAULT_FAKER) -> str: + return ".".join([str(fake.pyint(0, 100)) for _ in range(3)]) def random_icon_url(fake: Faker): @@ -34,6 +42,15 @@ def random_thumbnail_url(fake: Faker): return fake.image_url(width=32, height=32) +def 
random_phone_number(fake: Faker = DEFAULT_FAKER) -> str: + # NOTE: faker.phone_number() does not validate with `phonenumbers` library. + phone = fake.random_element( + ["+41763456789", "+19104630364", "+13013044567", "+34 950 453 837"] + ) + tail = f"{fake.pyint(100, 999)}" + return phone[: -len(tail)] + tail # ensure phone keeps its length + + def _compute_hash(password: str) -> str: try: # 'passlib' will be used only if already installed. @@ -55,9 +72,7 @@ def _compute_hash(password: str) -> str: _DEFAULT_HASH = _compute_hash(DEFAULT_TEST_PASSWORD) -def random_user( - fake: Faker = DEFAULT_FAKER, password: str | None = None, **overrides -) -> dict[str, Any]: +def random_user(fake: Faker = DEFAULT_FAKER, **overrides) -> dict[str, Any]: from simcore_postgres_database.models.users import users from simcore_postgres_database.webserver_models import UserStatus @@ -67,12 +82,35 @@ def random_user( # NOTE: ensures user name is unique to avoid flaky tests "name": f"{fake.user_name()}_{fake.uuid4()}", "email": f"{fake.uuid4()}_{fake.email().lower()}", - "password_hash": _DEFAULT_HASH, "status": UserStatus.ACTIVE, } + data.update(overrides) assert set(data.keys()).issubset({c.name for c in users.columns}) + return data + + +def random_user_secrets( + fake: Faker = DEFAULT_FAKER, + *, + # foreign keys + user_id: int, + password: str | None = None, + **overrides, +) -> dict[str, Any]: + from simcore_postgres_database.models.users_secrets import users_secrets + + assert fake # nosec + + assert set(overrides.keys()).issubset({c.name for c in users_secrets.columns}) + + data = { + "user_id": user_id, + "password_hash": _DEFAULT_HASH, + } + assert set(data.keys()).issubset({c.name for c in users_secrets.columns}) + # transform password in hash if password: assert len(password) >= 12 @@ -105,7 +143,7 @@ def random_pre_registration_details( "pre_first_name": fake.first_name(), "pre_last_name": fake.last_name(), "pre_email": fake.email(), - "pre_phone": fake.phone_number(), + "pre_phone": random_phone_number(fake), "institution": fake.company(), "address": fake.address().replace("\n", ", "), "city": fake.city(), @@ -158,6 +196,26 @@ def random_project(fake: Faker = DEFAULT_FAKER, **overrides) -> dict[str, Any]: return data +def random_project_node(fake: Faker = DEFAULT_FAKER, **overrides) -> dict[str, Any]: + """Generates random fake data project nodes DATABASE table""" + from simcore_postgres_database.models.projects_nodes import projects_nodes + + fake_name = fake.name() + + data = { + "node_id": fake.uuid4(), + "project_uuid": fake.uuid4(), + "key": random_service_key(fake, name=fake_name), + "version": random_service_version(fake), + "label": fake_name, + } + + assert set(data.keys()).issubset({c.name for c in projects_nodes.columns}) + + data.update(overrides) + return data + + def random_group(fake: Faker = DEFAULT_FAKER, **overrides) -> dict[str, Any]: from simcore_postgres_database.models.groups import groups from simcore_postgres_database.webserver_models import GroupType @@ -227,6 +285,7 @@ def fake_task(**overrides) -> dict[str, Any]: def random_product( *, group_id: int | None = None, + support_standard_group_id: int | None = None, registration_email_template: str | None = None, fake: Faker = DEFAULT_FAKER, **overrides, @@ -265,7 +324,6 @@ def random_product( ui=VendorUI( logo_url="https://raw.githubusercontent.com/ITISFoundation/osparc-simcore/refs/heads/master/services/static-webserver/client/source/resource/osparc/osparc-black.svg", strong_color=fake.color(), - 
project_alias=fake.random_element(elements=["project", "study"]), ), ), "registration_email_template": registration_email_template, @@ -274,6 +332,7 @@ def random_product( "priority": fake.pyint(0, 10), "max_open_studies_per_user": fake.pyint(1, 10), "group_id": group_id, + "support_standard_group_id": support_standard_group_id, } if ui := fake.random_element( @@ -453,7 +512,7 @@ def random_service_meta_data( ) -> dict[str, Any]: from simcore_postgres_database.models.services import services_meta_data - _version = ".".join([str(fake.pyint()) for _ in range(3)]) + _version = random_service_version(fake) _name = fake.name() data: dict[str, Any] = { @@ -541,3 +600,32 @@ def random_itis_vip_available_download_item( data.update(**overrides) return data + + +def random_service_consume_filetype( + *, + service_key: str, + service_version: str, + fake: Faker = DEFAULT_FAKER, + **overrides, +) -> dict[str, Any]: + from simcore_postgres_database.models.services_consume_filetypes import ( + services_consume_filetypes, + ) + + data = { + "service_key": service_key, + "service_version": service_version, + "service_display_name": fake.company(), + "service_input_port": fake.word(), + "filetype": fake.random_element(["CSV", "VTK", "H5", "JSON", "TXT"]), + "preference_order": fake.pyint(min_value=0, max_value=10), + "is_guest_allowed": fake.pybool(), + } + + assert set(data.keys()).issubset( # nosec + {c.name for c in services_consume_filetypes.columns} + ) + + data.update(overrides) + return data diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/logging_tools.py b/packages/pytest-simcore/src/pytest_simcore/helpers/logging_tools.py index c12649c1c3d4..32960d9bc7a2 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/logging_tools.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/logging_tools.py @@ -1,10 +1,11 @@ import datetime import logging -from collections.abc import Iterator +import warnings +from collections.abc import Callable, Iterator from contextlib import contextmanager from dataclasses import dataclass, field from types import SimpleNamespace -from typing import TypeAlias +from typing import Final, TypeAlias def _timedelta_as_minute_second_ms(delta: datetime.timedelta) -> str: @@ -29,6 +30,19 @@ def _timedelta_as_minute_second_ms(delta: datetime.timedelta) -> str: return f"{sign}{result.strip()}" +def _resolve(val: str | Callable[[], str], prefix: str, suffix: str) -> str: + try: + return f"{prefix}{val if isinstance(val, str) else val()}{suffix}" + except Exception as exc: # pylint: disable=broad-exception-caught + warnings.warn( + f"Failed to generate {val} message: {exc!r}. " + f"Fix the callable to return a string without raising exceptions.", + UserWarning, + stacklevel=3, + ) + return f"❌❌❌ [{val} message generation failed TIP: Check how the {val} message is generated!] 
❌❌❌" + + class DynamicIndentFormatter(logging.Formatter): indent_char: str = " " _cls_indent_level: int = 0 @@ -74,15 +88,26 @@ def setup(cls, logger: logging.Logger) -> None: DynamicIndentFormatter.setup(test_logger) +# Message formatting constants +_STARTING_PREFIX: Final[str] = "--> " +_STARTING_SUFFIX: Final[str] = " ⏳" +_DONE_PREFIX: Final[str] = "<-- " +_DONE_SUFFIX: Final[str] = " ✅" +_RAISED_PREFIX: Final[str] = "❌❌❌ Error: " +_RAISED_SUFFIX: Final[str] = " ❌❌❌" + + @dataclass class ContextMessages: - starting: str - done: str - raised: str = field(default="") + starting: str | Callable[[], str] + done: str | Callable[[], str] + raised: str | Callable[[], str] = field(default="") def __post_init__(self): if not self.raised: - self.raised = f"{self.done} [with error]" + self.raised = ( + lambda: f"{self.done if isinstance(self.done, str) else self.done()} [with raised error]" + ) LogLevelInt: TypeAlias = int @@ -127,9 +152,9 @@ def log_context( if isinstance(msg, str): ctx_msg = ContextMessages( - starting=f"-> {msg} starting ...", - done=f"<- {msg} done", - raised=f"! {msg} raised", + starting=f"{msg}", + done=f"{msg}", + raised=f"{msg}", ) elif isinstance(msg, tuple): ctx_msg = ContextMessages(*msg) @@ -140,13 +165,16 @@ def log_context( try: DynamicIndentFormatter.cls_increase_indent() - logger.log(level, ctx_msg.starting, *args, **kwargs) + logger.log( + level, + _resolve(ctx_msg.starting, _STARTING_PREFIX, _STARTING_SUFFIX), + *args, + **kwargs, + ) with _increased_logger_indent(logger): yield SimpleNamespace(logger=logger, messages=ctx_msg) elapsed_time = datetime.datetime.now(tz=datetime.UTC) - started_time - done_message = ( - f"{ctx_msg.done} ({_timedelta_as_minute_second_ms(elapsed_time)})" - ) + done_message = f"{_resolve(ctx_msg.done, _DONE_PREFIX, _DONE_SUFFIX)} ({_timedelta_as_minute_second_ms(elapsed_time)})" logger.log( level, done_message, @@ -156,9 +184,7 @@ def log_context( except: elapsed_time = datetime.datetime.now(tz=datetime.UTC) - started_time - error_message = ( - f"{ctx_msg.raised} ({_timedelta_as_minute_second_ms(elapsed_time)})" - ) + error_message = f"{_resolve(ctx_msg.raised, _RAISED_PREFIX, _RAISED_SUFFIX)} ({_timedelta_as_minute_second_ms(elapsed_time)})" logger.exception( error_message, *args, diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/long_running_tasks.py b/packages/pytest-simcore/src/pytest_simcore/helpers/long_running_tasks.py new file mode 100644 index 000000000000..ad85744951f1 --- /dev/null +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/long_running_tasks.py @@ -0,0 +1,39 @@ +# pylint: disable=protected-access + +import pytest +from fastapi import FastAPI +from servicelib.long_running_tasks.errors import TaskNotFoundError +from servicelib.long_running_tasks.manager import ( + LongRunningManager, +) +from servicelib.long_running_tasks.models import TaskContext +from servicelib.long_running_tasks.task import TaskId +from tenacity import ( + AsyncRetrying, + retry_if_not_exception_type, + stop_after_delay, + wait_fixed, +) + + +def get_fastapi_long_running_manager(app: FastAPI) -> LongRunningManager: + manager = app.state.long_running_manager + assert isinstance(manager, LongRunningManager) + return manager + + +async def assert_task_is_no_longer_present( + manager: LongRunningManager, task_id: TaskId, task_context: TaskContext +) -> None: + async for attempt in AsyncRetrying( + reraise=True, + wait=wait_fixed(0.1), + stop=stop_after_delay(60), + retry=retry_if_not_exception_type(TaskNotFoundError), + ): + with 
attempt:  # noqa: SIM117
+            with pytest.raises(TaskNotFoundError):
+                # use internals to determine when it's no longer here
+                await manager._tasks_manager._get_tracked_task(  # noqa: SLF001
+                    task_id, task_context
+                )
diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/monkeypatch_envs.py b/packages/pytest-simcore/src/pytest_simcore/helpers/monkeypatch_envs.py
index d81356144304..13ca659d2a4a 100644
--- a/packages/pytest-simcore/src/pytest_simcore/helpers/monkeypatch_envs.py
+++ b/packages/pytest-simcore/src/pytest_simcore/helpers/monkeypatch_envs.py
@@ -3,6 +3,7 @@
 """

 import os
+from collections.abc import Mapping
 from io import StringIO
 from pathlib import Path

@@ -17,7 +18,7 @@

 def setenvs_from_dict(
-    monkeypatch: pytest.MonkeyPatch, envs: dict[str, str | bool]
+    monkeypatch: pytest.MonkeyPatch, envs: Mapping[str, str | bool]
 ) -> EnvVarsDict:
     env_vars = {}

diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/playwright.py b/packages/pytest-simcore/src/pytest_simcore/helpers/playwright.py
index 64b86cba7bb1..8e99c459b4ef 100644
--- a/packages/pytest-simcore/src/pytest_simcore/helpers/playwright.py
+++ b/packages/pytest-simcore/src/pytest_simcore/helpers/playwright.py
@@ -43,7 +43,7 @@
     wait_fixed,
 )

-from .logging_tools import log_context
+from .logging_tools import ContextMessages, log_context

 _logger = logging.getLogger(__name__)

@@ -274,8 +274,8 @@ def __call__(self, message: str) -> bool:
             decoded_message = decode_socketio_42_message(message)
             if (
                 (decoded_message.name == _OSparcMessages.PROJECT_STATE_UPDATED.value)
-                and (decoded_message.obj["data"]["locked"]["status"] == "CLOSED")
-                and (decoded_message.obj["data"]["locked"]["value"] is False)
+                and (decoded_message.obj["data"]["shareState"]["status"] == "CLOSED")
+                and (decoded_message.obj["data"]["shareState"]["locked"] is False)
             ):
                 self.logger.info("project successfully closed")
                 return True
@@ -302,6 +302,29 @@ def __call__(self, message: str) -> bool:
         return False


+@dataclass
+class SocketIOWaitNodeForOutputs:
+    logger: logging.Logger
+    expected_number_of_outputs: int
+    node_id: str
+
+    def __call__(self, message: str) -> bool:
+        if message.startswith(SOCKETIO_MESSAGE_PREFIX):
+            decoded_message = decode_socketio_42_message(message)
+            if decoded_message.name == _OSparcMessages.NODE_UPDATED:
+                assert "data" in decoded_message.obj
+                assert "node_id" in decoded_message.obj
+                if decoded_message.obj["node_id"] == self.node_id:
+                    assert "outputs" in decoded_message.obj["data"]
+
+                    return (
+                        len(decoded_message.obj["data"]["outputs"])
+                        == self.expected_number_of_outputs
+                    )
+
+        return False
+
+
 @dataclass
 class SocketIOOsparcMessagePrinter:
     include_logger_messages: bool = False
@@ -532,9 +555,10 @@ def wait_for_pipeline_state(
     if current_state in if_in_states:
         with log_context(
             logging.INFO,
-            msg=(
-                f"pipeline is in {current_state=}, waiting for one of {expected_states=}",
-                f"pipeline is now in {current_state=}",
+            msg=ContextMessages(
+                starting=f"wait for one of {expected_states=}",
+                done=lambda: f"wait for one of {expected_states=}, pipeline reached {current_state=}",
+                raised=lambda: f"pipeline failed or timed out with {current_state}. Expected one of {expected_states=}",
             ),
         ):
             waiter = SocketIOProjectStateUpdatedWaiter(
@@ -551,7 +575,7 @@ def wait_for_pipeline_state(
             and current_state not in expected_states
         ):
             pytest.fail(
-                f"❌ Pipeline failed with state {current_state}. Expected one of {expected_states} ❌"
+                f"❌ Pipeline failed fast with state {current_state}. 
Expected one of {expected_states} ❌" ) return current_state diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/postgres_tools.py b/packages/pytest-simcore/src/pytest_simcore/helpers/postgres_tools.py index 1e854e8b6874..1086a61fc7ae 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/postgres_tools.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/postgres_tools.py @@ -88,20 +88,54 @@ async def _async_insert_and_get_row( conn: AsyncConnection, table: sa.Table, values: dict[str, Any], - pk_col: sa.Column, + pk_col: sa.Column | None = None, pk_value: Any | None = None, + pk_cols: list[sa.Column] | None = None, + pk_values: list[Any] | None = None, ) -> sa.engine.Row: - result = await conn.execute(table.insert().values(**values).returning(pk_col)) + # Validate parameters + single_pk_provided = pk_col is not None + composite_pk_provided = pk_cols is not None + + if single_pk_provided == composite_pk_provided: + msg = "Must provide either pk_col or pk_cols, but not both" + raise ValueError(msg) + + if composite_pk_provided: + if pk_values is not None and len(pk_cols) != len(pk_values): + msg = "pk_cols and pk_values must have the same length" + raise ValueError(msg) + returning_cols = pk_cols + else: + returning_cols = [pk_col] + + result = await conn.execute( + table.insert().values(**values).returning(*returning_cols) + ) row = result.one() - # Get the pk_value from the row if not provided - if pk_value is None: - pk_value = getattr(row, pk_col.name) + if composite_pk_provided: + # Handle composite primary keys + if pk_values is None: + pk_values = [getattr(row, col.name) for col in pk_cols] + else: + for col, expected_value in zip(pk_cols, pk_values, strict=True): + assert getattr(row, col.name) == expected_value + + # Build WHERE clause for composite key + where_clause = sa.and_( + *[col == val for col, val in zip(pk_cols, pk_values, strict=True)] + ) else: - # NOTE: DO NO USE row[pk_col] since you will get a deprecation error (Background on SQLAlchemy 2.0 at: https://sqlalche.me/e/b8d9) - assert getattr(row, pk_col.name) == pk_value + # Handle single primary key (existing logic) + if pk_value is None: + pk_value = getattr(row, pk_col.name) + else: + assert getattr(row, pk_col.name) == pk_value + + where_clause = pk_col == pk_value - result = await conn.execute(sa.select(table).where(pk_col == pk_value)) + result = await conn.execute(sa.select(table).where(where_clause)) return result.one() @@ -109,20 +143,52 @@ def _sync_insert_and_get_row( conn: sa.engine.Connection, table: sa.Table, values: dict[str, Any], - pk_col: sa.Column, + pk_col: sa.Column | None = None, pk_value: Any | None = None, + pk_cols: list[sa.Column] | None = None, + pk_values: list[Any] | None = None, ) -> sa.engine.Row: - result = conn.execute(table.insert().values(**values).returning(pk_col)) + # Validate parameters + single_pk_provided = pk_col is not None + composite_pk_provided = pk_cols is not None + + if single_pk_provided == composite_pk_provided: + msg = "Must provide either pk_col or pk_cols, but not both" + raise ValueError(msg) + + if composite_pk_provided: + if pk_values is not None and len(pk_cols) != len(pk_values): + msg = "pk_cols and pk_values must have the same length" + raise ValueError(msg) + returning_cols = pk_cols + else: + returning_cols = [pk_col] + + result = conn.execute(table.insert().values(**values).returning(*returning_cols)) row = result.one() - # Get the pk_value from the row if not provided - if pk_value is None: - pk_value = getattr(row, pk_col.name) 
+ if composite_pk_provided: + # Handle composite primary keys + if pk_values is None: + pk_values = [getattr(row, col.name) for col in pk_cols] + else: + for col, expected_value in zip(pk_cols, pk_values, strict=True): + assert getattr(row, col.name) == expected_value + + # Build WHERE clause for composite key + where_clause = sa.and_( + *[col == val for col, val in zip(pk_cols, pk_values, strict=True)] + ) else: - # NOTE: DO NO USE row[pk_col] since you will get a deprecation error (Background on SQLAlchemy 2.0 at: https://sqlalche.me/e/b8d9) - assert getattr(row, pk_col.name) == pk_value + # Handle single primary key (existing logic) + if pk_value is None: + pk_value = getattr(row, pk_col.name) + else: + assert getattr(row, pk_col.name) == pk_value + + where_clause = pk_col == pk_value - result = conn.execute(sa.select(table).where(pk_col == pk_value)) + result = conn.execute(sa.select(table).where(where_clause)) return result.one() @@ -132,17 +198,125 @@ async def insert_and_get_row_lifespan( *, table: sa.Table, values: dict[str, Any], - pk_col: sa.Column, + pk_col: sa.Column | None = None, pk_value: Any | None = None, + pk_cols: list[sa.Column] | None = None, + pk_values: list[Any] | None = None, ) -> AsyncIterator[dict[str, Any]]: + """ + Context manager that inserts a row into a table and automatically deletes it on exit. + + Args: + sqlalchemy_async_engine: Async SQLAlchemy engine + table: The table to insert into + values: Dictionary of column values to insert + pk_col: Primary key column for deletion (for single-column primary keys) + pk_value: Optional primary key value (if None, will be taken from inserted row) + pk_cols: List of primary key columns (for composite primary keys) + pk_values: Optional list of primary key values (if None, will be taken from inserted row) + + Yields: + dict: The inserted row as a dictionary + + Examples: + ## Single primary key usage: + + @pytest.fixture + async def user_in_db(asyncpg_engine: AsyncEngine) -> AsyncIterator[dict]: + user_data = random_user(name="test_user", email="test@example.com") + async with insert_and_get_row_lifespan( + asyncpg_engine, + table=users, + values=user_data, + pk_col=users.c.id, + ) as row: + yield row + + ##Composite primary key usage: + + @pytest.fixture + async def service_in_db(asyncpg_engine: AsyncEngine) -> AsyncIterator[dict]: + service_data = {"key": "simcore/services/comp/test", "version": "1.0.0", "name": "Test Service"} + async with insert_and_get_row_lifespan( + asyncpg_engine, + table=services, + values=service_data, + pk_cols=[services.c.key, services.c.version], + ) as row: + yield row + + ##Multiple rows with single primary keys using AsyncExitStack: + + @pytest.fixture + async def users_in_db(asyncpg_engine: AsyncEngine) -> AsyncIterator[list[dict]]: + users_data = [ + random_user(name="user1", email="user1@example.com"), + random_user(name="user2", email="user2@example.com"), + ] + + async with AsyncExitStack() as stack: + created_users = [] + for user_data in users_data: + row = await stack.enter_async_context( + insert_and_get_row_lifespan( + asyncpg_engine, + table=users, + values=user_data, + pk_col=users.c.id, + ) + ) + created_users.append(row) + + yield created_users + + ## Multiple rows with composite primary keys using AsyncExitStack: + + @pytest.fixture + async def services_in_db(asyncpg_engine: AsyncEngine) -> AsyncIterator[list[dict]]: + services_data = [ + {"key": "simcore/services/comp/service1", "version": "1.0.0", "name": "Service 1"}, + {"key": "simcore/services/comp/service2", 
"version": "2.0.0", "name": "Service 2"}, + {"key": "simcore/services/comp/service1", "version": "2.0.0", "name": "Service 1 v2"}, + ] + + async with AsyncExitStack() as stack: + created_services = [] + for service_data in services_data: + row = await stack.enter_async_context( + insert_and_get_row_lifespan( + asyncpg_engine, + table=services, + values=service_data, + pk_cols=[services.c.key, services.c.version], + ) + ) + created_services.append(row) + + yield created_services + """ # SETUP: insert & get async with sqlalchemy_async_engine.begin() as conn: row = await _async_insert_and_get_row( - conn, table=table, values=values, pk_col=pk_col, pk_value=pk_value + conn, + table=table, + values=values, + pk_col=pk_col, + pk_value=pk_value, + pk_cols=pk_cols, + pk_values=pk_values, ) - # If pk_value was None, get it from the row for deletion later - if pk_value is None: - pk_value = getattr(row, pk_col.name) + + # Get pk values for deletion + if pk_cols is not None: + if pk_values is None: + pk_values = [getattr(row, col.name) for col in pk_cols] + where_clause = sa.and_( + *[col == val for col, val in zip(pk_cols, pk_values, strict=True)] + ) + else: + if pk_value is None: + pk_value = getattr(row, pk_col.name) + where_clause = pk_col == pk_value assert row @@ -150,9 +324,9 @@ async def insert_and_get_row_lifespan( # pylint: disable=protected-access yield row._asdict() - # TEAD-DOWN: delete row + # TEARDOWN: delete row async with sqlalchemy_async_engine.begin() as conn: - await conn.execute(table.delete().where(pk_col == pk_value)) + await conn.execute(table.delete().where(where_clause)) @contextmanager @@ -161,23 +335,43 @@ def sync_insert_and_get_row_lifespan( *, table: sa.Table, values: dict[str, Any], - pk_col: sa.Column, + pk_col: sa.Column | None = None, pk_value: Any | None = None, + pk_cols: list[sa.Column] | None = None, + pk_values: list[Any] | None = None, ) -> Iterator[dict[str, Any]]: """sync version of insert_and_get_row_lifespan. TIP: more convenient for **module-scope fixtures** that setup the database tables before the app starts since it does not require an `event_loop` - fixture (which is funcition-scoped ) + fixture (which is function-scoped) + + Supports both single and composite primary keys using the same parameter patterns + as the async version. 
""" # SETUP: insert & get with sqlalchemy_sync_engine.begin() as conn: row = _sync_insert_and_get_row( - conn, table=table, values=values, pk_col=pk_col, pk_value=pk_value + conn, + table=table, + values=values, + pk_col=pk_col, + pk_value=pk_value, + pk_cols=pk_cols, + pk_values=pk_values, ) - # If pk_value was None, get it from the row for deletion later - if pk_value is None: - pk_value = getattr(row, pk_col.name) + + # Get pk values for deletion + if pk_cols is not None: + if pk_values is None: + pk_values = [getattr(row, col.name) for col in pk_cols] + where_clause = sa.and_( + *[col == val for col, val in zip(pk_cols, pk_values, strict=True)] + ) + else: + if pk_value is None: + pk_value = getattr(row, pk_col.name) + where_clause = pk_col == pk_value assert row @@ -187,4 +381,4 @@ def sync_insert_and_get_row_lifespan( # TEARDOWN: delete row with sqlalchemy_sync_engine.begin() as conn: - conn.execute(table.delete().where(pk_col == pk_value)) + conn.execute(table.delete().where(where_clause)) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/postgres_users.py b/packages/pytest-simcore/src/pytest_simcore/helpers/postgres_users.py new file mode 100644 index 000000000000..dd4039619cd3 --- /dev/null +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/postgres_users.py @@ -0,0 +1,106 @@ +import contextlib + +import sqlalchemy as sa +from simcore_postgres_database.models.users import users +from simcore_postgres_database.models.users_secrets import users_secrets +from sqlalchemy.ext.asyncio import AsyncEngine + +from .faker_factories import random_user, random_user_secrets +from .postgres_tools import ( + insert_and_get_row_lifespan, + sync_insert_and_get_row_lifespan, +) + + +def _get_kwargs_from_overrides(overrides: dict) -> tuple[dict, dict]: + user_kwargs = overrides.copy() + secrets_kwargs = {"password": user_kwargs.pop("password", None)} + if "password_hash" in user_kwargs: + secrets_kwargs["password_hash"] = user_kwargs.pop("password_hash") + return user_kwargs, secrets_kwargs + + +@contextlib.asynccontextmanager +async def insert_and_get_user_and_secrets_lifespan( + sqlalchemy_async_engine: AsyncEngine, **overrides +): + user_kwargs, secrets_kwargs = _get_kwargs_from_overrides(overrides) + + async with contextlib.AsyncExitStack() as stack: + # users + user = await stack.enter_async_context( + insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup + sqlalchemy_async_engine, + table=users, + values=random_user(**user_kwargs), + pk_col=users.c.id, + ) + ) + + # users_secrets + secrets = await stack.enter_async_context( + insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup + sqlalchemy_async_engine, + table=users_secrets, + values=random_user_secrets(user_id=user["id"], **secrets_kwargs), + pk_col=users_secrets.c.user_id, + ) + ) + + assert secrets.pop("user_id", None) == user["id"] + + yield {**user, **secrets} + + +@contextlib.contextmanager +def sync_insert_and_get_user_and_secrets_lifespan( + sqlalchemy_sync_engine: sa.engine.Engine, **overrides +): + user_kwargs, secrets_kwargs = _get_kwargs_from_overrides(overrides) + + with contextlib.ExitStack() as stack: + # users + user = stack.enter_context( + sync_insert_and_get_row_lifespan( + sqlalchemy_sync_engine, + table=users, + values=random_user(**user_kwargs), + pk_col=users.c.id, + ) + ) + + # users_secrets + secrets = stack.enter_context( + sync_insert_and_get_row_lifespan( + sqlalchemy_sync_engine, + table=users_secrets, + 
values=random_user_secrets(user_id=user["id"], **secrets_kwargs), + pk_col=users_secrets.c.user_id, + ) + ) + + assert secrets.pop("user_id", None) == user["id"] + + yield {**user, **secrets} + + +async def insert_user_and_secrets(conn, **overrides) -> int: + # NOTE: DEPRECATED: Legacy adapter. Use insert_and_get_user_and_secrets_lifespan instead + # Temporarily used where conn is produce by aiopg_engine + + user_kwargs, secrets_kwargs = _get_kwargs_from_overrides(overrides) + + # user data + user_id = await conn.scalar( + users.insert().values(**random_user(**user_kwargs)).returning(users.c.id) + ) + assert user_id is not None + + # secrets + await conn.execute( + users_secrets.insert().values( + **random_user_secrets(user_id=user_id, **secrets_kwargs) + ) + ) + + return user_id diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/storage_rpc_server.py b/packages/pytest-simcore/src/pytest_simcore/helpers/storage_rpc_server.py new file mode 100644 index 000000000000..72dc62ca438f --- /dev/null +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/storage_rpc_server.py @@ -0,0 +1,43 @@ +# pylint: disable=no-self-use +# pylint: disable=not-context-manager +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable + + +from typing import Literal + +from models_library.api_schemas_rpc_async_jobs.async_jobs import ( + AsyncJobGet, +) +from models_library.api_schemas_webserver.storage import PathToExport +from models_library.users import UserID +from pydantic import TypeAdapter, validate_call +from pytest_mock import MockType +from servicelib.celery.models import OwnerMetadata +from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient + + +class StorageSideEffects: + # pylint: disable=no-self-use + @validate_call(config={"arbitrary_types_allowed": True}) + async def start_export_data( + self, + rabbitmq_rpc_client: RabbitMQRPCClient | MockType, + *, + paths_to_export: list[PathToExport], + export_as: Literal["path", "download_link"], + owner_metadata: OwnerMetadata, + user_id: UserID + ) -> tuple[AsyncJobGet, OwnerMetadata]: + assert rabbitmq_rpc_client + assert owner_metadata + assert paths_to_export + assert export_as + + async_job_get = TypeAdapter(AsyncJobGet).validate_python( + AsyncJobGet.model_json_schema()["examples"][0], + ) + + return async_job_get, owner_metadata diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_fake_services_data.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_fake_services_data.py index 32d91d46c783..f3bb3c003f0b 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_fake_services_data.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_fake_services_data.py @@ -1,6 +1,7 @@ """ - NOTE: avoid creating dependencies +NOTE: avoid creating dependencies """ + from typing import Any FAKE_FILE_CONSUMER_SERVICES = [ @@ -55,7 +56,7 @@ def list_fake_file_consumers() -> list[dict[str, Any]]: consumers = [] for service in FAKE_FILE_CONSUMER_SERVICES: for consumable in service["consumes"]: - filetype, port, *_ = consumable.split(":") + ["input_1"] + filetype, port, *_ = [*consumable.split(":"), "input_1"] consumer = { "key": service["key"], "version": service["version"], diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_login.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_login.py index d055e3a110c0..6d843ceacdf3 100644 --- 
a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_login.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_login.py @@ -1,48 +1,21 @@ import contextlib import re from collections.abc import AsyncIterator -from datetime import datetime -from typing import Any, TypedDict +from typing import Any -from aiohttp import web from aiohttp.test_utils import TestClient -from models_library.users import UserID from servicelib.aiohttp import status -from simcore_service_webserver.db.models import UserRole, UserStatus -from simcore_service_webserver.groups.api import auto_add_user_to_product_group -from simcore_service_webserver.login._constants import MSG_LOGGED_IN from simcore_service_webserver.login._invitations_service import create_invitation_token from simcore_service_webserver.login._login_repository_legacy import ( - AsyncpgStorage, get_plugin_storage, ) -from simcore_service_webserver.products.products_service import list_products -from simcore_service_webserver.security.api import clean_auth_policy_cache +from simcore_service_webserver.login.constants import MSG_LOGGED_IN +from simcore_service_webserver.security import security_service from yarl import URL from .assert_checks import assert_status -from .faker_factories import DEFAULT_FAKER, DEFAULT_TEST_PASSWORD, random_user - - -# WARNING: DO NOT use UserDict is already in https://docs.python.org/3/library/collections.html#collections.UserDictclass UserRowDict(TypedDict): -# NOTE: this is modified dict version of packages/postgres-database/src/simcore_postgres_database/models/users.py for testing purposes -class _UserInfoDictRequired(TypedDict, total=True): - id: int - name: str - email: str - primary_gid: str - raw_password: str - status: UserStatus - role: UserRole - - -class UserInfoDict(_UserInfoDictRequired, total=False): - created_at: datetime - password_hash: str - first_name: str - last_name: str - phone: str - +from .faker_factories import DEFAULT_FAKER +from .webserver_users import NewUser, UserInfoDict, _create_account_in_db TEST_MARKS = re.compile(r"TEST (\w+):(.*)") @@ -65,76 +38,21 @@ def parse_link(text): return URL(link).path -async def _create_user(app: web.Application, data=None) -> UserInfoDict: - db: AsyncpgStorage = get_plugin_storage(app) - - # create - data = data or {} - data.setdefault("status", UserStatus.ACTIVE.name) - data.setdefault("role", UserRole.USER.name) - data.setdefault("password", DEFAULT_TEST_PASSWORD) - user = await db.create_user(random_user(**data)) - - # get - user = await db.get_user({"id": user["id"]}) - assert "first_name" in user - assert "last_name" in user - - # adds extras - extras = {"raw_password": data["password"]} - - return UserInfoDict( - **{ - key: user[key] - for key in [ - "id", - "name", - "email", - "primary_gid", - "status", - "role", - "created_at", - "password_hash", - "first_name", - "last_name", - "phone", - ] - }, - **extras, - ) - - -async def _register_user_in_default_product(app: web.Application, user_id: UserID): - products = list_products(app) - assert products - product_name = products[0].name - - return await auto_add_user_to_product_group(app, user_id, product_name=product_name) - - -async def _create_account( - app: web.Application, - user_data: dict[str, Any] | None = None, -) -> UserInfoDict: - # users, groups in db - user = await _create_user(app, user_data) - # user has default product - await _register_user_in_default_product(app, user_id=user["id"]) - return user - - async def log_client_in( client: TestClient, user_data: 
dict[str, Any] | None = None, *, - enable_check=True, + exit_stack: contextlib.AsyncExitStack, + enable_check: bool = True, ) -> UserInfoDict: assert client.app # create account - user = await _create_account(client.app, user_data=user_data) + user = await _create_account_in_db( + client.app, exit_stack=exit_stack, user_data=user_data + ) - # login + # login (requires) url = client.app.router["auth_login"].url_for() reponse = await client.post( str(url), @@ -150,26 +68,6 @@ async def log_client_in( return user -class NewUser: - def __init__( - self, - user_data: dict[str, Any] | None = None, - app: web.Application | None = None, - ): - self.user_data = user_data - self.user = None - assert app - self.db = get_plugin_storage(app) - self.app = app - - async def __aenter__(self) -> UserInfoDict: - self.user = await _create_account(self.app, self.user_data) - return self.user - - async def __aexit__(self, *args): - await self.db.delete_user(self.user) - - class LoggedUser(NewUser): def __init__(self, client: TestClient, user_data=None, *, check_if_succeeds=True): super().__init__(user_data, client.app) @@ -179,7 +77,10 @@ def __init__(self, client: TestClient, user_data=None, *, check_if_succeeds=True async def __aenter__(self) -> UserInfoDict: self.user = await log_client_in( - self.client, self.user_data, enable_check=self.enable_check + self.client, + self.user_data, + exit_stack=self.exit_stack, + enable_check=self.enable_check, ) return self.user @@ -187,7 +88,7 @@ async def __aexit__(self, *args): assert self.client.app # NOTE: cache key is based on an email. If the email is # reused during the test, then it creates quite some noise - await clean_auth_policy_cache(self.client.app) + await security_service.clean_auth_policy_cache(self.client.app) return await super().__aexit__(*args) @@ -231,11 +132,12 @@ def __init__( self.confirmation = None self.trial_days = trial_days self.extra_credits_in_usd = extra_credits_in_usd + self.db = get_plugin_storage(self.app) async def __aenter__(self) -> "NewInvitation": # creates host user assert self.client.app - self.user = await _create_user(self.client.app, self.user_data) + self.user = await super().__aenter__() self.confirmation = await create_invitation_token( self.db, diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py index 99ee393f3949..917d70d24cca 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py @@ -4,7 +4,6 @@ import json import uuid as uuidlib -from http import HTTPStatus from pathlib import Path from typing import Any @@ -187,7 +186,7 @@ async def __aexit__(self, *args): async def assert_get_same_project( client: TestClient, project: ProjectDict, - expected: HTTPStatus, + expected: int, api_vtag="/v0", ) -> dict: # GET /v0/projects/{project_id} diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_rpc_server.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_rpc_server.py index 17d8051d096e..3cedcff8aba2 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_rpc_server.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_rpc_server.py @@ -26,6 +26,14 @@ class WebserverRpcSideEffects: # pylint: disable=no-self-use + def __init__( + self, + project_job_rpc_get: ProjectJobRpcGet = ProjectJobRpcGet.model_validate( + 
ProjectJobRpcGet.model_json_schema()["examples"][0] + ), + ): + self.project_job_rpc_get = project_job_rpc_get + @validate_call(config={"arbitrary_types_allowed": True}) async def mark_project_as_job( self, @@ -35,12 +43,14 @@ async def mark_project_as_job( user_id: UserID, project_uuid: ProjectID, job_parent_resource_name: str, + storage_assets_deleted: bool, ) -> None: assert rpc_client assert not job_parent_resource_name.startswith("/") # nosec assert "/" in job_parent_resource_name # nosec assert not job_parent_resource_name.endswith("/") # nosec + assert isinstance(storage_assets_deleted, bool) assert product_name assert user_id @@ -84,3 +94,25 @@ async def list_projects_marked_as_jobs( limit=limit, offset=offset, ) + + @validate_call(config={"arbitrary_types_allowed": True}) + async def get_project_marked_as_job( + self, + rpc_client: RabbitMQRPCClient | MockType, + *, + product_name: ProductName, + user_id: UserID, + project_uuid: ProjectID, + job_parent_resource_name: str, + ) -> ProjectJobRpcGet: + assert rpc_client + assert product_name + assert user_id + assert project_uuid + assert job_parent_resource_name + + # Return a valid example from the schema + _data = self.project_job_rpc_get.model_dump() + _data["uuid"] = str(project_uuid) + _data["job_parent_resource_name"] = job_parent_resource_name + return ProjectJobRpcGet.model_validate(_data) diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_users.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_users.py new file mode 100644 index 000000000000..edb3399a14fa --- /dev/null +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_users.py @@ -0,0 +1,139 @@ +import contextlib +from datetime import datetime +from typing import Any, TypedDict + +from aiohttp import web +from common_library.users_enums import UserRole, UserStatus +from models_library.users import UserID +from simcore_service_webserver.db.plugin import get_asyncpg_engine +from simcore_service_webserver.groups import api as groups_service +from simcore_service_webserver.products.products_service import list_products +from sqlalchemy.ext.asyncio import AsyncEngine + +from .faker_factories import DEFAULT_TEST_PASSWORD +from .postgres_users import insert_and_get_user_and_secrets_lifespan + + +# WARNING: DO NOT use UserDict is already in https://docs.python.org/3/library/collections.html#collections.UserDictclass UserRowDict(TypedDict): +# NOTE: this is modified dict version of packages/postgres-database/src/simcore_postgres_database/models/users.py for testing purposes +class _UserInfoDictRequired(TypedDict, total=True): + id: int + name: str + email: str + primary_gid: str + raw_password: str + status: UserStatus + role: UserRole + + +class UserInfoDict(_UserInfoDictRequired, total=False): + created_at: datetime + password_hash: str + first_name: str + last_name: str + phone: str + + +async def _create_user_in_db( + sqlalchemy_async_engine: AsyncEngine, + exit_stack: contextlib.AsyncExitStack, + data: dict | None = None, +) -> UserInfoDict: + + # create fake + data = data or {} + data.setdefault("status", UserStatus.ACTIVE.name) + data.setdefault("role", UserRole.USER.name) + data.setdefault("password", DEFAULT_TEST_PASSWORD) + + raw_password = data["password"] + + # inject in db + user = await exit_stack.enter_async_context( + insert_and_get_user_and_secrets_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup + sqlalchemy_async_engine, **data + ) + ) + assert "first_name" in user + assert "last_name" in 
user
+
+    return UserInfoDict(
+        # required
+        # - in db
+        id=user["id"],
+        name=user["name"],
+        email=user["email"],
+        primary_gid=user["primary_gid"],
+        status=(
+            UserStatus(user["status"])
+            if not isinstance(user["status"], UserStatus)
+            else user["status"]
+        ),
+        role=(
+            UserRole(user["role"])
+            if not isinstance(user["role"], UserRole)
+            else user["role"]
+        ),
+        # optional
+        # - in db
+        created_at=(
+            user["created_at"]
+            if isinstance(user["created_at"], datetime)
+            else datetime.fromisoformat(user["created_at"])
+        ),
+        password_hash=user["password_hash"],
+        first_name=user["first_name"],
+        last_name=user["last_name"],
+        phone=user["phone"],
+        # extras
+        raw_password=raw_password,
+    )
+
+
+async def _register_user_in_default_product(app: web.Application, user_id: UserID):
+    products = list_products(app)
+    assert products
+    product_name = products[0].name
+
+    return await groups_service.auto_add_user_to_product_group(
+        app, user_id, product_name=product_name
+    )
+
+
+async def _create_account_in_db(
+    app: web.Application,
+    exit_stack: contextlib.AsyncExitStack,
+    user_data: dict[str, Any] | None = None,
+) -> UserInfoDict:
+    # users, groups in db
+    user = await _create_user_in_db(
+        get_asyncpg_engine(app), exit_stack=exit_stack, data=user_data
+    )
+
+    # user has default product
+    await _register_user_in_default_product(app, user_id=user["id"])
+    return user
+
+
+class NewUser:
+    def __init__(
+        self,
+        user_data: dict[str, Any] | None = None,
+        app: web.Application | None = None,
+    ):
+        self.user_data = user_data
+        self.user = None
+
+        assert app
+        self.app = app
+
+        self.exit_stack = contextlib.AsyncExitStack()
+
+    async def __aenter__(self) -> UserInfoDict:
+        self.user = await _create_account_in_db(
+            self.app, self.exit_stack, self.user_data
+        )
+        return self.user
+
+    async def __aexit__(self, *args):
+        await self.exit_stack.aclose()
diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_workspaces.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_workspaces.py
index 1dbe5ebeb42a..adf767b49455 100644
--- a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_workspaces.py
+++ b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_workspaces.py
@@ -5,7 +5,7 @@
 from simcore_postgres_database.models.workspaces_access_rights import (
     workspaces_access_rights,
 )
-from simcore_service_webserver.db.plugin import get_database_engine
+from simcore_service_webserver.db.plugin import get_database_engine_legacy
 from sqlalchemy.dialects.postgresql import insert as pg_insert
@@ -18,7 +18,7 @@ async def update_or_insert_workspace_group(
     write: bool,
     delete: bool,
 ) -> None:
-    async with get_database_engine(app).acquire() as conn:
+    async with get_database_engine_legacy(app).acquire() as conn:
         insert_stmt = pg_insert(workspaces_access_rights).values(
             workspace_id=workspace_id,
             gid=group_id,
diff --git a/packages/pytest-simcore/src/pytest_simcore/logging.py b/packages/pytest-simcore/src/pytest_simcore/logging.py
new file mode 100644
index 000000000000..2db9ae93c5d3
--- /dev/null
+++ b/packages/pytest-simcore/src/pytest_simcore/logging.py
@@ -0,0 +1,42 @@
+# In conftest.py or test_logging_utils.py
+import contextlib
+import logging
+from collections.abc import Iterator
+from contextlib import contextmanager
+
+import pytest
+from pytest_mock import MockerFixture
+from servicelib.logging_utils import async_loggers
+
+
+@pytest.fixture(autouse=True)
+def preserve_caplog_for_async_logging(mocker: MockerFixture) -> None:
+    # Patch async_loggers to preserve caplog handlers,
+    # and pytest logs in general as pytest captures logs in a special way
+    # that is not compatible with the queue handler used in async logging.
+    original_setup = async_loggers
+
+    @contextmanager
+    def patched_async_loggers(**kwargs) -> Iterator[None]:
+        # Find caplog's handler in root logger
+        root_logger = logging.getLogger()
+        caplog_handlers = [
+            h for h in root_logger.handlers if "LogCaptureHandler" in f"{type(h)}"
+        ]
+
+        with original_setup(**kwargs):
+            # After setup, restore caplog handlers alongside queue handler
+            for handler in caplog_handlers:
+                if handler not in root_logger.handlers:
+                    root_logger.addHandler(handler)
+            yield
+
+    methods_to_patch = [
+        "servicelib.logging_utils.async_loggers",
+        "servicelib.fastapi.logging_lifespan.async_loggers",
+        "tests.test_logging_utils.async_loggers",
+    ]
+    for method in methods_to_patch:
+        with contextlib.suppress(AttributeError, ModuleNotFoundError):
+            # Patch the method to use our patched version
+            mocker.patch(method, patched_async_loggers)
diff --git a/packages/pytest-simcore/src/pytest_simcore/long_running_tasks.py b/packages/pytest-simcore/src/pytest_simcore/long_running_tasks.py
new file mode 100644
index 000000000000..e3911dc62f5a
--- /dev/null
+++ b/packages/pytest-simcore/src/pytest_simcore/long_running_tasks.py
@@ -0,0 +1,14 @@
+from datetime import timedelta
+
+import pytest
+from pytest_mock import MockerFixture
+
+
+@pytest.fixture
+async def fast_long_running_tasks_cancellation(
+    mocker: MockerFixture,
+) -> None:
+    mocker.patch(
+        "servicelib.long_running_tasks.task._CANCEL_TASKS_CHECK_INTERVAL",
+        new=timedelta(seconds=1),
+    )
diff --git a/packages/pytest-simcore/src/pytest_simcore/postgres_service.py b/packages/pytest-simcore/src/pytest_simcore/postgres_service.py
index 19d9247e8eab..7814d413c07c 100644
--- a/packages/pytest-simcore/src/pytest_simcore/postgres_service.py
+++ b/packages/pytest-simcore/src/pytest_simcore/postgres_service.py
@@ -250,7 +250,6 @@ def postgres_env_vars_dict(postgres_dsn: PostgresTestConfig) -> EnvVarsDict:
         "POSTGRES_DB": postgres_dsn["database"],
         "POSTGRES_HOST": postgres_dsn["host"],
         "POSTGRES_PORT": f"{postgres_dsn['port']}",
-        "POSTGRES_ENDPOINT": f"{postgres_dsn['host']}:{postgres_dsn['port']}",
     }
diff --git a/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py b/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py
index e8691a10724c..8266de0947bb 100644
--- a/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py
+++ b/packages/pytest-simcore/src/pytest_simcore/pydantic_models.py
@@ -97,7 +97,6 @@ def _is_model_cls(obj) -> bool:
     assert inspect.ismodule(module)
     for model_name, model_cls in inspect.getmembers(module, _is_model_cls):
-
         yield from iter_model_examples_in_class(model_cls, model_name)
@@ -172,7 +171,7 @@ def model_cls_examples(model_cls: type[BaseModel]) -> dict[str, dict[str, Any]]:
     """
     warnings.warn(
         "The 'model_cls_examples' fixture is deprecated and will be removed in a future version. "
-        "Please use 'iter_model_example_in_class' or 'iter_model_examples_in_module' as an alternative.",
+        "Please use 'iter_model_examples_in_class' or 'iter_model_examples_in_module' as an alternative.",
         DeprecationWarning,
         stacklevel=2,
     )
diff --git a/packages/pytest-simcore/src/pytest_simcore/redis_service.py b/packages/pytest-simcore/src/pytest_simcore/redis_service.py
index 05aec86a2340..04177c7f9e28 100644
--- a/packages/pytest-simcore/src/pytest_simcore/redis_service.py
+++ b/packages/pytest-simcore/src/pytest_simcore/redis_service.py
@@ -4,10 +4,10 @@
 import logging
 from collections.abc import AsyncIterator
-from datetime import timedelta
 import pytest
 import tenacity
+from fakeredis import FakeAsyncRedis
 from pytest_mock import MockerFixture
 from redis.asyncio import Redis, from_url
 from settings_library.basic_types import PortInt
@@ -116,8 +116,6 @@ async def wait_till_redis_responsive(redis_url: URL | str) -> None:
 @pytest.fixture
-def mock_redis_socket_timeout(mocker: MockerFixture) -> None:
-    # lowered to allow CI to properly shutdown RedisClientSDK instances
-    mocker.patch(
-        "servicelib.redis._client.DEFAULT_SOCKET_TIMEOUT", timedelta(seconds=0.25)
-    )
+async def use_in_memory_redis(mocker: MockerFixture) -> RedisSettings:
+    mocker.patch("redis.asyncio.from_url", FakeAsyncRedis)
+    return RedisSettings()
diff --git a/packages/pytest-simcore/src/pytest_simcore/repository_paths.py b/packages/pytest-simcore/src/pytest_simcore/repository_paths.py
index 6112cef627bd..0f52de8f8444 100644
--- a/packages/pytest-simcore/src/pytest_simcore/repository_paths.py
+++ b/packages/pytest-simcore/src/pytest_simcore/repository_paths.py
@@ -85,6 +85,14 @@ def services_docker_compose_file(services_dir: Path) -> Path:
     return dcpath
+@pytest.fixture(scope="session")
+def services_docker_compose_dev_vendors_file(osparc_simcore_services_dir: Path) -> Path:
+    """Path to osparc-simcore/services/docker-compose-dev-vendors.yml file"""
+    dcpath = osparc_simcore_services_dir / "docker-compose-dev-vendors.yml"
+    assert dcpath.exists()
+    return dcpath
+
+
 @pytest.fixture(scope="session")
 def pylintrc(osparc_simcore_root_dir: Path) -> Path:
     pylintrc = osparc_simcore_root_dir / ".pylintrc"
diff --git a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py
index a001bb1d5d81..c3036d7c9de7 100644
--- a/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py
+++ b/packages/pytest-simcore/src/pytest_simcore/services_api_mocks_for_aiohttp_clients.py
@@ -8,7 +8,7 @@
 from urllib.parse import urlparse, urlunparse
 import pytest
-from aioresponses import aioresponses as AioResponsesMock
+from aioresponses import aioresponses as AioResponsesMock  # noqa: N812
 from aioresponses.core import CallbackResult
 from faker import Faker
 from models_library.api_schemas_directorv2.computations import (
diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_services.py b/packages/pytest-simcore/src/pytest_simcore/simcore_services.py
index 2a4f6d2ff4dc..274a8edb44a7 100644
--- a/packages/pytest-simcore/src/pytest_simcore/simcore_services.py
+++ b/packages/pytest-simcore/src/pytest_simcore/simcore_services.py
@@ -29,6 +29,7 @@
 _SERVICES_TO_SKIP: Final[set[str]] = {
+    "api-worker",
     "agent",  # global mode deploy (NO exposed ports, has http API)
     "dask-sidecar",  # global mode deploy (NO exposed ports, **NO** http API)
     "migration",
@@ -40,6 +41,7 @@
     "whoami",
     "sto-worker",
"sto-worker-cpu-bound", + "traefik-config-placeholder", } # TODO: unify healthcheck policies see https://github.com/ITISFoundation/osparc-simcore/pull/2281 DEFAULT_SERVICE_HEALTHCHECK_ENTRYPOINT: Final[str] = "/v0/" diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_data_models.py b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_data_models.py index e897b9ced75e..a41d4876612d 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_storage_data_models.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_storage_data_models.py @@ -18,7 +18,8 @@ from sqlalchemy.dialects.postgresql import insert as pg_insert from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine -from .helpers.faker_factories import DEFAULT_FAKER, random_project, random_user +from .helpers.faker_factories import DEFAULT_FAKER, random_project +from .helpers.postgres_users import insert_and_get_user_and_secrets_lifespan @asynccontextmanager @@ -30,19 +31,10 @@ async def _user_context( # NOTE: Ideally this (and next fixture) should be done via webserver API but at this point # in time, the webserver service would bring more dependencies to other services # which would turn this test too complex. - - # pylint: disable=no-value-for-parameter - stmt = users.insert().values(**random_user(name=name)).returning(users.c.id) - async with sqlalchemy_async_engine.begin() as conn: - result = await conn.execute(stmt) - row = result.one() - assert isinstance(row.id, int) - - try: - yield TypeAdapter(UserID).validate_python(row.id) - finally: - async with sqlalchemy_async_engine.begin() as conn: - await conn.execute(users.delete().where(users.c.id == row.id)) + async with insert_and_get_user_and_secrets_lifespan( + sqlalchemy_async_engine, name=name + ) as user: + yield TypeAdapter(UserID).validate_python(user["id"]) @pytest.fixture diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_groups_fixtures.py b/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_groups_fixtures.py index cc31177abcec..1cc8ca080fdc 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_groups_fixtures.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_groups_fixtures.py @@ -3,9 +3,9 @@ # pylint: disable=unused-variable """ - Fixtures for groups +Fixtures for groups - NOTE: These fixtures are used in integration and unit tests +NOTE: These fixtures are used in integration and unit tests """ @@ -18,7 +18,7 @@ from models_library.api_schemas_webserver.groups import GroupGet from models_library.groups import GroupsByTypeTuple, StandardGroupCreate from models_library.users import UserID -from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict +from pytest_simcore.helpers.webserver_users import NewUser, UserInfoDict from simcore_service_webserver.groups._groups_service import ( add_user_in_group, create_standard_group, diff --git a/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_projects_rest_api.py b/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_projects_rest_api.py index 2533cad65dda..aa88a5b5d82f 100644 --- a/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_projects_rest_api.py +++ b/packages/pytest-simcore/src/pytest_simcore/simcore_webserver_projects_rest_api.py @@ -70,7 +70,11 @@ def request_desc(self) -> str: "prjOwner": "foo@bar.com", "tags": [], "state": { - "locked": {"value": False, "status": "CLOSED"}, + "shareState": { + "status": "CLOSED", + "locked": False, + 
"currentUserGroupids": [], + }, "state": {"value": "NOT_STARTED"}, }, "dev": None, @@ -114,7 +118,11 @@ def request_desc(self) -> str: "quality": {}, "tags": [], "state": { - "locked": {"value": False, "status": "CLOSED"}, + "shareState": { + "status": "CLOSED", + "locked": False, + "currentUserGroupids": [], + }, "state": {"value": "NOT_STARTED"}, }, "workspace_id": None, @@ -149,14 +157,10 @@ def request_desc(self) -> str: "quality": {}, "tags": [], "state": { - "locked": { - "value": True, - "owner": { - "user_id": 1, - "first_name": "crespo", - "last_name": "", - }, + "shareState": { "status": "OPENED", + "locked": True, + "currentUserGroupids": [1], }, "state": {"value": "NOT_STARTED"}, }, @@ -284,14 +288,10 @@ def request_desc(self) -> str: }, "tags": [], "state": { - "locked": { - "value": True, - "owner": { - "user_id": 1, - "first_name": "crespo", - "last_name": "", - }, + "shareState": { "status": "OPENED", + "locked": True, + "currentUserGroupids": [1], }, "state": {"value": "NOT_STARTED"}, }, @@ -547,14 +547,10 @@ def request_desc(self) -> str: }, "tags": [], "state": { - "locked": { - "value": True, - "owner": { - "user_id": 1, - "first_name": "crespo", - "last_name": "", - }, + "shareState": { "status": "OPENED", + "locked": True, + "currentUserGroupids": [1], }, "state": {"value": "NOT_STARTED"}, }, @@ -734,7 +730,11 @@ def request_desc(self) -> str: }, "tags": [], "state": { - "locked": {"value": False, "status": "CLOSED"}, + "shareState": { + "status": "CLOSED", + "locked": False, + "currentUserGroupids": [], + }, "state": {"value": "NOT_STARTED"}, }, } @@ -988,7 +988,11 @@ def request_desc(self) -> str: "prjOwner": "user@company.com", "tags": [22], "state": { - "locked": {"value": False, "status": "CLOSED"}, + "shareState": { + "status": "CLOSED", + "locked": False, + "currentUserGroupids": [], + }, "state": {"value": "NOT_STARTED"}, }, } diff --git a/packages/pytest-simcore/src/pytest_simcore/socketio.py b/packages/pytest-simcore/src/pytest_simcore/socketio.py index fd1f21c24a8e..586e9d67e74c 100644 --- a/packages/pytest-simcore/src/pytest_simcore/socketio.py +++ b/packages/pytest-simcore/src/pytest_simcore/socketio.py @@ -70,7 +70,7 @@ async def web_server( @pytest.fixture async def server_url(web_server: URL) -> str: - return f'{web_server.with_path("/")}' + return f"{web_server.with_path('/')}" @pytest.fixture diff --git a/packages/pytest-simcore/src/pytest_simcore/socketio_client.py b/packages/pytest-simcore/src/pytest_simcore/socketio_client.py index 23b9ee0b190c..99b769112056 100644 --- a/packages/pytest-simcore/src/pytest_simcore/socketio_client.py +++ b/packages/pytest-simcore/src/pytest_simcore/socketio_client.py @@ -9,10 +9,12 @@ import pytest import socketio from aiohttp.test_utils import TestClient -from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.logging_tools import log_context from servicelib.aiohttp import status from yarl import URL +from .helpers.assert_checks import assert_status + logger = logging.getLogger(__name__) @@ -44,33 +46,30 @@ async def _create(client_override: TestClient | None = None) -> str: assert data assert not error - return ( - resp.request_info.headers["Cookie"] - if "Cookie" in resp.request_info.headers - else "" - ) + return resp.request_info.headers.get("Cookie", "") return _create @pytest.fixture -async def socketio_client_factory( - socketio_url_factory: Callable, - security_cookie_factory: Callable, - client_session_id_factory: Callable, +async def create_socketio_connection( + 
socketio_url_factory: Callable[[TestClient | None], str], + security_cookie_factory: Callable[[TestClient | None], Awaitable[str]], + client_session_id_factory: Callable[[], str], ) -> AsyncIterable[ - Callable[[str | None, TestClient | None], Awaitable[socketio.AsyncClient]] + Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] + ] ]: clients: list[socketio.AsyncClient] = [] async def _connect( client_session_id: str | None = None, client: TestClient | None = None - ) -> socketio.AsyncClient: + ) -> tuple[socketio.AsyncClient, str]: if client_session_id is None: client_session_id = client_session_id_factory() sio = socketio.AsyncClient(ssl_verify=False) - # enginio 3.10.0 introduced ssl verification assert client_session_id url = str( URL(socketio_url_factory(client)).with_query( @@ -83,21 +82,27 @@ async def _connect( # WARNING: engineio fails with empty cookies. Expects "key=value" headers.update({"Cookie": cookie}) - print(f"--> Connecting socketio client to {url} ...") - await sio.connect(url, headers=headers, wait_timeout=10) - assert sio.sid - print("... connection done") + with log_context(logging.INFO, f"socketio_client: connecting to {url}"): + print(f"--> Connecting socketio client to {url} ...") + sio.on( + "connect", + handler=lambda: logger.info("Connected successfully with %s", sio.sid), + ) + sio.on( + "disconnect", + handler=lambda: logger.info("Disconnected from %s", sio.sid), + ) + await sio.connect(url, headers=headers, wait_timeout=10) + assert sio.sid clients.append(sio) - return sio + return sio, client_session_id yield _connect # cleans up clients produce by _connect(*) calls for sio in clients: if sio.connected: - print(f"<--Disconnecting socketio client {sio}") - await sio.disconnect() - await sio.wait() - print(f"... 
disconnection from {sio} done.") - assert not sio.connected - assert not sio.sid + with log_context(logging.INFO, f"socketio_client: disconnecting {sio}"): + await sio.disconnect() + await sio.wait() + assert not sio.connected diff --git a/packages/pytest-simcore/src/pytest_simcore/tracing.py b/packages/pytest-simcore/src/pytest_simcore/tracing.py new file mode 100644 index 000000000000..e9bed749c2c8 --- /dev/null +++ b/packages/pytest-simcore/src/pytest_simcore/tracing.py @@ -0,0 +1,38 @@ +import pytest +from opentelemetry.sdk.trace.export import SimpleSpanProcessor +from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter +from pytest_mock import MockerFixture + + +@pytest.fixture +async def setup_tracing_fastapi( + mocker: MockerFixture, monkeypatch: pytest.MonkeyPatch +) -> InMemorySpanExporter: + memory_exporter = InMemorySpanExporter() + span_processor = SimpleSpanProcessor(memory_exporter) + mocker.patch( + "servicelib.fastapi.tracing._create_span_processor", return_value=span_processor + ) + + monkeypatch.setenv( + "TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT", "http://opentelemetry-collector" + ) + monkeypatch.setenv("TRACING_OPENTELEMETRY_COLLECTOR_PORT", "4318") + return memory_exporter + + +@pytest.fixture +async def setup_tracing_aiohttp( + mocker: MockerFixture, monkeypatch: pytest.MonkeyPatch +) -> InMemorySpanExporter: + memory_exporter = InMemorySpanExporter() + span_processor = SimpleSpanProcessor(memory_exporter) + mocker.patch( + "servicelib.aiohttp.tracing._create_span_processor", return_value=span_processor + ) + + monkeypatch.setenv( + "TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT", "http://opentelemetry-collector" + ) + monkeypatch.setenv("TRACING_OPENTELEMETRY_COLLECTOR_PORT", "4318") + return memory_exporter diff --git a/packages/pytest-simcore/tests/test_helpers_asserts_checks.py b/packages/pytest-simcore/tests/test_helpers_asserts_checks.py new file mode 100644 index 000000000000..189d84f9f81e --- /dev/null +++ b/packages/pytest-simcore/tests/test_helpers_asserts_checks.py @@ -0,0 +1,35 @@ +import pytest +from pytest_simcore.helpers.assert_checks import assert_equal_ignoring_none + + +@pytest.mark.parametrize( + "expected, actual", + [ + ({"a": 1, "b": 2}, {"a": 1, "b": 2, "c": 3}), + ({"a": 1, "b": None}, {"a": 1, "b": 42}), + ({"a": {"x": 10, "y": None}}, {"a": {"x": 10, "y": 99}}), + ({"a": {"x": 10, "y": 20}}, {"a": {"x": 10, "y": 20, "z": 30}}), + ({}, {"foo": "bar"}), + ], +) +def test_assert_equal_ignoring_none_passes(expected, actual): + assert_equal_ignoring_none(expected, actual) + + +@pytest.mark.parametrize( + "expected, actual, error_msg", + [ + ({"a": 1, "b": 2}, {"a": 1}, "Missing key b"), + ({"a": 1, "b": 2}, {"a": 1, "b": 3}, "Mismatch in b: 3 != 2"), + ( + {"a": {"x": 10, "y": 20}}, + {"a": {"x": 10, "y": 99}}, + "Mismatch in y: 99 != 20", + ), + ({"a": {"x": 10}}, {"a": {}}, "Missing key x"), + ], +) +def test_assert_equal_ignoring_none_fails(expected, actual, error_msg): + with pytest.raises(AssertionError) as exc_info: + assert_equal_ignoring_none(expected, actual) + assert error_msg in str(exc_info.value) diff --git a/packages/pytest-simcore/uv.lock b/packages/pytest-simcore/uv.lock deleted file mode 100644 index 57c794b678f1..000000000000 --- a/packages/pytest-simcore/uv.lock +++ /dev/null @@ -1,728 +0,0 @@ -version = 1 -revision = 1 -requires-python = ">=3.11" - -[[package]] -name = "annotated-types" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, -] - -[[package]] -name = "anyio" -version = "4.9.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "idna" }, - { name = "sniffio" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916 }, -] - -[[package]] -name = "bidict" -version = "0.23.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9a/6e/026678aa5a830e07cd9498a05d3e7e650a4f56a42f267a53d22bcda1bdc9/bidict-0.23.1.tar.gz", hash = "sha256:03069d763bc387bbd20e7d49914e75fc4132a41937fa3405417e1a5a2d006d71", size = 29093 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/99/37/e8730c3587a65eb5645d4aba2d27aae48e8003614d6aaf15dda67f702f1f/bidict-0.23.1-py3-none-any.whl", hash = "sha256:5dae8d4d79b552a71cbabc7deb25dfe8ce710b17ff41711e13010ead2abfc3e5", size = 32764 }, -] - -[[package]] -name = "certifi" -version = "2025.1.31" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1c/ab/c9f1e32b7b1bf505bf26f0ef697775960db7932abeb7b516de930ba2705f/certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651", size = 167577 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe", size = 166393 }, -] - -[[package]] -name = "click" -version = "8.1.8" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188 }, -] - -[[package]] -name = "colorama" -version = "0.4.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } -wheels = [ - { url 
= "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, -] - -[[package]] -name = "dnspython" -version = "2.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632 }, -] - -[[package]] -name = "email-validator" -version = "2.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "dnspython" }, - { name = "idna" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/48/ce/13508a1ec3f8bb981ae4ca79ea40384becc868bfae97fd1c942bb3a001b1/email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7", size = 48967 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/ee/bf0adb559ad3c786f12bcbc9296b3f5675f529199bef03e2df281fa1fadb/email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631", size = 33521 }, -] - -[[package]] -name = "fastapi" -version = "0.115.12" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pydantic" }, - { name = "starlette" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f4/55/ae499352d82338331ca1e28c7f4a63bfd09479b16395dce38cf50a39e2c2/fastapi-0.115.12.tar.gz", hash = "sha256:1e2c2a2646905f9e83d32f04a3f86aff4a286669c6c950ca95b5fd68c2602681", size = 295236 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/50/b3/b51f09c2ba432a576fe63758bddc81f78f0c6309d9e5c10d194313bf021e/fastapi-0.115.12-py3-none-any.whl", hash = "sha256:e94613d6c05e27be7ffebdd6ea5f388112e5e430c8f7d6494a9d1d88d43e814d", size = 95164 }, -] - -[package.optional-dependencies] -standard = [ - { name = "email-validator" }, - { name = "fastapi-cli", extra = ["standard"] }, - { name = "httpx" }, - { name = "jinja2" }, - { name = "python-multipart" }, - { name = "uvicorn", extra = ["standard"] }, -] - -[[package]] -name = "fastapi-cli" -version = "0.0.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "rich-toolkit" }, - { name = "typer" }, - { name = "uvicorn", extra = ["standard"] }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fe/73/82a5831fbbf8ed75905bacf5b2d9d3dfd6f04d6968b29fe6f72a5ae9ceb1/fastapi_cli-0.0.7.tar.gz", hash = "sha256:02b3b65956f526412515907a0793c9094abd4bfb5457b389f645b0ea6ba3605e", size = 16753 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/e6/5daefc851b514ce2287d8f5d358ae4341089185f78f3217a69d0ce3a390c/fastapi_cli-0.0.7-py3-none-any.whl", hash = "sha256:d549368ff584b2804336c61f192d86ddea080c11255f375959627911944804f4", size = 10705 }, -] - -[package.optional-dependencies] -standard = [ - { name = "uvicorn", extra = ["standard"] }, -] - -[[package]] -name = "h11" -version = "0.14.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259 }, -] - -[[package]] -name = "httpcore" -version = "1.0.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "h11" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/6a/41/d7d0a89eb493922c37d343b607bc1b5da7f5be7e383740b4753ad8943e90/httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c", size = 85196 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/87/f5/72347bc88306acb359581ac4d52f23c0ef445b57157adedb9aee0cd689d2/httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd", size = 78551 }, -] - -[[package]] -name = "httptools" -version = "0.6.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a7/9a/ce5e1f7e131522e6d3426e8e7a490b3a01f39a6696602e1c4f33f9e94277/httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c", size = 240639 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/26/bb526d4d14c2774fe07113ca1db7255737ffbb119315839af2065abfdac3/httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069", size = 199029 }, - { url = "https://files.pythonhosted.org/packages/a6/17/3e0d3e9b901c732987a45f4f94d4e2c62b89a041d93db89eafb262afd8d5/httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a", size = 103492 }, - { url = "https://files.pythonhosted.org/packages/b7/24/0fe235d7b69c42423c7698d086d4db96475f9b50b6ad26a718ef27a0bce6/httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975", size = 462891 }, - { url = "https://files.pythonhosted.org/packages/b1/2f/205d1f2a190b72da6ffb5f41a3736c26d6fa7871101212b15e9b5cd8f61d/httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636", size = 459788 }, - { url = "https://files.pythonhosted.org/packages/6e/4c/d09ce0eff09057a206a74575ae8f1e1e2f0364d20e2442224f9e6612c8b9/httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721", size = 433214 }, - { url = "https://files.pythonhosted.org/packages/3e/d2/84c9e23edbccc4a4c6f96a1b8d99dfd2350289e94f00e9ccc7aadde26fb5/httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988", size = 434120 }, - { url = "https://files.pythonhosted.org/packages/d0/46/4d8e7ba9581416de1c425b8264e2cadd201eb709ec1584c381f3e98f51c1/httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17", size = 88565 }, - { url = 
"https://files.pythonhosted.org/packages/bb/0e/d0b71465c66b9185f90a091ab36389a7352985fe857e352801c39d6127c8/httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2", size = 200683 }, - { url = "https://files.pythonhosted.org/packages/e2/b8/412a9bb28d0a8988de3296e01efa0bd62068b33856cdda47fe1b5e890954/httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44", size = 104337 }, - { url = "https://files.pythonhosted.org/packages/9b/01/6fb20be3196ffdc8eeec4e653bc2a275eca7f36634c86302242c4fbb2760/httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1", size = 508796 }, - { url = "https://files.pythonhosted.org/packages/f7/d8/b644c44acc1368938317d76ac991c9bba1166311880bcc0ac297cb9d6bd7/httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2", size = 510837 }, - { url = "https://files.pythonhosted.org/packages/52/d8/254d16a31d543073a0e57f1c329ca7378d8924e7e292eda72d0064987486/httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81", size = 485289 }, - { url = "https://files.pythonhosted.org/packages/5f/3c/4aee161b4b7a971660b8be71a92c24d6c64372c1ab3ae7f366b3680df20f/httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f", size = 489779 }, - { url = "https://files.pythonhosted.org/packages/12/b7/5cae71a8868e555f3f67a50ee7f673ce36eac970f029c0c5e9d584352961/httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970", size = 88634 }, - { url = "https://files.pythonhosted.org/packages/94/a3/9fe9ad23fd35f7de6b91eeb60848986058bd8b5a5c1e256f5860a160cc3e/httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660", size = 197214 }, - { url = "https://files.pythonhosted.org/packages/ea/d9/82d5e68bab783b632023f2fa31db20bebb4e89dfc4d2293945fd68484ee4/httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083", size = 102431 }, - { url = "https://files.pythonhosted.org/packages/96/c1/cb499655cbdbfb57b577734fde02f6fa0bbc3fe9fb4d87b742b512908dff/httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3", size = 473121 }, - { url = "https://files.pythonhosted.org/packages/af/71/ee32fd358f8a3bb199b03261f10921716990808a675d8160b5383487a317/httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071", size = 473805 }, - { url = "https://files.pythonhosted.org/packages/8a/0a/0d4df132bfca1507114198b766f1737d57580c9ad1cf93c1ff673e3387be/httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5", size = 448858 }, - { url = 
"https://files.pythonhosted.org/packages/1e/6a/787004fdef2cabea27bad1073bf6a33f2437b4dbd3b6fb4a9d71172b1c7c/httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0", size = 452042 }, - { url = "https://files.pythonhosted.org/packages/4d/dc/7decab5c404d1d2cdc1bb330b1bf70e83d6af0396fd4fc76fc60c0d522bf/httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8", size = 87682 }, -] - -[[package]] -name = "httpx" -version = "0.28.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "certifi" }, - { name = "httpcore" }, - { name = "idna" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, -] - -[[package]] -name = "idna" -version = "3.10" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, -] - -[[package]] -name = "jinja2" -version = "3.1.6" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "markupsafe" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 }, -] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "mdurl" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 }, -] - -[[package]] -name = "markupsafe" -version = "3.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 } -wheels = [ - { 
url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353 }, - { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392 }, - { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984 }, - { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120 }, - { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032 }, - { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057 }, - { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359 }, - { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306 }, - { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094 }, - { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521 }, - { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274 }, - { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348 }, - { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149 }, - { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118 }, - { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993 }, - { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178 }, - { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319 }, - { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352 }, - { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097 }, - { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601 }, - { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274 }, - { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352 }, - { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122 }, - { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085 }, - { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978 }, - { url = 
"https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208 }, - { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357 }, - { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344 }, - { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101 }, - { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603 }, - { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510 }, - { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486 }, - { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480 }, - { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914 }, - { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796 }, - { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473 }, - { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114 }, - { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098 }, - { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208 }, - { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739 }, -] - -[[package]] -name = "mdurl" -version = "0.1.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, -] - -[[package]] -name = "pydantic" -version = "2.11.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "annotated-types" }, - { name = "pydantic-core" }, - { name = "typing-extensions" }, - { name = "typing-inspection" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/10/2e/ca897f093ee6c5f3b0bee123ee4465c50e75431c3d5b6a3b44a47134e891/pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3", size = 785513 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/1d/407b29780a289868ed696d1616f4aad49d6388e5a77f567dcd2629dcd7b8/pydantic-2.11.3-py3-none-any.whl", hash = "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f", size = 443591 }, -] - -[[package]] -name = "pydantic-core" -version = "2.33.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/17/19/ed6a078a5287aea7922de6841ef4c06157931622c89c2a47940837b5eecd/pydantic_core-2.33.1.tar.gz", hash = "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df", size = 434395 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d6/7f/c6298830cb780c46b4f46bb24298d01019ffa4d21769f39b908cd14bbd50/pydantic_core-2.33.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e966fc3caaf9f1d96b349b0341c70c8d6573bf1bac7261f7b0ba88f96c56c24", size = 2044224 }, - { url = "https://files.pythonhosted.org/packages/a8/65/6ab3a536776cad5343f625245bd38165d6663256ad43f3a200e5936afd6c/pydantic_core-2.33.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bfd0adeee563d59c598ceabddf2c92eec77abcb3f4a391b19aa7366170bd9e30", size = 1858845 }, - { url = "https://files.pythonhosted.org/packages/e9/15/9a22fd26ba5ee8c669d4b8c9c244238e940cd5d818649603ca81d1c69861/pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91815221101ad3c6b507804178a7bb5cb7b2ead9ecd600041669c8d805ebd595", size = 1910029 }, - { url = "https://files.pythonhosted.org/packages/d5/33/8cb1a62818974045086f55f604044bf35b9342900318f9a2a029a1bec460/pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9fea9c1869bb4742d174a57b4700c6dadea951df8b06de40c2fedb4f02931c2e", 
size = 1997784 }, - { url = "https://files.pythonhosted.org/packages/c0/ca/49958e4df7715c71773e1ea5be1c74544923d10319173264e6db122543f9/pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d20eb4861329bb2484c021b9d9a977566ab16d84000a57e28061151c62b349a", size = 2141075 }, - { url = "https://files.pythonhosted.org/packages/7b/a6/0b3a167a9773c79ba834b959b4e18c3ae9216b8319bd8422792abc8a41b1/pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb935c5591573ae3201640579f30128ccc10739b45663f93c06796854405505", size = 2745849 }, - { url = "https://files.pythonhosted.org/packages/0b/60/516484135173aa9e5861d7a0663dce82e4746d2e7f803627d8c25dfa5578/pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c964fd24e6166420d18fb53996d8c9fd6eac9bf5ae3ec3d03015be4414ce497f", size = 2005794 }, - { url = "https://files.pythonhosted.org/packages/86/70/05b1eb77459ad47de00cf78ee003016da0cedf8b9170260488d7c21e9181/pydantic_core-2.33.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:681d65e9011f7392db5aa002b7423cc442d6a673c635668c227c6c8d0e5a4f77", size = 2123237 }, - { url = "https://files.pythonhosted.org/packages/c7/57/12667a1409c04ae7dc95d3b43158948eb0368e9c790be8b095cb60611459/pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e100c52f7355a48413e2999bfb4e139d2977a904495441b374f3d4fb4a170961", size = 2086351 }, - { url = "https://files.pythonhosted.org/packages/57/61/cc6d1d1c1664b58fdd6ecc64c84366c34ec9b606aeb66cafab6f4088974c/pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:048831bd363490be79acdd3232f74a0e9951b11b2b4cc058aeb72b22fdc3abe1", size = 2258914 }, - { url = "https://files.pythonhosted.org/packages/d1/0a/edb137176a1f5419b2ddee8bde6a0a548cfa3c74f657f63e56232df8de88/pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bdc84017d28459c00db6f918a7272a5190bec3090058334e43a76afb279eac7c", size = 2257385 }, - { url = "https://files.pythonhosted.org/packages/26/3c/48ca982d50e4b0e1d9954919c887bdc1c2b462801bf408613ccc641b3daa/pydantic_core-2.33.1-cp311-cp311-win32.whl", hash = "sha256:32cd11c5914d1179df70406427097c7dcde19fddf1418c787540f4b730289896", size = 1923765 }, - { url = "https://files.pythonhosted.org/packages/33/cd/7ab70b99e5e21559f5de38a0928ea84e6f23fdef2b0d16a6feaf942b003c/pydantic_core-2.33.1-cp311-cp311-win_amd64.whl", hash = "sha256:2ea62419ba8c397e7da28a9170a16219d310d2cf4970dbc65c32faf20d828c83", size = 1950688 }, - { url = "https://files.pythonhosted.org/packages/4b/ae/db1fc237b82e2cacd379f63e3335748ab88b5adde98bf7544a1b1bd10a84/pydantic_core-2.33.1-cp311-cp311-win_arm64.whl", hash = "sha256:fc903512177361e868bc1f5b80ac8c8a6e05fcdd574a5fb5ffeac5a9982b9e89", size = 1908185 }, - { url = "https://files.pythonhosted.org/packages/c8/ce/3cb22b07c29938f97ff5f5bb27521f95e2ebec399b882392deb68d6c440e/pydantic_core-2.33.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1293d7febb995e9d3ec3ea09caf1a26214eec45b0f29f6074abb004723fc1de8", size = 2026640 }, - { url = "https://files.pythonhosted.org/packages/19/78/f381d643b12378fee782a72126ec5d793081ef03791c28a0fd542a5bee64/pydantic_core-2.33.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99b56acd433386c8f20be5c4000786d1e7ca0523c8eefc995d14d79c7a081498", size = 1852649 }, - { url = 
"https://files.pythonhosted.org/packages/9d/2b/98a37b80b15aac9eb2c6cfc6dbd35e5058a352891c5cce3a8472d77665a6/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35a5ec3fa8c2fe6c53e1b2ccc2454398f95d5393ab398478f53e1afbbeb4d939", size = 1892472 }, - { url = "https://files.pythonhosted.org/packages/4e/d4/3c59514e0f55a161004792b9ff3039da52448f43f5834f905abef9db6e4a/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b172f7b9d2f3abc0efd12e3386f7e48b576ef309544ac3a63e5e9cdd2e24585d", size = 1977509 }, - { url = "https://files.pythonhosted.org/packages/a9/b6/c2c7946ef70576f79a25db59a576bce088bdc5952d1b93c9789b091df716/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9097b9f17f91eea659b9ec58148c0747ec354a42f7389b9d50701610d86f812e", size = 2128702 }, - { url = "https://files.pythonhosted.org/packages/88/fe/65a880f81e3f2a974312b61f82a03d85528f89a010ce21ad92f109d94deb/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc77ec5b7e2118b152b0d886c7514a4653bcb58c6b1d760134a9fab915f777b3", size = 2679428 }, - { url = "https://files.pythonhosted.org/packages/6f/ff/4459e4146afd0462fb483bb98aa2436d69c484737feaceba1341615fb0ac/pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3d15245b08fa4a84cefc6c9222e6f37c98111c8679fbd94aa145f9a0ae23d", size = 2008753 }, - { url = "https://files.pythonhosted.org/packages/7c/76/1c42e384e8d78452ededac8b583fe2550c84abfef83a0552e0e7478ccbc3/pydantic_core-2.33.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef99779001d7ac2e2461d8ab55d3373fe7315caefdbecd8ced75304ae5a6fc6b", size = 2114849 }, - { url = "https://files.pythonhosted.org/packages/00/72/7d0cf05095c15f7ffe0eb78914b166d591c0eed72f294da68378da205101/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fc6bf8869e193855e8d91d91f6bf59699a5cdfaa47a404e278e776dd7f168b39", size = 2069541 }, - { url = "https://files.pythonhosted.org/packages/b3/69/94a514066bb7d8be499aa764926937409d2389c09be0b5107a970286ef81/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:b1caa0bc2741b043db7823843e1bde8aaa58a55a58fda06083b0569f8b45693a", size = 2239225 }, - { url = "https://files.pythonhosted.org/packages/84/b0/e390071eadb44b41f4f54c3cef64d8bf5f9612c92686c9299eaa09e267e2/pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ec259f62538e8bf364903a7d0d0239447059f9434b284f5536e8402b7dd198db", size = 2248373 }, - { url = "https://files.pythonhosted.org/packages/d6/b2/288b3579ffc07e92af66e2f1a11be3b056fe1214aab314748461f21a31c3/pydantic_core-2.33.1-cp312-cp312-win32.whl", hash = "sha256:e14f369c98a7c15772b9da98987f58e2b509a93235582838bd0d1d8c08b68fda", size = 1907034 }, - { url = "https://files.pythonhosted.org/packages/02/28/58442ad1c22b5b6742b992ba9518420235adced665513868f99a1c2638a5/pydantic_core-2.33.1-cp312-cp312-win_amd64.whl", hash = "sha256:1c607801d85e2e123357b3893f82c97a42856192997b95b4d8325deb1cd0c5f4", size = 1956848 }, - { url = "https://files.pythonhosted.org/packages/a1/eb/f54809b51c7e2a1d9f439f158b8dd94359321abcc98767e16fc48ae5a77e/pydantic_core-2.33.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d13f0276806ee722e70a1c93da19748594f19ac4299c7e41237fc791d1861ea", size = 1903986 }, - { url = 
"https://files.pythonhosted.org/packages/7a/24/eed3466a4308d79155f1cdd5c7432c80ddcc4530ba8623b79d5ced021641/pydantic_core-2.33.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70af6a21237b53d1fe7b9325b20e65cbf2f0a848cf77bed492b029139701e66a", size = 2033551 }, - { url = "https://files.pythonhosted.org/packages/ab/14/df54b1a0bc9b6ded9b758b73139d2c11b4e8eb43e8ab9c5847c0a2913ada/pydantic_core-2.33.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:282b3fe1bbbe5ae35224a0dbd05aed9ccabccd241e8e6b60370484234b456266", size = 1852785 }, - { url = "https://files.pythonhosted.org/packages/fa/96/e275f15ff3d34bb04b0125d9bc8848bf69f25d784d92a63676112451bfb9/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b315e596282bbb5822d0c7ee9d255595bd7506d1cb20c2911a4da0b970187d3", size = 1897758 }, - { url = "https://files.pythonhosted.org/packages/b7/d8/96bc536e975b69e3a924b507d2a19aedbf50b24e08c80fb00e35f9baaed8/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dfae24cf9921875ca0ca6a8ecb4bb2f13c855794ed0d468d6abbec6e6dcd44a", size = 1986109 }, - { url = "https://files.pythonhosted.org/packages/90/72/ab58e43ce7e900b88cb571ed057b2fcd0e95b708a2e0bed475b10130393e/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6dd8ecfde08d8bfadaea669e83c63939af76f4cf5538a72597016edfa3fad516", size = 2129159 }, - { url = "https://files.pythonhosted.org/packages/dc/3f/52d85781406886c6870ac995ec0ba7ccc028b530b0798c9080531b409fdb/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f593494876eae852dc98c43c6f260f45abdbfeec9e4324e31a481d948214764", size = 2680222 }, - { url = "https://files.pythonhosted.org/packages/f4/56/6e2ef42f363a0eec0fd92f74a91e0ac48cd2e49b695aac1509ad81eee86a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948b73114f47fd7016088e5186d13faf5e1b2fe83f5e320e371f035557fd264d", size = 2006980 }, - { url = "https://files.pythonhosted.org/packages/4c/c0/604536c4379cc78359f9ee0aa319f4aedf6b652ec2854953f5a14fc38c5a/pydantic_core-2.33.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e11f3864eb516af21b01e25fac915a82e9ddad3bb0fb9e95a246067398b435a4", size = 2120840 }, - { url = "https://files.pythonhosted.org/packages/1f/46/9eb764814f508f0edfb291a0f75d10854d78113fa13900ce13729aaec3ae/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:549150be302428b56fdad0c23c2741dcdb5572413776826c965619a25d9c6bde", size = 2072518 }, - { url = "https://files.pythonhosted.org/packages/42/e3/fb6b2a732b82d1666fa6bf53e3627867ea3131c5f39f98ce92141e3e3dc1/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:495bc156026efafd9ef2d82372bd38afce78ddd82bf28ef5276c469e57c0c83e", size = 2248025 }, - { url = "https://files.pythonhosted.org/packages/5c/9d/fbe8fe9d1aa4dac88723f10a921bc7418bd3378a567cb5e21193a3c48b43/pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ec79de2a8680b1a67a07490bddf9636d5c2fab609ba8c57597e855fa5fa4dacd", size = 2254991 }, - { url = "https://files.pythonhosted.org/packages/aa/99/07e2237b8a66438d9b26482332cda99a9acccb58d284af7bc7c946a42fd3/pydantic_core-2.33.1-cp313-cp313-win32.whl", hash = "sha256:ee12a7be1742f81b8a65b36c6921022301d466b82d80315d215c4c691724986f", size = 1915262 }, - { url = 
"https://files.pythonhosted.org/packages/8a/f4/e457a7849beeed1e5defbcf5051c6f7b3c91a0624dd31543a64fc9adcf52/pydantic_core-2.33.1-cp313-cp313-win_amd64.whl", hash = "sha256:ede9b407e39949d2afc46385ce6bd6e11588660c26f80576c11c958e6647bc40", size = 1956626 }, - { url = "https://files.pythonhosted.org/packages/20/d0/e8d567a7cff7b04e017ae164d98011f1e1894269fe8e90ea187a3cbfb562/pydantic_core-2.33.1-cp313-cp313-win_arm64.whl", hash = "sha256:aa687a23d4b7871a00e03ca96a09cad0f28f443690d300500603bd0adba4b523", size = 1909590 }, - { url = "https://files.pythonhosted.org/packages/ef/fd/24ea4302d7a527d672c5be06e17df16aabfb4e9fdc6e0b345c21580f3d2a/pydantic_core-2.33.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:401d7b76e1000d0dd5538e6381d28febdcacb097c8d340dde7d7fc6e13e9f95d", size = 1812963 }, - { url = "https://files.pythonhosted.org/packages/5f/95/4fbc2ecdeb5c1c53f1175a32d870250194eb2fdf6291b795ab08c8646d5d/pydantic_core-2.33.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aeb055a42d734c0255c9e489ac67e75397d59c6fbe60d155851e9782f276a9c", size = 1986896 }, - { url = "https://files.pythonhosted.org/packages/71/ae/fe31e7f4a62431222d8f65a3bd02e3fa7e6026d154a00818e6d30520ea77/pydantic_core-2.33.1-cp313-cp313t-win_amd64.whl", hash = "sha256:338ea9b73e6e109f15ab439e62cb3b78aa752c7fd9536794112e14bee02c8d18", size = 1931810 }, - { url = "https://files.pythonhosted.org/packages/0b/76/1794e440c1801ed35415238d2c728f26cd12695df9057154ad768b7b991c/pydantic_core-2.33.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3a371dc00282c4b84246509a5ddc808e61b9864aa1eae9ecc92bb1268b82db4a", size = 2042858 }, - { url = "https://files.pythonhosted.org/packages/73/b4/9cd7b081fb0b1b4f8150507cd59d27b275c3e22ad60b35cb19ea0977d9b9/pydantic_core-2.33.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:f59295ecc75a1788af8ba92f2e8c6eeaa5a94c22fc4d151e8d9638814f85c8fc", size = 1873745 }, - { url = "https://files.pythonhosted.org/packages/e1/d7/9ddb7575d4321e40d0363903c2576c8c0c3280ebea137777e5ab58d723e3/pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08530b8ac922003033f399128505f513e30ca770527cc8bbacf75a84fcc2c74b", size = 1904188 }, - { url = "https://files.pythonhosted.org/packages/d1/a8/3194ccfe461bb08da19377ebec8cb4f13c9bd82e13baebc53c5c7c39a029/pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae370459da6a5466978c0eacf90690cb57ec9d533f8e63e564ef3822bfa04fe", size = 2083479 }, - { url = "https://files.pythonhosted.org/packages/42/c7/84cb569555d7179ca0b3f838cef08f66f7089b54432f5b8599aac6e9533e/pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e3de2777e3b9f4d603112f78006f4ae0acb936e95f06da6cb1a45fbad6bdb4b5", size = 2118415 }, - { url = "https://files.pythonhosted.org/packages/3b/67/72abb8c73e0837716afbb58a59cc9e3ae43d1aa8677f3b4bc72c16142716/pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a64e81e8cba118e108d7126362ea30e021291b7805d47e4896e52c791be2761", size = 2079623 }, - { url = "https://files.pythonhosted.org/packages/0b/cd/c59707e35a47ba4cbbf153c3f7c56420c58653b5801b055dc52cccc8e2dc/pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:52928d8c1b6bda03cc6d811e8923dffc87a2d3c8b3bfd2ce16471c7147a24850", size = 2250175 }, - { url = 
"https://files.pythonhosted.org/packages/84/32/e4325a6676b0bed32d5b084566ec86ed7fd1e9bcbfc49c578b1755bde920/pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1b30d92c9412beb5ac6b10a3eb7ef92ccb14e3f2a8d7732e2d739f58b3aa7544", size = 2254674 }, - { url = "https://files.pythonhosted.org/packages/12/6f/5596dc418f2e292ffc661d21931ab34591952e2843e7168ea5a52591f6ff/pydantic_core-2.33.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f995719707e0e29f0f41a8aa3bcea6e761a36c9136104d3189eafb83f5cec5e5", size = 2080951 }, -] - -[[package]] -name = "pygments" -version = "2.19.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293 }, -] - -[[package]] -name = "pytest-simcore" -version = "0.1.0" -source = { virtual = "." } -dependencies = [ - { name = "fastapi", extra = ["standard"] }, - { name = "python-socketio" }, - { name = "uvicorn" }, -] - -[package.metadata] -requires-dist = [ - { name = "fastapi", extras = ["standard"], specifier = ">=0.115.12" }, - { name = "python-socketio", specifier = ">=5.12.1" }, - { name = "uvicorn", specifier = ">=0.34.0" }, -] - -[[package]] -name = "python-dotenv" -version = "1.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/88/2c/7bb1416c5620485aa793f2de31d3df393d3686aa8a8506d11e10e13c5baf/python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5", size = 39920 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256 }, -] - -[[package]] -name = "python-engineio" -version = "4.11.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "simple-websocket" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/52/e0/a9e0fe427ce7f1b7dbf9531fa00ffe4b557c4a7bc8e71891c115af123170/python_engineio-4.11.2.tar.gz", hash = "sha256:145bb0daceb904b4bb2d3eb2d93f7dbb7bb87a6a0c4f20a94cc8654dec977129", size = 91381 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/07/8f/978a0b913e3f8ad33a9a2fe204d32efe3d1ee34ecb1f2829c1cfbdd92082/python_engineio-4.11.2-py3-none-any.whl", hash = "sha256:f0971ac4c65accc489154fe12efd88f53ca8caf04754c46a66e85f5102ef22ad", size = 59239 }, -] - -[[package]] -name = "python-multipart" -version = "0.0.20" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", 
size = 24546 }, -] - -[[package]] -name = "python-socketio" -version = "5.12.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "bidict" }, - { name = "python-engineio" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ce/d0/40ed38076e8aee94785d546d3e3a1cae393da5806a8530be877187e2875f/python_socketio-5.12.1.tar.gz", hash = "sha256:0299ff1f470b676c09c1bfab1dead25405077d227b2c13cf217a34dadc68ba9c", size = 119991 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/a3/c69806f30dd81df5a99d592e7db4c930c3a9b098555aa97b0eb866b20b11/python_socketio-5.12.1-py3-none-any.whl", hash = "sha256:24a0ea7cfff0e021eb28c68edbf7914ee4111bdf030b95e4d250c4dc9af7a386", size = 76947 }, -] - -[[package]] -name = "pyyaml" -version = "6.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612 }, - { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040 }, - { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829 }, - { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167 }, - { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952 }, - { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301 }, - { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638 }, - { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850 }, - { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980 }, - { 
url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 }, - { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 }, - { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 }, - { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 }, - { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 }, - { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 }, - { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 }, - { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 }, - { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 }, - { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, - { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, - { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, - { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", 
size = 763361 }, - { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, - { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, - { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 }, - { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, - { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, -] - -[[package]] -name = "rich" -version = "14.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "markdown-it-py" }, - { name = "pygments" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725", size = 224078 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229 }, -] - -[[package]] -name = "rich-toolkit" -version = "0.14.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "rich" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/2e/ea/13945d58d556a28dfb0f774ad5c8af759527390e59505a40d164bf8ce1ce/rich_toolkit-0.14.1.tar.gz", hash = "sha256:9248e2d087bfc01f3e4c5c8987e05f7fa744d00dd22fa2be3aa6e50255790b3f", size = 104416 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/66/e8/61c5b12d1567fdba41a6775db12a090d88b8305424ee7c47259c70d33cb4/rich_toolkit-0.14.1-py3-none-any.whl", hash = "sha256:dc92c0117d752446d04fdc828dbca5873bcded213a091a5d3742a2beec2e6559", size = 24177 }, -] - -[[package]] -name = "shellingham" -version = "1.5.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755 }, -] - -[[package]] -name = "simple-websocket" -version = "1.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "wsproto" 
}, -] -sdist = { url = "https://files.pythonhosted.org/packages/b0/d4/bfa032f961103eba93de583b161f0e6a5b63cebb8f2c7d0c6e6efe1e3d2e/simple_websocket-1.1.0.tar.gz", hash = "sha256:7939234e7aa067c534abdab3a9ed933ec9ce4691b0713c78acb195560aa52ae4", size = 17300 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/52/59/0782e51887ac6b07ffd1570e0364cf901ebc36345fea669969d2084baebb/simple_websocket-1.1.0-py3-none-any.whl", hash = "sha256:4af6069630a38ed6c561010f0e11a5bc0d4ca569b36306eb257cd9a192497c8c", size = 13842 }, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, -] - -[[package]] -name = "starlette" -version = "0.46.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/04/1b/52b27f2e13ceedc79a908e29eac426a63465a1a01248e5f24aa36a62aeb3/starlette-0.46.1.tar.gz", hash = "sha256:3c88d58ee4bd1bb807c0d1acb381838afc7752f9ddaec81bbe4383611d833230", size = 2580102 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/4b/528ccf7a982216885a1ff4908e886b8fb5f19862d1962f56a3fce2435a70/starlette-0.46.1-py3-none-any.whl", hash = "sha256:77c74ed9d2720138b25875133f3a2dae6d854af2ec37dceb56aef370c1d8a227", size = 71995 }, -] - -[[package]] -name = "typer" -version = "0.15.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "rich" }, - { name = "shellingham" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/8b/6f/3991f0f1c7fcb2df31aef28e0594d8d54b05393a0e4e34c65e475c2a5d41/typer-0.15.2.tar.gz", hash = "sha256:ab2fab47533a813c49fe1f16b1a370fd5819099c00b119e0633df65f22144ba5", size = 100711 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/fc/5b29fea8cee020515ca82cc68e3b8e1e34bb19a3535ad854cac9257b414c/typer-0.15.2-py3-none-any.whl", hash = "sha256:46a499c6107d645a9c13f7ee46c5d5096cae6f5fc57dd11eccbbb9ae3e44ddfc", size = 45061 }, -] - -[[package]] -name = "typing-extensions" -version = "4.13.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806 }, -] - -[[package]] -name = "typing-inspection" -version = "0.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/82/5c/e6082df02e215b846b4b8c0b887a64d7d08ffaba30605502639d44c06b82/typing_inspection-0.4.0.tar.gz", hash = 
"sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122", size = 76222 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/31/08/aa4fdfb71f7de5176385bd9e90852eaf6b5d622735020ad600f2bab54385/typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f", size = 14125 }, -] - -[[package]] -name = "uvicorn" -version = "0.34.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "h11" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/4b/4d/938bd85e5bf2edeec766267a5015ad969730bb91e31b44021dfe8b22df6c/uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9", size = 76568 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4", size = 62315 }, -] - -[package.optional-dependencies] -standard = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "httptools" }, - { name = "python-dotenv" }, - { name = "pyyaml" }, - { name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" }, - { name = "watchfiles" }, - { name = "websockets" }, -] - -[[package]] -name = "uvloop" -version = "0.21.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/af/c0/854216d09d33c543f12a44b393c402e89a920b1a0a7dc634c42de91b9cf6/uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3", size = 2492741 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/57/a7/4cf0334105c1160dd6819f3297f8700fda7fc30ab4f61fbf3e725acbc7cc/uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8", size = 1447410 }, - { url = "https://files.pythonhosted.org/packages/8c/7c/1517b0bbc2dbe784b563d6ab54f2ef88c890fdad77232c98ed490aa07132/uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0", size = 805476 }, - { url = "https://files.pythonhosted.org/packages/ee/ea/0bfae1aceb82a503f358d8d2fa126ca9dbdb2ba9c7866974faec1cb5875c/uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e", size = 3960855 }, - { url = "https://files.pythonhosted.org/packages/8a/ca/0864176a649838b838f36d44bf31c451597ab363b60dc9e09c9630619d41/uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb", size = 3973185 }, - { url = "https://files.pythonhosted.org/packages/30/bf/08ad29979a936d63787ba47a540de2132169f140d54aa25bc8c3df3e67f4/uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6", size = 3820256 }, - { url = "https://files.pythonhosted.org/packages/da/e2/5cf6ef37e3daf2f06e651aae5ea108ad30df3cb269102678b61ebf1fdf42/uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d", size = 3937323 }, - { url = 
"https://files.pythonhosted.org/packages/8c/4c/03f93178830dc7ce8b4cdee1d36770d2f5ebb6f3d37d354e061eefc73545/uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c", size = 1471284 }, - { url = "https://files.pythonhosted.org/packages/43/3e/92c03f4d05e50f09251bd8b2b2b584a2a7f8fe600008bcc4523337abe676/uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2", size = 821349 }, - { url = "https://files.pythonhosted.org/packages/a6/ef/a02ec5da49909dbbfb1fd205a9a1ac4e88ea92dcae885e7c961847cd51e2/uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d", size = 4580089 }, - { url = "https://files.pythonhosted.org/packages/06/a7/b4e6a19925c900be9f98bec0a75e6e8f79bb53bdeb891916609ab3958967/uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc", size = 4693770 }, - { url = "https://files.pythonhosted.org/packages/ce/0c/f07435a18a4b94ce6bd0677d8319cd3de61f3a9eeb1e5f8ab4e8b5edfcb3/uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb", size = 4451321 }, - { url = "https://files.pythonhosted.org/packages/8f/eb/f7032be105877bcf924709c97b1bf3b90255b4ec251f9340cef912559f28/uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f", size = 4659022 }, - { url = "https://files.pythonhosted.org/packages/3f/8d/2cbef610ca21539f0f36e2b34da49302029e7c9f09acef0b1c3b5839412b/uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281", size = 1468123 }, - { url = "https://files.pythonhosted.org/packages/93/0d/b0038d5a469f94ed8f2b2fce2434a18396d8fbfb5da85a0a9781ebbdec14/uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af", size = 819325 }, - { url = "https://files.pythonhosted.org/packages/50/94/0a687f39e78c4c1e02e3272c6b2ccdb4e0085fda3b8352fecd0410ccf915/uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6", size = 4582806 }, - { url = "https://files.pythonhosted.org/packages/d2/19/f5b78616566ea68edd42aacaf645adbf71fbd83fc52281fba555dc27e3f1/uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816", size = 4701068 }, - { url = "https://files.pythonhosted.org/packages/47/57/66f061ee118f413cd22a656de622925097170b9380b30091b78ea0c6ea75/uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc", size = 4454428 }, - { url = "https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018 }, -] - -[[package]] -name = "watchfiles" -version = "1.0.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/03/e2/8ed598c42057de7aa5d97c472254af4906ff0a59a66699d426fc9ef795d7/watchfiles-1.0.5.tar.gz", hash = "sha256:b7529b5dcc114679d43827d8c35a07c493ad6f083633d573d81c660abc5979e9", size = 94537 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/39/f4/41b591f59021786ef517e1cdc3b510383551846703e03f204827854a96f8/watchfiles-1.0.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:237f9be419e977a0f8f6b2e7b0475ababe78ff1ab06822df95d914a945eac827", size = 405336 }, - { url = "https://files.pythonhosted.org/packages/ae/06/93789c135be4d6d0e4f63e96eea56dc54050b243eacc28439a26482b5235/watchfiles-1.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0da39ff917af8b27a4bdc5a97ac577552a38aac0d260a859c1517ea3dc1a7c4", size = 395977 }, - { url = "https://files.pythonhosted.org/packages/d2/db/1cd89bd83728ca37054512d4d35ab69b5f12b8aa2ac9be3b0276b3bf06cc/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cfcb3952350e95603f232a7a15f6c5f86c5375e46f0bd4ae70d43e3e063c13d", size = 455232 }, - { url = "https://files.pythonhosted.org/packages/40/90/d8a4d44ffe960517e487c9c04f77b06b8abf05eb680bed71c82b5f2cad62/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:68b2dddba7a4e6151384e252a5632efcaa9bc5d1c4b567f3cb621306b2ca9f63", size = 459151 }, - { url = "https://files.pythonhosted.org/packages/6c/da/267a1546f26465dead1719caaba3ce660657f83c9d9c052ba98fb8856e13/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95cf944fcfc394c5f9de794ce581914900f82ff1f855326f25ebcf24d5397418", size = 489054 }, - { url = "https://files.pythonhosted.org/packages/b1/31/33850dfd5c6efb6f27d2465cc4c6b27c5a6f5ed53c6fa63b7263cf5f60f6/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecf6cd9f83d7c023b1aba15d13f705ca7b7d38675c121f3cc4a6e25bd0857ee9", size = 523955 }, - { url = "https://files.pythonhosted.org/packages/09/84/b7d7b67856efb183a421f1416b44ca975cb2ea6c4544827955dfb01f7dc2/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:852de68acd6212cd6d33edf21e6f9e56e5d98c6add46f48244bd479d97c967c6", size = 502234 }, - { url = "https://files.pythonhosted.org/packages/71/87/6dc5ec6882a2254cfdd8b0718b684504e737273903b65d7338efaba08b52/watchfiles-1.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5730f3aa35e646103b53389d5bc77edfbf578ab6dab2e005142b5b80a35ef25", size = 454750 }, - { url = "https://files.pythonhosted.org/packages/3d/6c/3786c50213451a0ad15170d091570d4a6554976cf0df19878002fc96075a/watchfiles-1.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:18b3bd29954bc4abeeb4e9d9cf0b30227f0f206c86657674f544cb032296acd5", size = 631591 }, - { url = "https://files.pythonhosted.org/packages/1b/b3/1427425ade4e359a0deacce01a47a26024b2ccdb53098f9d64d497f6684c/watchfiles-1.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ba5552a1b07c8edbf197055bc9d518b8f0d98a1c6a73a293bc0726dce068ed01", size = 625370 }, - { url = "https://files.pythonhosted.org/packages/15/ba/f60e053b0b5b8145d682672024aa91370a29c5c921a88977eb565de34086/watchfiles-1.0.5-cp311-cp311-win32.whl", hash = "sha256:2f1fefb2e90e89959447bc0420fddd1e76f625784340d64a2f7d5983ef9ad246", size = 277791 }, - { url = "https://files.pythonhosted.org/packages/50/ed/7603c4e164225c12c0d4e8700b64bb00e01a6c4eeea372292a3856be33a4/watchfiles-1.0.5-cp311-cp311-win_amd64.whl", hash = 
"sha256:b6e76ceb1dd18c8e29c73f47d41866972e891fc4cc7ba014f487def72c1cf096", size = 291622 }, - { url = "https://files.pythonhosted.org/packages/a2/c2/99bb7c96b4450e36877fde33690ded286ff555b5a5c1d925855d556968a1/watchfiles-1.0.5-cp311-cp311-win_arm64.whl", hash = "sha256:266710eb6fddc1f5e51843c70e3bebfb0f5e77cf4f27129278c70554104d19ed", size = 283699 }, - { url = "https://files.pythonhosted.org/packages/2a/8c/4f0b9bdb75a1bfbd9c78fad7d8854369283f74fe7cf03eb16be77054536d/watchfiles-1.0.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b5eb568c2aa6018e26da9e6c86f3ec3fd958cee7f0311b35c2630fa4217d17f2", size = 401511 }, - { url = "https://files.pythonhosted.org/packages/dc/4e/7e15825def77f8bd359b6d3f379f0c9dac4eb09dd4ddd58fd7d14127179c/watchfiles-1.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0a04059f4923ce4e856b4b4e5e783a70f49d9663d22a4c3b3298165996d1377f", size = 392715 }, - { url = "https://files.pythonhosted.org/packages/58/65/b72fb817518728e08de5840d5d38571466c1b4a3f724d190cec909ee6f3f/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e380c89983ce6e6fe2dd1e1921b9952fb4e6da882931abd1824c092ed495dec", size = 454138 }, - { url = "https://files.pythonhosted.org/packages/3e/a4/86833fd2ea2e50ae28989f5950b5c3f91022d67092bfec08f8300d8b347b/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fe43139b2c0fdc4a14d4f8d5b5d967f7a2777fd3d38ecf5b1ec669b0d7e43c21", size = 458592 }, - { url = "https://files.pythonhosted.org/packages/38/7e/42cb8df8be9a37e50dd3a818816501cf7a20d635d76d6bd65aae3dbbff68/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee0822ce1b8a14fe5a066f93edd20aada932acfe348bede8aa2149f1a4489512", size = 487532 }, - { url = "https://files.pythonhosted.org/packages/fc/fd/13d26721c85d7f3df6169d8b495fcac8ab0dc8f0945ebea8845de4681dab/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a0dbcb1c2d8f2ab6e0a81c6699b236932bd264d4cef1ac475858d16c403de74d", size = 522865 }, - { url = "https://files.pythonhosted.org/packages/a1/0d/7f9ae243c04e96c5455d111e21b09087d0eeaf9a1369e13a01c7d3d82478/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a2014a2b18ad3ca53b1f6c23f8cd94a18ce930c1837bd891262c182640eb40a6", size = 499887 }, - { url = "https://files.pythonhosted.org/packages/8e/0f/a257766998e26aca4b3acf2ae97dff04b57071e991a510857d3799247c67/watchfiles-1.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f6ae86d5cb647bf58f9f655fcf577f713915a5d69057a0371bc257e2553234", size = 454498 }, - { url = "https://files.pythonhosted.org/packages/81/79/8bf142575a03e0af9c3d5f8bcae911ee6683ae93a625d349d4ecf4c8f7df/watchfiles-1.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1a7bac2bde1d661fb31f4d4e8e539e178774b76db3c2c17c4bb3e960a5de07a2", size = 630663 }, - { url = "https://files.pythonhosted.org/packages/f1/80/abe2e79f610e45c63a70d271caea90c49bbf93eb00fa947fa9b803a1d51f/watchfiles-1.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ab626da2fc1ac277bbf752446470b367f84b50295264d2d313e28dc4405d663", size = 625410 }, - { url = "https://files.pythonhosted.org/packages/91/6f/bc7fbecb84a41a9069c2c6eb6319f7f7df113adf113e358c57fc1aff7ff5/watchfiles-1.0.5-cp312-cp312-win32.whl", hash = "sha256:9f4571a783914feda92018ef3901dab8caf5b029325b5fe4558c074582815249", size = 277965 }, - { url = 
"https://files.pythonhosted.org/packages/99/a5/bf1c297ea6649ec59e935ab311f63d8af5faa8f0b86993e3282b984263e3/watchfiles-1.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:360a398c3a19672cf93527f7e8d8b60d8275119c5d900f2e184d32483117a705", size = 291693 }, - { url = "https://files.pythonhosted.org/packages/7f/7b/fd01087cc21db5c47e5beae507b87965db341cce8a86f9eb12bf5219d4e0/watchfiles-1.0.5-cp312-cp312-win_arm64.whl", hash = "sha256:1a2902ede862969077b97523987c38db28abbe09fb19866e711485d9fbf0d417", size = 283287 }, - { url = "https://files.pythonhosted.org/packages/c7/62/435766874b704f39b2fecd8395a29042db2b5ec4005bd34523415e9bd2e0/watchfiles-1.0.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0b289572c33a0deae62daa57e44a25b99b783e5f7aed81b314232b3d3c81a11d", size = 401531 }, - { url = "https://files.pythonhosted.org/packages/6e/a6/e52a02c05411b9cb02823e6797ef9bbba0bfaf1bb627da1634d44d8af833/watchfiles-1.0.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a056c2f692d65bf1e99c41045e3bdcaea3cb9e6b5a53dcaf60a5f3bd95fc9763", size = 392417 }, - { url = "https://files.pythonhosted.org/packages/3f/53/c4af6819770455932144e0109d4854437769672d7ad897e76e8e1673435d/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9dca99744991fc9850d18015c4f0438865414e50069670f5f7eee08340d8b40", size = 453423 }, - { url = "https://files.pythonhosted.org/packages/cb/d1/8e88df58bbbf819b8bc5cfbacd3c79e01b40261cad0fc84d1e1ebd778a07/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:894342d61d355446d02cd3988a7326af344143eb33a2fd5d38482a92072d9563", size = 458185 }, - { url = "https://files.pythonhosted.org/packages/ff/70/fffaa11962dd5429e47e478a18736d4e42bec42404f5ee3b92ef1b87ad60/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab44e1580924d1ffd7b3938e02716d5ad190441965138b4aa1d1f31ea0877f04", size = 486696 }, - { url = "https://files.pythonhosted.org/packages/39/db/723c0328e8b3692d53eb273797d9a08be6ffb1d16f1c0ba2bdbdc2a3852c/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d6f9367b132078b2ceb8d066ff6c93a970a18c3029cea37bfd7b2d3dd2e5db8f", size = 522327 }, - { url = "https://files.pythonhosted.org/packages/cd/05/9fccc43c50c39a76b68343484b9da7b12d42d0859c37c61aec018c967a32/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2e55a9b162e06e3f862fb61e399fe9f05d908d019d87bf5b496a04ef18a970a", size = 499741 }, - { url = "https://files.pythonhosted.org/packages/23/14/499e90c37fa518976782b10a18b18db9f55ea73ca14641615056f8194bb3/watchfiles-1.0.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0125f91f70e0732a9f8ee01e49515c35d38ba48db507a50c5bdcad9503af5827", size = 453995 }, - { url = "https://files.pythonhosted.org/packages/61/d9/f75d6840059320df5adecd2c687fbc18960a7f97b55c300d20f207d48aef/watchfiles-1.0.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:13bb21f8ba3248386337c9fa51c528868e6c34a707f729ab041c846d52a0c69a", size = 629693 }, - { url = "https://files.pythonhosted.org/packages/fc/17/180ca383f5061b61406477218c55d66ec118e6c0c51f02d8142895fcf0a9/watchfiles-1.0.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:839ebd0df4a18c5b3c1b890145b5a3f5f64063c2a0d02b13c76d78fe5de34936", size = 624677 }, - { url = "https://files.pythonhosted.org/packages/bf/15/714d6ef307f803f236d69ee9d421763707899d6298d9f3183e55e366d9af/watchfiles-1.0.5-cp313-cp313-win32.whl", hash = 
"sha256:4a8ec1e4e16e2d5bafc9ba82f7aaecfeec990ca7cd27e84fb6f191804ed2fcfc", size = 277804 }, - { url = "https://files.pythonhosted.org/packages/a8/b4/c57b99518fadf431f3ef47a610839e46e5f8abf9814f969859d1c65c02c7/watchfiles-1.0.5-cp313-cp313-win_amd64.whl", hash = "sha256:f436601594f15bf406518af922a89dcaab416568edb6f65c4e5bbbad1ea45c11", size = 291087 }, -] - -[[package]] -name = "websockets" -version = "15.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431", size = 175423 }, - { url = "https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57", size = 173082 }, - { url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905", size = 173330 }, - { url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562", size = 182878 }, - { url = "https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792", size = 181883 }, - { url = "https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413", size = 182252 }, - { url = "https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8", size = 182521 }, - { url = "https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3", size = 181958 }, - { url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf", size = 181918 }, - { url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl", hash = 
"sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85", size = 176388 }, - { url = "https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065", size = 176828 }, - { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437 }, - { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096 }, - { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332 }, - { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152 }, - { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096 }, - { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523 }, - { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790 }, - { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165 }, - { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160 }, - { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395 }, - { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841 }, - { url = 
"https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440 }, - { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098 }, - { url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329 }, - { url = "https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111 }, - { url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054 }, - { url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496 }, - { url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829 }, - { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217 }, - { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195 }, - { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393 }, - { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837 }, - { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743 }, -] - -[[package]] -name = "wsproto" -version = "1.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "h11" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/c9/4a/44d3c295350d776427904d73c189e10aeae66d7f555bb2feee16d1e4ba5a/wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065", size = 53425 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/78/58/e860788190eba3bcce367f74d29c4675466ce8dddfba85f7827588416f01/wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736", size = 24226 }, -] diff --git a/packages/service-integration/Dockerfile b/packages/service-integration/Dockerfile index 8a6d6a854c83..39e6841602c2 100644 --- a/packages/service-integration/Dockerfile +++ b/packages/service-integration/Dockerfile @@ -2,7 +2,7 @@ # Define arguments in the global scope ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build FROM python:${PYTHON_VERSION}-slim-bookworm AS base-arm64 @@ -50,7 +50,8 @@ ENV LANG=C.UTF-8 ENV PYTHONDONTWRITEBYTECODE=1 \ VIRTUAL_ENV=/home/scu/.venv # https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode -ENV UV_COMPILE_BYTECODE=1 +ENV UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy # Ensures that the python and pip executables used # in the image will be those from our virtualenv. @@ -74,13 +75,6 @@ COPY --from=uv_build /uv /uvx /bin/ RUN uv venv "${VIRTUAL_ENV}" - -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - pip~=24.0 \ - wheel \ - setuptools - WORKDIR /build/packages/service-integration RUN \ diff --git a/packages/service-integration/requirements/_base.in b/packages/service-integration/requirements/_base.in index 213a27f4c131..899bf3ec23bf 100644 --- a/packages/service-integration/requirements/_base.in +++ b/packages/service-integration/requirements/_base.in @@ -13,5 +13,5 @@ jinja2_time jsonschema # pytest-plugin pytest # pytest-plugin pyyaml -typer[all] +typer yarl diff --git a/packages/service-integration/requirements/_base.txt b/packages/service-integration/requirements/_base.txt index 332dcc970010..e76fb651293c 100644 --- a/packages/service-integration/requirements/_base.txt +++ b/packages/service-integration/requirements/_base.txt @@ -22,7 +22,7 @@ chardet==5.2.0 # via binaryornot charset-normalizer==3.4.1 # via requests -click==8.1.8 +click==8.2.1 # via # -r requirements/_base.in # cookiecutter @@ -42,7 +42,7 @@ idna==3.10 # yarl iniconfig==2.0.0 # via pytest -jinja2==3.1.5 +jinja2==3.1.6 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -52,6 +52,8 @@ jinja2==3.1.5 # jinja2-time jinja2-time==0.2.0 # via -r requirements/_base.in +jsonref==1.1.0 + # via -r requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.23.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in @@ -81,7 +83,7 @@ pluggy==1.5.0 # via pytest propcache==0.3.0 # via yarl -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -92,9 +94,9 @@ pydantic==2.10.6 # -r requirements/../../../packages/models-library/requirements/_base.in # pydantic-extra-types # pydantic-settings 
-pydantic-core==2.27.2 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.2 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -107,8 +109,10 @@ pydantic-settings==2.7.0 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in pygments==2.19.1 - # via rich -pytest==8.3.5 + # via + # pytest + # rich +pytest==8.4.1 # via -r requirements/_base.in python-dateutil==2.9.0.post0 # via arrow @@ -132,11 +136,11 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via # cookiecutter # docker -rich==13.9.4 +rich==14.1.0 # via # cookiecutter # typer @@ -150,17 +154,20 @@ six==1.17.0 # via python-dateutil text-unidecode==1.3 # via python-slugify -typer==0.15.2 +typer==0.16.1 # via -r requirements/_base.in types-python-dateutil==2.9.0.20241206 # via arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # pydantic # pydantic-core # pydantic-extra-types # typer -urllib3==2.3.0 + # typing-inspection +typing-inspection==0.4.1 + # via pydantic +urllib3==2.5.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt diff --git a/packages/service-integration/requirements/_test.txt b/packages/service-integration/requirements/_test.txt index 19f48613efac..0074581d905b 100644 --- a/packages/service-integration/requirements/_test.txt +++ b/packages/service-integration/requirements/_test.txt @@ -19,14 +19,19 @@ pluggy==1.5.0 # via # -c requirements/_base.txt # pytest -pytest==8.3.5 + # pytest-cov +pygments==2.19.1 + # via + # -c requirements/_base.txt + # pytest +pytest==8.4.1 # via # -c requirements/_base.txt # -r requirements/_test.in # pytest-cov # pytest-instafail # pytest-sugar -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in pytest-instafail==0.5.0 # via -r requirements/_test.in @@ -53,7 +58,7 @@ types-pyyaml==6.0.12.20241230 # via -r requirements/_test.in types-requests==2.32.0.20250301 # via types-docker -urllib3==2.3.0 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/packages/service-integration/requirements/_tools.txt b/packages/service-integration/requirements/_tools.txt index 3b1673839385..5e646f82bd2b 100644 --- a/packages/service-integration/requirements/_tools.txt +++ b/packages/service-integration/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # black @@ -27,9 +27,9 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # black # mypy @@ -42,7 +42,9 @@ packaging==24.2 # black # build pathspec==0.12.1 - # via black + # via + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -67,11 +69,11 @@ pyyaml==6.0.2 # pre-commit ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.2 +setuptools==80.9.0 # via pip-tools 
tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # mypy diff --git a/packages/service-integration/src/service_integration/__init__.py b/packages/service-integration/src/service_integration/__init__.py index 4f56c57d703b..78352fb23e83 100644 --- a/packages/service-integration/src/service_integration/__init__.py +++ b/packages/service-integration/src/service_integration/__init__.py @@ -1,5 +1,3 @@ -""" Library to facilitate the integration of user services running in osparc-simcore - -""" +"""Library to facilitate the integration of user services running in osparc-simcore""" from ._meta import __version__ diff --git a/packages/service-integration/src/service_integration/cli/__init__.py b/packages/service-integration/src/service_integration/cli/__init__.py index a146de5735dd..6e8b5c6343e5 100644 --- a/packages/service-integration/src/service_integration/cli/__init__.py +++ b/packages/service-integration/src/service_integration/cli/__init__.py @@ -7,7 +7,7 @@ from .._meta import __version__ from ..settings import AppSettings -from . import _compose_spec, _metadata, _run_creator, _test +from . import _compose_spec, _escaping, _metadata, _run_creator, _test from ._config import config_app app = typer.Typer() @@ -72,6 +72,7 @@ def main( app.command("compose")(_compose_spec.create_compose) app.add_typer(config_app, name="config", help="Manage osparc config files") app.command("test")(_test.run_tests) +app.command("legacy-escape")(_escaping.legacy_escape) # legacy app.command("bump-version")(_metadata.bump_version) app.command("get-version")(_metadata.get_version) diff --git a/packages/service-integration/src/service_integration/cli/_escaping.py b/packages/service-integration/src/service_integration/cli/_escaping.py new file mode 100644 index 000000000000..2953911554cc --- /dev/null +++ b/packages/service-integration/src/service_integration/cli/_escaping.py @@ -0,0 +1,44 @@ +import re +from pathlib import Path +from typing import Annotated + +import typer + +from ..osparc_config import OSPARC_CONFIG_DIRNAME + + +def escape_dollar_brace(text: str) -> str: + # the pattern finds '$${' that is not preceded by another '$'. + pattern = r"(? 
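Note: the regex literal in the new _escaping.py is cut off above. A minimal sketch of the escaping logic, reconstructed only from the in-code comment and the parametrized expectations in test_cli__escaping.py further below (the exact pattern shipped in the module may differ), would be:

    import re

    def escape_dollar_brace(text: str) -> str:
        # Match the literal sequence '$${' only when it is NOT already preceded
        # by another '$' (negative lookbehind); '$$${', '$$$${', ... are left alone.
        pattern = r"(?<!\$)\$\$\{"
        # Prepend an extra '$$' so the legacy sequence survives compose-style
        # interpolation: '$${x}' becomes '$$$${x}'.
        return re.sub(pattern, "$$$${", text)

    # e.g. escape_dollar_brace("$${escapes} & $$${preserves}")
    #      -> "$$$${escapes} & $$${preserves}"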
str: + assert result.exception + tb_message = "\n".join(traceback.format_tb(result.exception.__traceback__)) + return f"Below exception was raised by the cli:\n{tb_message}" + + def test_cli_help(run_program_with_args: Callable): result = run_program_with_args( "--help", ) - assert result.exit_code == 0 + assert result.exit_code == os.EX_OK, _format_cli_error(result) def test_cli_version(run_program_with_args: Callable): result = run_program_with_args( "--version", ) - assert result.exit_code == 0 + assert result.exit_code == os.EX_OK, _format_cli_error(result) assert __version__ == result.output.strip() + + +@pytest.fixture +def copy_tests_data_dir(tests_data_dir: Path, tmp_path: Path) -> Path: + new_dir_path = tmp_path / "copy_tests_data_dir" + new_dir_path.mkdir(exist_ok=True, parents=True) + + for item in tests_data_dir.glob("*"): + print(f"Copying {item} to {new_dir_path / item.name}") + shutil.copy2(item, new_dir_path / item.name) + + return new_dir_path + + +def test_cli_legacy_escape(copy_tests_data_dir: Path, run_program_with_args: Callable): + result = run_program_with_args( + "legacy-escape", "--osparc-config-dirname", copy_tests_data_dir + ) + assert result.exit_code == os.EX_OK, _format_cli_error(result) + # NOTE only 1 file will have a sequence that will be escaped + assert ( + f"Escaped sequence in {copy_tests_data_dir}/docker-compose-meta.yml" + in result.output.strip() + ) diff --git a/packages/service-integration/tests/test_cli__escaping.py b/packages/service-integration/tests/test_cli__escaping.py new file mode 100644 index 000000000000..e33463b4dcd0 --- /dev/null +++ b/packages/service-integration/tests/test_cli__escaping.py @@ -0,0 +1,20 @@ +import pytest +from service_integration.cli._escaping import escape_dollar_brace + + +@pytest.mark.parametrize( + "to_escape, escaped", + [ + ("some text", "some text"), + ("$${escapes}", "$$$${escapes}"), + ("$$${preserves}", "$$${preserves}"), + ("$$$${preserves}", "$$$${preserves}"), + ("$$$$${preserves}", "$$$$${preserves}"), + ( + "$${escapes} & $$${preserves},$$$${preserves}, $$$$${preserves}", + "$$$${escapes} & $$${preserves},$$$${preserves}, $$$$${preserves}", + ), + ], +) +def test_escape_dollar_brace(to_escape: str, escaped: str): + assert escape_dollar_brace(to_escape) == escaped diff --git a/packages/service-integration/tests/test_osparc_image_specs.py b/packages/service-integration/tests/test_osparc_image_specs.py index 6bec87425ad2..0dd6b96232a4 100644 --- a/packages/service-integration/tests/test_osparc_image_specs.py +++ b/packages/service-integration/tests/test_osparc_image_specs.py @@ -8,6 +8,7 @@ import pytest import yaml from pydantic import BaseModel +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from service_integration.compose_spec_model import BuildItem, Service from service_integration.osparc_config import ( DockerComposeOverwriteConfig, @@ -19,7 +20,13 @@ @pytest.fixture -def settings() -> AppSettings: +def settings(monkeypatch: pytest.MonkeyPatch) -> AppSettings: + setenvs_from_dict( + monkeypatch, + { + "ENABLE_OOIL_OSPARC_VARIABLE_IDENTIFIER": "true", + }, + ) return AppSettings() diff --git a/packages/service-library/requirements/_aiohttp.txt b/packages/service-library/requirements/_aiohttp.txt index fabfd7475b79..dd705a2037c1 100644 --- a/packages/service-library/requirements/_aiohttp.txt +++ b/packages/service-library/requirements/_aiohttp.txt @@ -1,6 +1,6 @@ aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via -r requirements/_aiohttp.in 
aiopg==1.4.0 # via -r requirements/_aiohttp.in @@ -14,10 +14,6 @@ attrs==25.1.0 # aiohttp # jsonschema # referencing -deprecated==1.2.18 - # via - # opentelemetry-api - # opentelemetry-semantic-conventions frozenlist==1.5.0 # via # aiohttp @@ -38,7 +34,7 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.30.0 +opentelemetry-api==1.34.1 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-aiohttp-client @@ -46,27 +42,27 @@ opentelemetry-api==1.30.0 # opentelemetry-instrumentation-aiopg # opentelemetry-instrumentation-dbapi # opentelemetry-semantic-conventions -opentelemetry-instrumentation==0.51b0 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aiohttp-client # opentelemetry-instrumentation-aiohttp-server # opentelemetry-instrumentation-aiopg # opentelemetry-instrumentation-dbapi -opentelemetry-instrumentation-aiohttp-client==0.51b0 +opentelemetry-instrumentation-aiohttp-client==0.55b1 # via -r requirements/_aiohttp.in -opentelemetry-instrumentation-aiohttp-server==0.51b0 +opentelemetry-instrumentation-aiohttp-server==0.55b1 # via -r requirements/_aiohttp.in -opentelemetry-instrumentation-aiopg==0.51b0 +opentelemetry-instrumentation-aiopg==0.55b1 # via -r requirements/_aiohttp.in -opentelemetry-instrumentation-dbapi==0.51b0 +opentelemetry-instrumentation-dbapi==0.55b1 # via opentelemetry-instrumentation-aiopg -opentelemetry-semantic-conventions==0.51b0 +opentelemetry-semantic-conventions==0.55b1 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-aiohttp-client # opentelemetry-instrumentation-aiohttp-server # opentelemetry-instrumentation-dbapi -opentelemetry-util-http==0.51b0 +opentelemetry-util-http==0.55b1 # via # opentelemetry-instrumentation-aiohttp-client # opentelemetry-instrumentation-aiohttp-server @@ -92,11 +88,14 @@ rpds-py==0.23.1 # referencing sqlalchemy==1.4.54 # via aiopg +typing-extensions==4.14.1 + # via + # opentelemetry-api + # opentelemetry-semantic-conventions werkzeug==3.1.3 # via -r requirements/_aiohttp.in wrapt==1.17.2 # via - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aiohttp-client # opentelemetry-instrumentation-aiohttp-server diff --git a/packages/service-library/requirements/_base.in b/packages/service-library/requirements/_base.in index 24222e414b81..d094798f8a49 100644 --- a/packages/service-library/requirements/_base.in +++ b/packages/service-library/requirements/_base.in @@ -19,6 +19,7 @@ faststream opentelemetry-api opentelemetry-exporter-otlp opentelemetry-instrumentation-aio-pika +opentelemetry-instrumentation-asyncpg opentelemetry-instrumentation-logging opentelemetry-instrumentation-redis opentelemetry-instrumentation-requests diff --git a/packages/service-library/requirements/_base.txt b/packages/service-library/requirements/_base.txt index 862ed2b212bf..2ef0a5469a1d 100644 --- a/packages/service-library/requirements/_base.txt +++ b/packages/service-library/requirements/_base.txt @@ -10,7 +10,7 @@ aiofiles==24.1.0 # via -r requirements/_base.in aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -49,14 +49,8 @@ certifi==2025.1.31 # requests charset-normalizer==3.4.1 # via requests -click==8.1.8 +click==8.2.1 # via typer -deprecated==1.2.18 - # via - # 
opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions dnspython==2.7.0 # via email-validator email-validator==2.2.0 @@ -71,7 +65,7 @@ frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.68.0 +googleapis-common-protos==1.70.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -85,6 +79,8 @@ idna==3.10 # yarl importlib-metadata==8.5.0 # via opentelemetry-api +jsonref==1.1.0 + # via -r requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.23.0 # via -r requirements/../../../packages/models-library/requirements/_base.in jsonschema-specifications==2024.10.1 @@ -97,59 +93,64 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.30.0 +opentelemetry-api==1.34.1 # via # -r requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.30.0 +opentelemetry-exporter-otlp==1.34.1 # via -r requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.30.0 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.30.0 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.30.0 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.51b0 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.51b0 +opentelemetry-instrumentation-aio-pika==0.55b1 # via -r requirements/_base.in -opentelemetry-instrumentation-logging==0.51b0 +opentelemetry-instrumentation-asyncpg==0.55b1 # via -r requirements/_base.in -opentelemetry-instrumentation-redis==0.51b0 +opentelemetry-instrumentation-logging==0.55b1 # via -r requirements/_base.in -opentelemetry-instrumentation-requests==0.51b0 +opentelemetry-instrumentation-redis==0.55b1 # via -r requirements/_base.in -opentelemetry-proto==1.30.0 +opentelemetry-instrumentation-requests==0.55b1 + # via -r requirements/_base.in +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.30.0 +opentelemetry-sdk==1.34.1 # via # -r requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.51b0 +opentelemetry-semantic-conventions==0.55b1 # via # opentelemetry-instrumentation + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.51b0 +opentelemetry-util-http==0.55b1 # via opentelemetry-instrumentation-requests orjson==3.10.15 # via @@ -171,7 +172,7 @@ propcache==0.3.0 # via # aiohttp # yarl -protobuf==5.29.3 +protobuf==5.29.5 # via # 
googleapis-common-protos # opentelemetry-proto @@ -179,7 +180,7 @@ psutil==7.0.0 # via -r requirements/_base.in pycryptodome==3.21.0 # via stream-zip -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -196,9 +197,9 @@ pydantic==2.10.6 # fast-depends # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.2 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -250,9 +251,9 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via opentelemetry-exporter-otlp-proto-http -rich==14.0.0 +rich==14.1.0 # via # -r requirements/../../../packages/settings-library/requirements/_base.in # typer @@ -274,21 +275,28 @@ toolz==1.0.0 # via -r requirements/_base.in tqdm==4.67.1 # via -r requirements/_base.in -typer==0.16.0 +typer==0.16.1 # via -r requirements/../../../packages/settings-library/requirements/_base.in types-python-dateutil==2.9.0.20241206 # via arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # aiodebug # anyio # faststream + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pydantic # pydantic-core # pydantic-extra-types # typer -urllib3==2.3.0 + # typing-inspection +typing-inspection==0.4.1 + # via pydantic +urllib3==2.5.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -299,7 +307,6 @@ urllib3==2.3.0 # requests wrapt==1.17.2 # via - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-redis diff --git a/packages/service-library/requirements/_fastapi.in b/packages/service-library/requirements/_fastapi.in index 3303e6043afa..940a289b3c55 100644 --- a/packages/service-library/requirements/_fastapi.in +++ b/packages/service-library/requirements/_fastapi.in @@ -3,7 +3,7 @@ # # - +asgi-lifespan fastapi[standard] fastapi-lifespan-manager httpx[http2] diff --git a/packages/service-library/requirements/_fastapi.txt b/packages/service-library/requirements/_fastapi.txt index c6e5a29f597a..4b894d9984b6 100644 --- a/packages/service-library/requirements/_fastapi.txt +++ b/packages/service-library/requirements/_fastapi.txt @@ -5,34 +5,37 @@ anyio==4.8.0 # httpx # starlette # watchfiles +asgi-lifespan==2.1.0 + # via -r requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi certifi==2025.1.31 # via # httpcore # httpx -click==8.1.8 + # sentry-sdk +click==8.2.1 # via # rich-toolkit # typer # uvicorn -deprecated==1.2.18 - # via - # opentelemetry-api - # opentelemetry-semantic-conventions dnspython==2.7.0 # via email-validator email-validator==2.2.0 - # via fastapi -fastapi==0.115.12 + # via + # fastapi + # pydantic +fastapi==0.116.1 # via # -r requirements/_fastapi.in # 
fastapi-lifespan-manager -fastapi-cli==0.0.7 +fastapi-cli==0.0.8 # via fastapi +fastapi-cloud-cli==0.1.5 + # via fastapi-cli fastapi-lifespan-manager==0.1.4 # via -r requirements/_fastapi.in -h11==0.14.0 +h11==0.16.0 # via # httpcore # uvicorn @@ -40,7 +43,7 @@ h2==4.2.0 # via httpx hpack==4.1.0 # via h2 -httpcore==1.0.7 +httpcore==1.0.9 # via httpx httptools==0.6.4 # via uvicorn @@ -48,6 +51,7 @@ httpx==0.28.1 # via # -r requirements/_fastapi.in # fastapi + # fastapi-cloud-cli hyperframe==6.1.0 # via h2 idna==3.10 @@ -65,31 +69,31 @@ markupsafe==3.0.2 # via jinja2 mdurl==0.1.2 # via markdown-it-py -opentelemetry-api==1.30.0 +opentelemetry-api==1.34.1 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-semantic-conventions -opentelemetry-instrumentation==0.51b0 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx -opentelemetry-instrumentation-asgi==0.51b0 +opentelemetry-instrumentation-asgi==0.55b1 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-fastapi==0.51b0 +opentelemetry-instrumentation-fastapi==0.55b1 # via -r requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.51b0 +opentelemetry-instrumentation-httpx==0.55b1 # via -r requirements/_fastapi.in -opentelemetry-semantic-conventions==0.51b0 +opentelemetry-semantic-conventions==0.55b1 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx -opentelemetry-util-http==0.51b0 +opentelemetry-util-http==0.55b1 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi @@ -98,9 +102,11 @@ packaging==24.2 # via opentelemetry-instrumentation prometheus-client==0.21.1 # via -r requirements/_fastapi.in -pydantic==2.10.6 - # via fastapi -pydantic-core==2.27.2 +pydantic==2.11.7 + # via + # fastapi + # fastapi-cloud-cli +pydantic-core==2.33.2 # via pydantic pygments==2.19.1 # via rich @@ -110,32 +116,51 @@ python-multipart==0.0.20 # via fastapi pyyaml==6.0.2 # via uvicorn -rich==14.0.0 +rich==14.1.0 # via # rich-toolkit # typer -rich-toolkit==0.14.7 - # via fastapi-cli +rich-toolkit==0.15.0 + # via + # fastapi-cli + # fastapi-cloud-cli +rignore==0.6.4 + # via fastapi-cloud-cli +sentry-sdk==2.35.0 + # via fastapi-cloud-cli shellingham==1.5.4 # via typer sniffio==1.3.1 - # via anyio -starlette==0.46.0 + # via + # anyio + # asgi-lifespan +starlette==0.47.2 # via fastapi -typer==0.16.0 - # via fastapi-cli -typing-extensions==4.12.2 +typer==0.16.1 + # via + # fastapi-cli + # fastapi-cloud-cli +typing-extensions==4.14.1 # via # anyio # fastapi + # opentelemetry-api + # opentelemetry-semantic-conventions # pydantic # pydantic-core # rich-toolkit + # starlette # typer + # typing-inspection +typing-inspection==0.4.1 + # via pydantic +urllib3==2.5.0 + # via sentry-sdk uvicorn==0.34.2 # via # fastapi # fastapi-cli + # fastapi-cloud-cli uvloop==0.21.0 # via uvicorn watchfiles==1.0.5 @@ -144,7 +169,6 @@ websockets==15.0.1 # via uvicorn wrapt==1.17.2 # via - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-httpx zipp==3.21.0 diff --git a/packages/service-library/requirements/_test.in b/packages/service-library/requirements/_test.in index 239a389cbc06..5fd26efe1e97 100644 --- a/packages/service-library/requirements/_test.in +++ 
b/packages/service-library/requirements/_test.in @@ -17,6 +17,7 @@ botocore coverage docker faker +fakeredis[lua] flaky numpy openapi-spec-validator @@ -41,3 +42,4 @@ types_aiofiles types_tqdm types-psutil types-psycopg2 +uvloop diff --git a/packages/service-library/requirements/_test.txt b/packages/service-library/requirements/_test.txt index 0714e6eb3e01..af2d7db162e7 100644 --- a/packages/service-library/requirements/_test.txt +++ b/packages/service-library/requirements/_test.txt @@ -3,7 +3,7 @@ aiohappyeyeballs==2.6.1 # -c requirements/_aiohttp.txt # -c requirements/_base.txt # aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_aiohttp.txt @@ -20,7 +20,9 @@ anyio==4.8.0 # -c requirements/_fastapi.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_fastapi.txt + # -r requirements/_test.in attrs==25.1.0 # via # -c requirements/_aiohttp.txt @@ -53,6 +55,8 @@ execnet==2.1.1 # via pytest-xdist faker==36.1.1 # via -r requirements/_test.in +fakeredis==2.30.3 + # via -r requirements/_test.in flaky==3.8.1 # via -r requirements/_test.in frozenlist==1.5.0 @@ -65,11 +69,11 @@ greenlet==3.1.1 # via # -c requirements/_aiohttp.txt # sqlalchemy -h11==0.14.0 +h11==0.16.0 # via # -c requirements/_fastapi.txt # httpcore -httpcore==1.0.7 +httpcore==1.0.9 # via # -c requirements/_fastapi.txt # httpx @@ -109,15 +113,17 @@ jsonschema-specifications==2024.10.1 # openapi-schema-validator lazy-object-proxy==1.10.0 # via openapi-spec-validator +lupa==2.5 + # via fakeredis multidict==6.1.0 # via # -c requirements/_aiohttp.txt # -c requirements/_base.txt # aiohttp # yarl -mypy==1.15.0 +mypy==1.16.1 # via sqlalchemy -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via mypy numpy==2.2.3 # via -r requirements/_test.in @@ -134,12 +140,16 @@ packaging==24.2 # pytest-sugar pathable==0.4.4 # via jsonschema-path +pathspec==0.12.1 + # via mypy pillow==11.1.0 # via -r requirements/_test.in pip==25.0.1 # via -r requirements/_test.in pluggy==1.5.0 - # via pytest + # via + # pytest + # pytest-cov pprintpp==0.4.0 # via pytest-icdiff propcache==0.3.0 @@ -150,7 +160,12 @@ propcache==0.3.0 # yarl py-cpuinfo==9.0.0 # via pytest-benchmark -pytest==8.3.5 +pygments==2.19.1 + # via + # -c requirements/_base.txt + # -c requirements/_fastapi.txt + # pytest +pytest==8.4.1 # via # -r requirements/_test.in # pytest-aiohttp @@ -165,27 +180,27 @@ pytest==8.3.5 # pytest-xdist pytest-aiohttp==1.1.0 # via -r requirements/_test.in -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via # -r requirements/_test.in # pytest-aiohttp pytest-benchmark==5.1.0 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in -pytest-docker==3.2.0 +pytest-docker==3.2.3 # via -r requirements/_test.in pytest-icdiff==0.9 # via -r requirements/_test.in pytest-instafail==0.5.0 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in pytest-sugar==1.0.0 # via -r requirements/_test.in -pytest-xdist==3.6.1 +pytest-xdist==3.8.0 # via -r requirements/_test.in python-dateutil==2.9.0.post0 # via @@ -202,6 +217,11 @@ pyyaml==6.0.2 # -c requirements/_base.txt # -c requirements/_fastapi.txt # jsonschema-path +redis==5.2.1 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # fakeredis referencing==0.35.1 # via # -c requirements/../../../requirements/constraints.txt @@ -210,7 +230,7 
@@ referencing==0.35.1 # jsonschema # jsonschema-path # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via # -c requirements/_base.txt # docker @@ -236,6 +256,8 @@ sniffio==1.3.1 # -c requirements/_fastapi.txt # anyio # asgi-lifespan +sortedcontainers==2.4.0 + # via fakeredis sqlalchemy==1.4.54 # via # -c requirements/../../../requirements/constraints.txt @@ -255,8 +277,9 @@ types-requests==2.32.0.20250301 # via types-tqdm types-tqdm==4.67.0.20250301 # via -r requirements/_test.in -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via + # -c requirements/_aiohttp.txt # -c requirements/_base.txt # -c requirements/_fastapi.txt # anyio @@ -264,14 +287,19 @@ typing-extensions==4.12.2 # sqlalchemy2-stubs tzdata==2025.1 # via faker -urllib3==2.3.0 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt + # -c requirements/_fastapi.txt # botocore # docker # requests # types-requests +uvloop==0.21.0 + # via + # -c requirements/_fastapi.txt + # -r requirements/_test.in yarl==1.18.3 # via # -c requirements/_aiohttp.txt diff --git a/packages/service-library/requirements/_tools.txt b/packages/service-library/requirements/_tools.txt index 985c2c3bc856..35fcb09f3491 100644 --- a/packages/service-library/requirements/_tools.txt +++ b/packages/service-library/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # black @@ -27,11 +27,11 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # -c requirements/_test.txt # black @@ -45,7 +45,10 @@ packaging==24.2 # black # build pathspec==0.12.1 - # via black + # via + # -c requirements/_test.txt + # black + # mypy pip==25.0.1 # via # -c requirements/_test.txt @@ -73,11 +76,11 @@ pyyaml==6.0.2 # pre-commit ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.2 +setuptools==80.9.0 # via pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/packages/service-library/setup.cfg b/packages/service-library/setup.cfg index 874495da36bd..714b873009e3 100644 --- a/packages/service-library/setup.cfg +++ b/packages/service-library/setup.cfg @@ -21,6 +21,7 @@ markers = testit: "marks test to run during development" performance_test: "performance test" no_cleanup_check_rabbitmq_server_has_no_errors: "no check in rabbitmq logs" + heavy_load: "marks test as heavy load" [mypy] plugins = diff --git a/packages/service-library/setup.py b/packages/service-library/setup.py index 521b491b918e..2ddd96c9ece1 100644 --- a/packages/service-library/setup.py +++ b/packages/service-library/setup.py @@ -38,7 +38,7 @@ def read_reqs(reqs_path: Path) -> set[str]: "python_requires": "~=3.11", "install_requires": tuple(PROD_REQUIREMENTS), "packages": find_packages(where="src"), - "package_data": {"": ["py.typed"]}, + "package_data": {"": ["py.typed", "redis/lua/*.lua"]}, "package_dir": {"": "src"}, "test_suite": "tests", "tests_require": tuple(TEST_REQUIREMENTS), diff --git a/packages/service-library/src/servicelib/aiohttp/aiopg_utils.py b/packages/service-library/src/servicelib/aiohttp/aiopg_utils.py index e7b98347c318..23fcbb41f0a3 100644 --- 
a/packages/service-library/src/servicelib/aiohttp/aiopg_utils.py +++ b/packages/service-library/src/servicelib/aiohttp/aiopg_utils.py @@ -1,20 +1,21 @@ -""" Holderplace for random helpers using aiopg +"""Holderplace for random helpers using aiopg - - Drop here functions/constants that at that time does - not fit in any of the setups. Then, they can be moved and - refactor when new abstractions are used in place. +- Drop here functions/constants that at that time does +not fit in any of the setups. Then, they can be moved and +refactor when new abstractions are used in place. - - aiopg is used as a client sdk to interact asynchronously with postgres service +- aiopg is used as a client sdk to interact asynchronously with postgres service - SEE for aiopg: https://aiopg.readthedocs.io/en/stable/sa.html - SEE for underlying psycopg: http://initd.org/psycopg/docs/module.html - SEE for extra keywords: https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS +SEE for aiopg: https://aiopg.readthedocs.io/en/stable/sa.html +SEE for underlying psycopg: http://initd.org/psycopg/docs/module.html +SEE for extra keywords: https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS """ # TODO: Towards implementing https://github.com/ITISFoundation/osparc-simcore/issues/1195 # TODO: deprecate this module. Move utils into retry_policies, simcore_postgres_database.utils_aiopg import logging +from typing import Final import sqlalchemy as sa from aiohttp import web @@ -31,6 +32,8 @@ log = logging.getLogger(__name__) +APP_AIOPG_ENGINE_KEY: Final = web.AppKey("APP_AIOPG_ENGINE_KEY", Engine) + async def raise_if_not_responsive(engine: Engine): async with engine.acquire() as conn: diff --git a/packages/service-library/src/servicelib/aiohttp/application_keys.py b/packages/service-library/src/servicelib/aiohttp/application_keys.py index 3958c860cb00..1a85b2a00796 100644 --- a/packages/service-library/src/servicelib/aiohttp/application_keys.py +++ b/packages/service-library/src/servicelib/aiohttp/application_keys.py @@ -1,4 +1,4 @@ -""" Namespace to keep all application storage keys +"""Namespace to keep all application storage keys Unique keys to identify stored data Naming convention accounts for the storage scope: application, request, response, configuration and/or resources @@ -8,22 +8,25 @@ See https://aiohttp.readthedocs.io/en/stable/web_advanced.html#data-sharing-aka-no-singletons-please """ + from typing import Final -# REQUIREMENTS: -# - guarantees all keys are unique -# - one place for all common keys -# - hierarchical classification +from aiohttp import ClientSession, web + +# APPLICATION's CONTEXT KEYS + +# NOTE: use these keys to store/retrieve data from aiohttp.web.Application +# SEE https://docs.aiohttp.org/en/stable/web_quickstart.html#aiohttp-web-app-key # # web.Application keys, i.e. 
app[APP_*_KEY] # -APP_CONFIG_KEY: Final[str] = f"{__name__ }.config" -APP_SETTINGS_KEY: Final[str] = f"{__name__ }.settings" +APP_CONFIG_KEY = web.AppKey("APP_CONFIG_KEY", dict[str, object]) APP_AIOPG_ENGINE_KEY: Final[str] = f"{__name__ }.aiopg_engine" -APP_CLIENT_SESSION_KEY: Final[str] = f"{__name__ }.session" +APP_CLIENT_SESSION_KEY: web.AppKey[ClientSession] = web.AppKey("APP_CLIENT_SESSION_KEY") + APP_FIRE_AND_FORGET_TASKS_KEY: Final[str] = f"{__name__}.tasks" diff --git a/packages/service-library/src/servicelib/aiohttp/application_setup.py b/packages/service-library/src/servicelib/aiohttp/application_setup.py index 0d52603f9651..3375c5444f9d 100644 --- a/packages/service-library/src/servicelib/aiohttp/application_setup.py +++ b/packages/service-library/src/servicelib/aiohttp/application_setup.py @@ -2,9 +2,12 @@ import inspect import logging from collections.abc import Callable +from contextlib import ContextDecorator from copy import deepcopy +from datetime import datetime from enum import Enum -from typing import Any, Protocol +from types import TracebackType +from typing import Any, Final, Protocol import arrow from aiohttp import web @@ -13,23 +16,21 @@ TypedDict, ) -from .application_keys import APP_CONFIG_KEY, APP_SETTINGS_KEY +from .application_keys import APP_CONFIG_KEY -log = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) -APP_SETUP_COMPLETED_KEY = f"{__name__ }.setup" +APP_SETUP_COMPLETED_KEY: Final[web.AppKey] = web.AppKey("setup_completed", list[str]) class _SetupFunc(Protocol): __name__: str - def __call__(self, app: web.Application, *args: Any, **kwds: Any) -> bool: - ... + def __call__(self, app: web.Application, *args: Any, **kwds: Any) -> bool: ... class _ApplicationSettings(Protocol): - def is_enabled(self, field_name: str) -> bool: - ... + def is_enabled(self, field_name: str) -> bool: ... class ModuleCategory(Enum): @@ -46,12 +47,10 @@ def __init__(self, *, reason) -> None: super().__init__(reason) -class ApplicationSetupError(Exception): - ... +class ApplicationSetupError(Exception): ... -class DependencyError(ApplicationSetupError): - ... +class DependencyError(ApplicationSetupError): ... class SetupMetadataDict(TypedDict): @@ -116,13 +115,14 @@ def _get_app_settings_and_field_name( arg_settings_name: str | None, setup_func_name: str, logger: logging.Logger, + app_settings_key: web.AppKey, ) -> tuple[_ApplicationSettings | None, str | None]: - app_settings: _ApplicationSettings | None = app.get(APP_SETTINGS_KEY) + app_settings: _ApplicationSettings | None = app.get(app_settings_key) settings_field_name = arg_settings_name if app_settings: if not settings_field_name: - # FIXME: hard-coded WEBSERVER_ temporary + # NOTE: hard-coded WEBSERVER_ temporary settings_field_name = f"WEBSERVER_{arg_module_name.split('.')[-1].upper()}" logger.debug("Checking addon's %s ", f"{settings_field_name=}") @@ -134,6 +134,53 @@ def _get_app_settings_and_field_name( return app_settings, settings_field_name +class _SetupTimingContext(ContextDecorator): + """Context manager/decorator for timing and logging module setup operations.""" + + def __init__( + self, + module_name: str, + *, + logger: logging.Logger, + category: ModuleCategory | None = None, + depends: list[str] | None = None, + ) -> None: + """Initialize timing context. 
+ + :param module_name: Name of the module being set up + :param category: Optional module category for detailed logging + :param depends: Optional dependencies for detailed logging + """ + self.module_name = module_name + self.category = category + self.depends = depends + self.started: datetime | None = None + self.head_msg = f"Setup of {module_name}" + self.logger = logger + + def __enter__(self) -> None: + self.started = arrow.utcnow().datetime + if self.category is not None: + self.logger.info( + "%s (%s, %s) started ... ", + self.head_msg, + f"{self.category.name=}", + f"{self.depends}", + ) + else: + self.logger.info("%s started ...", self.head_msg) + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + if self.started: + elapsed = (arrow.utcnow() - self.started).total_seconds() + _logger.info("%s completed [Elapsed: %3.1f secs]", self.head_msg, elapsed) + + # PUBLIC API ------------------------------------------------------------------ @@ -141,13 +188,69 @@ def is_setup_completed(module_name: str, app: web.Application) -> bool: return module_name in app[APP_SETUP_COMPLETED_KEY] +def ensure_single_setup( + module_name: str, + *, + logger: logging.Logger, +) -> Callable[[Callable[..., Any]], Callable[..., Any]]: + """Ensures a setup function is executed only once per application and handles completion. + + :param module_name: Name of the module being set up + """ + + def _log_skip(reason: str) -> bool: + logger.info("Skipping '%s' setup: %s", module_name, reason) + return False + + def decorator(setup_func: _SetupFunc) -> _SetupFunc: + + @functools.wraps(setup_func) + def _wrapper(app: web.Application, *args: Any, **kwargs: Any) -> bool: + + # pre-setup init + if APP_SETUP_COMPLETED_KEY not in app: + app[APP_SETUP_COMPLETED_KEY] = [] + + # check + if is_setup_completed(module_name, app): + _log_skip( + f"'{module_name}' was already initialized in {app}." + " Setup can only be executed once per app." + ) + return False + + try: + completed = setup_func(app, *args, **kwargs) + + # post-setup handling + if completed is None: + completed = True + + if completed: # registers completed setup + app[APP_SETUP_COMPLETED_KEY].append(module_name) + return completed + + assert not completed # nosec + _log_skip("Undefined (setup function returned false)") + return False + + except SkipModuleSetupError as err: + _log_skip(err.reason) + return False + + return _wrapper + + return decorator + + def app_module_setup( module_name: str, category: ModuleCategory, *, + app_settings_key: web.AppKey, settings_name: str | None = None, depends: list[str] | None = None, - logger: logging.Logger = log, + logger: logging.Logger = _logger, # TODO: SEE https://github.com/ITISFoundation/osparc-simcore/issues/2008 # TODO: - settings_name becomes module_name!! # TODO: - plugin base should be aware of setup and settings -> model instead of function? 
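The `ensure_single_setup` decorator above, together with the move to typed `web.AppKey`s, suggests the following minimal plugin sketch; the module name, key and stored value are hypothetical and only illustrate the intended call pattern:

```python
import logging

from aiohttp import web
from servicelib.aiohttp.application_setup import ensure_single_setup

_logger = logging.getLogger(__name__)

# typed application key, following the web.AppKey convention adopted in this PR
MY_PLUGIN_CLIENT_KEY: web.AppKey[str] = web.AppKey("MY_PLUGIN_CLIENT_KEY", str)


@ensure_single_setup("my_plugin", logger=_logger)  # hypothetical module name
def setup_my_plugin(app: web.Application) -> bool:
    app[MY_PLUGIN_CLIENT_KEY] = "initialized"
    return True


app = web.Application()
assert setup_my_plugin(app) is True   # first call performs the setup
assert setup_my_plugin(app) is False  # second call is skipped (already completed)
```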
@@ -190,35 +293,27 @@ def setup(app: web.Application): module_name, depends, config_section, config_enabled ) - def _decorate(setup_func: _SetupFunc): - if "setup" not in setup_func.__name__: - logger.warning("Rename '%s' to contain 'setup'", setup_func.__name__) - - # metadata info - def setup_metadata() -> SetupMetadataDict: - return SetupMetadataDict( - module_name=module_name, - dependencies=depends, - config_section=section, - config_enabled=config_enabled, - ) + # metadata info + def _setup_metadata() -> SetupMetadataDict: + return SetupMetadataDict( + module_name=module_name, + dependencies=depends, + config_section=section, + config_enabled=config_enabled, + ) - # wrapper - @functools.wraps(setup_func) - def _wrapper(app: web.Application, *args, **kargs) -> bool: - # pre-setup - head_msg = f"Setup of {module_name}" - started = arrow.utcnow() - logger.info( - "%s (%s, %s) started ... ", - head_msg, - f"{category.name=}", - f"{depends}", - ) + def decorator(setup_func: _SetupFunc) -> _SetupFunc: - if APP_SETUP_COMPLETED_KEY not in app: - app[APP_SETUP_COMPLETED_KEY] = [] + assert ( # nosec + "setup_" in setup_func.__name__ + ), f"Rename '{setup_func.__name__}' like 'setup_$(plugin-name)'" + @functools.wraps(setup_func) + @ensure_single_setup(module_name, logger=logger) + @_SetupTimingContext( + module_name, category=category, depends=depends, logger=logger + ) + def _wrapper(app: web.Application, *args, **kargs) -> bool: if category == ModuleCategory.ADDON: # ONLY addons can be enabled/disabled @@ -243,6 +338,7 @@ def _wrapper(app: web.Application, *args, **kargs) -> bool: settings_name, setup_func.__name__, logger, + app_settings_key, ) if ( @@ -258,7 +354,6 @@ def _wrapper(app: web.Application, *args, **kargs) -> bool: return False if depends: - # TODO: no need to enforce. Use to deduce order instead. uninitialized = [ dep for dep in depends if not is_setup_completed(dep, app) ] @@ -266,52 +361,24 @@ def _wrapper(app: web.Application, *args, **kargs) -> bool: msg = f"Cannot setup app module '{module_name}' because the following dependencies are still uninitialized: {uninitialized}" raise DependencyError(msg) - # execution of setup - try: - if is_setup_completed(module_name, app): - raise SkipModuleSetupError( # noqa: TRY301 - reason=f"'{module_name}' was already initialized in {app}." - " Setup can only be executed once per app." 
- ) - - completed = setup_func(app, *args, **kargs) - - # post-setup - if completed is None: - completed = True - - if completed: # registers completed setup - app[APP_SETUP_COMPLETED_KEY].append(module_name) - else: - raise SkipModuleSetupError( # noqa: TRY301 - reason="Undefined (setup function returned false)" - ) + # execution of setup with module name + completed: bool = setup_func(app, *args, **kargs) - except SkipModuleSetupError as exc: - logger.info("Skipping '%s' setup: %s", module_name, exc.reason) - completed = False - - elapsed = arrow.utcnow() - started - logger.info( - "%s %s [Elapsed: %3.1f secs]", - head_msg, - "completed" if completed else "skipped", - elapsed.total_seconds(), - ) return completed - _wrapper.metadata = setup_metadata # type: ignore[attr-defined] - _wrapper.mark_as_simcore_servicelib_setup_func = True # type: ignore[attr-defined] - # NOTE: this is added by functools.wraps decorated - assert _wrapper.__wrapped__ == setup_func # nosec + assert ( + _wrapper.__wrapped__ == setup_func + ), "this is added by functools.wraps decorator" # nosec + + setattr(_wrapper, "metadata", _setup_metadata) # noqa: B010 + setattr(_wrapper, "mark_as_simcore_servicelib_setup_func", True) # noqa: B010 return _wrapper - return _decorate + return decorator def is_setup_function(fun: Callable) -> bool: - # TODO: use _SetupFunc protocol to check in runtime return ( inspect.isfunction(fun) and hasattr(fun, "mark_as_simcore_servicelib_setup_func") diff --git a/packages/service-library/src/servicelib/aiohttp/client_session.py b/packages/service-library/src/servicelib/aiohttp/client_session.py index 40e49c76a4bc..010c2ec345bf 100644 --- a/packages/service-library/src/servicelib/aiohttp/client_session.py +++ b/packages/service-library/src/servicelib/aiohttp/client_session.py @@ -1,5 +1,4 @@ from collections.abc import AsyncGenerator -from typing import cast from aiohttp import ClientSession, ClientTimeout, web from common_library.json_serialization import json_dumps @@ -41,10 +40,11 @@ async def persistent_client_session(app: web.Application) -> AsyncGenerator[None def get_client_session(app: web.Application) -> ClientSession: """Refers to the one-and-only client in the app""" assert APP_CLIENT_SESSION_KEY in app # nosec - return cast(ClientSession, app[APP_CLIENT_SESSION_KEY]) + return app[APP_CLIENT_SESSION_KEY] __all__: tuple[str, ...] 
= ( + "APP_CLIENT_SESSION_KEY", "get_client_session", "persistent_client_session", ) diff --git a/packages/service-library/src/servicelib/aiohttp/db_asyncpg_engine.py b/packages/service-library/src/servicelib/aiohttp/db_asyncpg_engine.py index 88b0338dadfb..9e5056f67bc5 100644 --- a/packages/service-library/src/servicelib/aiohttp/db_asyncpg_engine.py +++ b/packages/service-library/src/servicelib/aiohttp/db_asyncpg_engine.py @@ -8,7 +8,6 @@ from typing import Final from aiohttp import web -from servicelib.logging_utils import log_context from settings_library.postgres import PostgresSettings from simcore_postgres_database.utils_aiosqlalchemy import ( # type: ignore[import-not-found] # this on is unclear get_pg_engine_stateinfo, @@ -39,23 +38,22 @@ def get_async_engine(app: web.Application) -> AsyncEngine: return engine -async def connect_to_db(app: web.Application, settings: PostgresSettings) -> None: +async def connect_to_db( + app: web.Application, settings: PostgresSettings, application_name: str +) -> None: """ - db services up, data migrated and ready to use - sets an engine in app state (use `get_async_engine(app)` to retrieve) """ - if settings.POSTGRES_CLIENT_NAME: - settings = settings.model_copy( - update={"POSTGRES_CLIENT_NAME": settings.POSTGRES_CLIENT_NAME + "-asyncpg"} - ) - with log_context( _logger, logging.INFO, "Connecting app[APP_DB_ASYNC_ENGINE_KEY] to postgres with %s", f"{settings=}", ): - engine = await create_async_engine_and_database_ready(settings) + engine = await create_async_engine_and_database_ready( + settings, application_name + ) _set_async_engine_to_app_state(app, engine) _logger.info( diff --git a/packages/service-library/src/servicelib/aiohttp/docker_utils.py b/packages/service-library/src/servicelib/aiohttp/docker_utils.py index 8e9393e1e69c..3468b789a8a5 100644 --- a/packages/service-library/src/servicelib/aiohttp/docker_utils.py +++ b/packages/service-library/src/servicelib/aiohttp/docker_utils.py @@ -1,6 +1,9 @@ import logging +from typing import Final +import aiodocker import aiohttp +from aiohttp import web from models_library.docker import DockerGenericTag from pydantic import TypeAdapter, ValidationError from settings_library.docker_registry import RegistrySettings @@ -18,6 +21,8 @@ _logger = logging.getLogger(__name__) +APP_DOCKER_ENGINE_KEY: Final = web.AppKey("APP_DOCKER_ENGINE_KEY", aiodocker.Docker) + async def retrieve_image_layer_information( image: DockerGenericTag, registry_settings: RegistrySettings diff --git a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_constants.py b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_constants.py index 79594cb18b89..fe38782e9ffe 100644 --- a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_constants.py +++ b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_constants.py @@ -3,9 +3,9 @@ from pydantic import PositiveFloat MINUTE: Final[PositiveFloat] = 60 -APP_LONG_RUNNING_TASKS_MANAGER_KEY: Final[ - str -] = f"{__name__ }.long_running_tasks.tasks_manager" -RQT_LONG_RUNNING_TASKS_CONTEXT_KEY: Final[ - str -] = f"{__name__}.long_running_tasks.context" +APP_LONG_RUNNING_MANAGER_KEY: Final[str] = ( + f"{__name__ }.long_running_tasks.tasks_manager" +) +RQT_LONG_RUNNING_TASKS_CONTEXT_KEY: Final[str] = ( + f"{__name__}.long_running_tasks.context" +) diff --git a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_dependencies.py b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_dependencies.py 
deleted file mode 100644 index b38004b32009..000000000000 --- a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_dependencies.py +++ /dev/null @@ -1,23 +0,0 @@ -from typing import Any - -from aiohttp import web - -from ...long_running_tasks._task import TasksManager -from ._constants import ( - APP_LONG_RUNNING_TASKS_MANAGER_KEY, - RQT_LONG_RUNNING_TASKS_CONTEXT_KEY, -) - - -def get_tasks_manager(app: web.Application) -> TasksManager: - output: TasksManager = app[APP_LONG_RUNNING_TASKS_MANAGER_KEY] - return output - - -def get_task_context(request: web.Request) -> dict[str, Any]: - output: dict[str, Any] = request[RQT_LONG_RUNNING_TASKS_CONTEXT_KEY] - return output - - -def create_task_name_from_request(request: web.Request) -> str: - return f"{request.method} {request.rel_url}" diff --git a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_error_handlers.py b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_error_handlers.py index 4534d7c951cb..8a679c70b7d8 100644 --- a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_error_handlers.py +++ b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_error_handlers.py @@ -3,7 +3,7 @@ from aiohttp import web from common_library.json_serialization import json_dumps -from ...long_running_tasks._errors import ( +from ...long_running_tasks.errors import ( TaskCancelledError, TaskNotCompletedError, TaskNotFoundError, diff --git a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_manager.py b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_manager.py new file mode 100644 index 000000000000..e77e8959ccfd --- /dev/null +++ b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_manager.py @@ -0,0 +1,18 @@ +from aiohttp import web + +from ...long_running_tasks.manager import LongRunningManager +from ...long_running_tasks.models import TaskContext +from ._constants import APP_LONG_RUNNING_MANAGER_KEY +from ._request import get_task_context + + +class AiohttpLongRunningManager(LongRunningManager): + + @staticmethod + def get_task_context(request: web.Request) -> TaskContext: + return get_task_context(request) + + +def get_long_running_manager(app: web.Application) -> AiohttpLongRunningManager: + output: AiohttpLongRunningManager = app[APP_LONG_RUNNING_MANAGER_KEY] + return output diff --git a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_request.py b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_request.py new file mode 100644 index 000000000000..0ccfd3c6a40a --- /dev/null +++ b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_request.py @@ -0,0 +1,10 @@ +from typing import Any + +from aiohttp import web + +from ._constants import RQT_LONG_RUNNING_TASKS_CONTEXT_KEY + + +def get_task_context(request: web.Request) -> dict[str, Any]: + output: dict[str, Any] = request[RQT_LONG_RUNNING_TASKS_CONTEXT_KEY] + return output diff --git a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_routes.py b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_routes.py index 1906c0bc93f1..55879e34ef13 100644 --- a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_routes.py +++ b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_routes.py @@ -1,18 +1,17 @@ -import logging from typing import Any from aiohttp import web -from common_library.json_serialization import json_dumps from pydantic import BaseModel 
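With `_dependencies.py` gone, handlers reach the long-running machinery through `get_long_running_manager`; a hedged sketch of a service-side handler using it together with `start_long_running_task` (reworked further below), where the handler name, task name and forwarded kwargs are hypothetical:

```python
from aiohttp import web
from servicelib.aiohttp.long_running_tasks.server import (
    get_long_running_manager,
    start_long_running_task,
)


async def export_project(request: web.Request) -> web.Response:
    long_running_manager = get_long_running_manager(request.app)
    # the per-request context is set by the task-request-context decorator
    # handed to setup(); outside such routes this lookup raises KeyError
    task_context = long_running_manager.get_task_context(request)
    return await start_long_running_task(
        request,
        "export_project",  # hypothetical RegisteredTaskName
        task_context=task_context,
        project_uuid=request.match_info["project_uuid"],  # forwarded as task kwargs
    )
```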
-from servicelib.aiohttp import status -from ...long_running_tasks._errors import TaskNotCompletedError, TaskNotFoundError -from ...long_running_tasks._models import TaskGet, TaskId, TaskStatus -from ...long_running_tasks._task import TrackedTask -from ..requests_validation import parse_request_path_parameters_as -from ._dependencies import get_task_context, get_tasks_manager +from ...aiohttp import status +from ...long_running_tasks import lrt_api +from ...long_running_tasks.models import TaskGet, TaskId +from ..requests_validation import ( + parse_request_path_parameters_as, +) +from ..rest_responses import create_data_response +from ._manager import get_long_running_manager -_logger = logging.getLogger(__name__) routes = web.RouteTableDef() @@ -22,79 +21,61 @@ class _PathParam(BaseModel): @routes.get("", name="list_tasks") async def list_tasks(request: web.Request) -> web.Response: - tasks_manager = get_tasks_manager(request.app) - task_context = get_task_context(request) - tracked_tasks: list[TrackedTask] = tasks_manager.list_tasks( - with_task_context=task_context - ) - - return web.json_response( - { - "data": [ - TaskGet( - task_id=t.task_id, - task_name=t.task_name, - status_href=f"{request.app.router['get_task_status'].url_for(task_id=t.task_id)}", - result_href=f"{request.app.router['get_task_result'].url_for(task_id=t.task_id)}", - abort_href=f"{request.app.router['cancel_and_delete_task'].url_for(task_id=t.task_id)}", - ) - for t in tracked_tasks - ] - }, - dumps=json_dumps, + long_running_manager = get_long_running_manager(request.app) + return create_data_response( + [ + TaskGet( + task_id=t.task_id, + status_href=f"{request.app.router['get_task_status'].url_for(task_id=t.task_id)}", + result_href=f"{request.app.router['get_task_result'].url_for(task_id=t.task_id)}", + abort_href=f"{request.app.router['remove_task'].url_for(task_id=t.task_id)}", + ) + for t in await lrt_api.list_tasks( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + long_running_manager.get_task_context(request), + ) + ] ) @routes.get("/{task_id}", name="get_task_status") async def get_task_status(request: web.Request) -> web.Response: path_params = parse_request_path_parameters_as(_PathParam, request) - tasks_manager = get_tasks_manager(request.app) - task_context = get_task_context(request) + long_running_manager = get_long_running_manager(request.app) - task_status: TaskStatus = tasks_manager.get_task_status( - task_id=path_params.task_id, with_task_context=task_context + task_status = await lrt_api.get_task_status( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + long_running_manager.get_task_context(request), + path_params.task_id, ) - return web.json_response({"data": task_status}, dumps=json_dumps) + return create_data_response(task_status) @routes.get("/{task_id}/result", name="get_task_result") async def get_task_result(request: web.Request) -> web.Response | Any: path_params = parse_request_path_parameters_as(_PathParam, request) - tasks_manager = get_tasks_manager(request.app) - task_context = get_task_context(request) + long_running_manager = get_long_running_manager(request.app) # NOTE: this might raise an exception that will be catched by the _error_handlers - try: - task_result = tasks_manager.get_task_result( - task_id=path_params.task_id, with_task_context=task_context - ) - # NOTE: this will fail if the task failed for some reason.... 
- await tasks_manager.remove_task( - path_params.task_id, with_task_context=task_context, reraise_errors=False - ) - return task_result - except (TaskNotFoundError, TaskNotCompletedError): - raise - except Exception: - # the task shall be removed in this case - await tasks_manager.remove_task( - path_params.task_id, with_task_context=task_context, reraise_errors=False - ) - raise - - -@routes.delete("/{task_id}", name="cancel_and_delete_task") -async def cancel_and_delete_task(request: web.Request) -> web.Response: - path_params = parse_request_path_parameters_as(_PathParam, request) - tasks_manager = get_tasks_manager(request.app) - task_context = get_task_context(request) - await tasks_manager.remove_task(path_params.task_id, with_task_context=task_context) - return web.json_response(status=status.HTTP_204_NO_CONTENT) + return await lrt_api.get_task_result( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + long_running_manager.get_task_context(request), + path_params.task_id, + ) -__all__: tuple[str, ...] = ( - "get_tasks_manager", - "TaskId", - "TaskGet", - "TaskStatus", -) +@routes.delete("/{task_id}", name="remove_task") +async def remove_task(request: web.Request) -> web.Response: + path_params = parse_request_path_parameters_as(_PathParam, request) + long_running_manager = get_long_running_manager(request.app) + + await lrt_api.remove_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + long_running_manager.get_task_context(request), + path_params.task_id, + ) + return web.json_response(status=status.HTTP_204_NO_CONTENT) diff --git a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py index d0c96699462d..09c50be9685e 100644 --- a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py +++ b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/_server.py @@ -1,39 +1,47 @@ import asyncio -import logging +import datetime from collections.abc import AsyncGenerator, Callable from functools import wraps from typing import Any from aiohttp import web +from aiohttp.web import HTTPException from common_library.json_serialization import json_dumps -from pydantic import AnyHttpUrl, PositiveFloat, TypeAdapter +from pydantic import AnyHttpUrl, TypeAdapter +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings from ...aiohttp import status -from ...long_running_tasks._models import TaskGet -from ...long_running_tasks._task import ( +from ...long_running_tasks import lrt_api +from ...long_running_tasks._serialization import ( + BaseObjectSerializer, + register_custom_serialization, +) +from ...long_running_tasks.constants import ( + DEFAULT_STALE_TASK_CHECK_INTERVAL, + DEFAULT_STALE_TASK_DETECT_TIMEOUT, +) +from ...long_running_tasks.models import ( + LRTNamespace, + RegisteredTaskName, TaskContext, - TaskProtocol, - TasksManager, - start_task, + TaskGet, ) from ..typing_extension import Handler from . 
import _routes from ._constants import ( - APP_LONG_RUNNING_TASKS_MANAGER_KEY, - MINUTE, + APP_LONG_RUNNING_MANAGER_KEY, RQT_LONG_RUNNING_TASKS_CONTEXT_KEY, ) -from ._dependencies import create_task_name_from_request, get_tasks_manager from ._error_handlers import base_long_running_error_handler +from ._manager import AiohttpLongRunningManager, get_long_running_manager -_logger = logging.getLogger(__name__) - -def no_ops_decorator(handler: Handler): +def _no_ops_decorator(handler: Handler): return handler -def no_task_context_decorator(handler: Handler): +def _no_task_context_decorator(handler: Handler): @wraps(handler) async def _wrap(request: web.Request): request[RQT_LONG_RUNNING_TASKS_CONTEXT_KEY] = {} @@ -42,22 +50,27 @@ async def _wrap(request: web.Request): return _wrap +def _create_task_name_from_request(request: web.Request) -> str: + return f"{request.method} {request.rel_url}" + + async def start_long_running_task( # NOTE: positional argument are suffixed with "_" to avoid name conflicts with "task_kwargs" keys request_: web.Request, - task_: TaskProtocol, + registerd_task_name: RegisteredTaskName, *, fire_and_forget: bool = False, task_context: TaskContext, **task_kwargs: Any, ) -> web.Response: - task_manager = get_tasks_manager(request_.app) - task_name = create_task_name_from_request(request_) + long_running_manager = get_long_running_manager(request_.app) + task_name = _create_task_name_from_request(request_) task_id = None try: - task_id = start_task( - task_manager, - task_, + task_id = await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + registerd_task_name, fire_and_forget=fire_and_forget, task_context=task_context, task_name=task_name, @@ -74,11 +87,10 @@ async def start_long_running_task( f"http://{ip_addr}:{port}{request_.app.router['get_task_result'].url_for(task_id=task_id)}" # NOSONAR ) abort_url = TypeAdapter(AnyHttpUrl).validate_python( - f"http://{ip_addr}:{port}{request_.app.router['cancel_and_delete_task'].url_for(task_id=task_id)}" # NOSONAR + f"http://{ip_addr}:{port}{request_.app.router['remove_task'].url_for(task_id=task_id)}" # NOSONAR ) task_get = TaskGet( task_id=task_id, - task_name=task_name, status_href=f"{status_url}", result_href=f"{result_url}", abort_href=f"{abort_url}", @@ -89,10 +101,14 @@ async def start_long_running_task( dumps=json_dumps, ) except asyncio.CancelledError: - # cancel the task, the client has disconnected + # remove the task, the client was disconnected if task_id: - task_manager = get_tasks_manager(request_.app) - await task_manager.cancel_task(task_id, with_task_context=None) + await lrt_api.remove_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + task_context, + task_id, + ) raise @@ -115,41 +131,69 @@ def _wrap_and_add_routes( ) +class AiohttpHTTPExceptionSerializer(BaseObjectSerializer[HTTPException]): + @classmethod + def get_init_kwargs_from_object(cls, obj: HTTPException) -> dict: + return { + "status_code": obj.status_code, + "reason": obj.reason, + "text": obj.text, + "headers": dict(obj.headers) if obj.headers else None, + } + + @classmethod + def prepare_object_init_kwargs(cls, data: dict) -> dict: + data.pop("status_code") + return data + + def setup( app: web.Application, *, router_prefix: str, - handler_check_decorator: Callable = no_ops_decorator, - task_request_context_decorator: Callable = no_task_context_decorator, - stale_task_check_interval_s: PositiveFloat = 1 * MINUTE, - stale_task_detect_timeout_s: PositiveFloat = 5 * MINUTE, 
+ redis_settings: RedisSettings, + rabbit_settings: RabbitSettings, + lrt_namespace: LRTNamespace, + stale_task_check_interval: datetime.timedelta = DEFAULT_STALE_TASK_CHECK_INTERVAL, + stale_task_detect_timeout: datetime.timedelta = DEFAULT_STALE_TASK_DETECT_TIMEOUT, + handler_check_decorator: Callable = _no_ops_decorator, + task_request_context_decorator: Callable = _no_task_context_decorator, ) -> None: """ - `router_prefix` APIs are mounted on `/...`, this will change them to be mounted as `{router_prefix}/...` - - `stale_task_check_interval_s` interval at which the + - `redis_settings` settings for Redis connection + - `rabbit_settings` settings for RabbitMQ connection + - `lrt_namespace` namespace for the long-running tasks + - `stale_task_check_interval` interval at which the TaskManager checks for tasks which are no longer being actively monitored by a client - - `stale_task_detect_timeout_s` interval after which a - task is considered stale + - `stale_task_detect_timeout` interval after which atask is considered stale """ async def on_cleanup_ctx(app: web.Application) -> AsyncGenerator[None, None]: - # add components to state - app[ - APP_LONG_RUNNING_TASKS_MANAGER_KEY - ] = long_running_task_manager = TasksManager( - stale_task_check_interval_s=stale_task_check_interval_s, - stale_task_detect_timeout_s=stale_task_detect_timeout_s, - ) + register_custom_serialization(HTTPException, AiohttpHTTPExceptionSerializer) # add error handlers app.middlewares.append(base_long_running_error_handler) + # add components to state + app[APP_LONG_RUNNING_MANAGER_KEY] = long_running_manager = ( + AiohttpLongRunningManager( + stale_task_check_interval=stale_task_check_interval, + stale_task_detect_timeout=stale_task_detect_timeout, + redis_settings=redis_settings, + rabbit_settings=rabbit_settings, + lrt_namespace=lrt_namespace, + ) + ) + + await long_running_manager.setup() + yield # cleanup - await long_running_task_manager.close() + await long_running_manager.teardown() # add routing (done at setup-time) _wrap_and_add_routes( diff --git a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/client.py b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/client.py index e29fabc87fea..ed5675a457d4 100644 --- a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/client.py +++ b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/client.py @@ -1,9 +1,11 @@ import asyncio import logging from collections.abc import AsyncGenerator -from typing import Any +from datetime import timedelta +from typing import Any, Final from aiohttp import ClientConnectionError, ClientSession +from pydantic import PositiveFloat from tenacity import TryAgain, retry from tenacity.asyncio import AsyncRetrying from tenacity.before_sleep import before_sleep_log @@ -12,14 +14,21 @@ from tenacity.wait import wait_random_exponential from yarl import URL -from ...long_running_tasks._constants import DEFAULT_POLL_INTERVAL_S, HOUR -from ...long_running_tasks._models import LRTask, RequestBody +from ...long_running_tasks.constants import DEFAULT_POLL_INTERVAL_S +from ...long_running_tasks.models import ( + LRTask, + RequestBody, + TaskGet, + TaskId, + TaskProgress, + TaskStatus, +) from ...rest_responses import unwrap_envelope_if_required from .. 
import status -from .server import TaskGet, TaskId, TaskProgress, TaskStatus _logger = logging.getLogger(__name__) +_DEFAULT_CLIENT_TIMEOUT_S: Final[PositiveFloat] = timedelta(hours=1).total_seconds() _DEFAULT_AIOHTTP_RETRY_POLICY: dict[str, Any] = { "retry": retry_if_exception_type(ClientConnectionError), @@ -43,7 +52,7 @@ async def _wait_for_completion( session: ClientSession, task_id: TaskId, status_url: URL, - client_timeout: int, + client_timeout: PositiveFloat, ) -> AsyncGenerator[TaskProgress, None]: try: async for attempt in AsyncRetrying( @@ -92,7 +101,7 @@ async def long_running_task_request( session: ClientSession, url: URL, json: RequestBody | None = None, - client_timeout: int = 1 * HOUR, + client_timeout: PositiveFloat = _DEFAULT_CLIENT_TIMEOUT_S, ) -> AsyncGenerator[LRTask, None]: """Will use the passed `ClientSession` to call an oSparc long running task `url` passing `json` as request body. @@ -123,6 +132,3 @@ async def long_running_task_request( if task: await _abort_task(session, URL(task.abort_href)) raise - - -__all__: tuple[str, ...] = ("LRTask",) diff --git a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/server.py b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/server.py index 55d1295c1977..2c51e7cc43dc 100644 --- a/packages/service-library/src/servicelib/aiohttp/long_running_tasks/server.py +++ b/packages/service-library/src/servicelib/aiohttp/long_running_tasks/server.py @@ -6,39 +6,21 @@ running task. """ -from ...long_running_tasks._errors import TaskAlreadyRunningError, TaskCancelledError -from ...long_running_tasks._models import ProgressMessage, ProgressPercent -from ...long_running_tasks._task import ( - TaskId, - TaskProgress, - TaskProtocol, - TasksManager, - TaskStatus, -) -from ._dependencies import ( - create_task_name_from_request, - get_task_context, - get_tasks_manager, -) -from ._routes import TaskGet +from typing import Final + +from aiohttp import web + +from ._manager import get_long_running_manager from ._server import setup, start_long_running_task +APP_LONG_RUNNING_TASKS_KEY: Final = web.AppKey( + "APP_LONG_RUNNING_TASKS_KEY", dict[str, object] +) + __all__: tuple[str, ...] 
= ( - "create_task_name_from_request", - "get_task_context", - "get_tasks_manager", - "ProgressMessage", - "ProgressPercent", + "get_long_running_manager", "setup", "start_long_running_task", - "TaskAlreadyRunningError", - "TaskCancelledError", - "TaskId", - "TaskGet", - "TasksManager", - "TaskProgress", - "TaskProtocol", - "TaskStatus", ) # nopycln: file diff --git a/packages/service-library/src/servicelib/aiohttp/monitor_slow_callbacks.py b/packages/service-library/src/servicelib/aiohttp/monitor_slow_callbacks.py index 24d7328d351d..0723755c4ba6 100644 --- a/packages/service-library/src/servicelib/aiohttp/monitor_slow_callbacks.py +++ b/packages/service-library/src/servicelib/aiohttp/monitor_slow_callbacks.py @@ -1,11 +1,17 @@ import asyncio.events import sys import time +from typing import Final +from aiohttp import web from pyinstrument import Profiler from .incidents import LimitedOrderedStack, SlowCallback +APP_SLOW_CALLBACKS_MONITOR_KEY: Final = web.AppKey( + "APP_SLOW_CALLBACKS_MONITOR_KEY", LimitedOrderedStack[SlowCallback] +) + def enable( slow_duration_secs: float, incidents: LimitedOrderedStack[SlowCallback] diff --git a/packages/service-library/src/servicelib/aiohttp/monitoring.py b/packages/service-library/src/servicelib/aiohttp/monitoring.py index 84472c7e2f34..b929860f2af1 100644 --- a/packages/service-library/src/servicelib/aiohttp/monitoring.py +++ b/packages/service-library/src/servicelib/aiohttp/monitoring.py @@ -1,6 +1,5 @@ """Enables monitoring of some quantities needed for diagnostics""" -import asyncio import logging from collections.abc import Awaitable, Callable from time import perf_counter @@ -26,9 +25,10 @@ ) from .typing_extension import Handler -log = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) _PROMETHEUS_METRICS: Final[str] = f"{__name__}.prometheus_metrics" # noqa: N816 +APP_MONITORING_NAMESPACE_KEY: Final = web.AppKey("APP_MONITORING_NAMESPACE_KEY", str) def get_collector_registry(app: web.Application) -> CollectorRegistry: @@ -60,17 +60,15 @@ def middleware_factory( async def middleware_handler(request: web.Request, handler: Handler): # See https://prometheus.io/docs/concepts/metric_types - log_exception: BaseException | None = None - resp: web.StreamResponse = web.HTTPInternalServerError( - reason="Unexpected exception" - ) + response: web.StreamResponse = web.HTTPInternalServerError() + canonical_endpoint = request.path if request.match_info.route.resource: canonical_endpoint = request.match_info.route.resource.canonical start_time = perf_counter() try: if enter_middleware_cb: - with log_catch(logger=log, reraise=False): + with log_catch(logger=_logger, reraise=False): await enter_middleware_cb(request) metrics = request.app[_PROMETHEUS_METRICS] @@ -86,29 +84,19 @@ async def middleware_handler(request: web.Request, handler: Handler): endpoint=canonical_endpoint, user_agent=user_agent, ): - resp = await handler(request) + response = await handler(request) assert isinstance( # nosec - resp, web.StreamResponse + response, web.StreamResponse ), "Forgot envelope middleware?" 
except web.HTTPServerError as exc: - resp = exc - log_exception = exc - raise resp from exc + response = exc + raise + except web.HTTPException as exc: - resp = exc - log_exception = None - raise resp from exc - except asyncio.CancelledError as exc: - resp = web.HTTPInternalServerError(text=f"{exc}") - log_exception = exc - raise resp from exc - except Exception as exc: # pylint: disable=broad-except - resp = web.HTTPInternalServerError(text=f"{exc}") - resp.__cause__ = exc - log_exception = exc - raise resp from exc + response = exc + raise finally: response_latency_seconds = perf_counter() - start_time @@ -118,29 +106,15 @@ async def middleware_handler(request: web.Request, handler: Handler): method=request.method, endpoint=canonical_endpoint, user_agent=user_agent, - http_status=resp.status, + http_status=response.status, response_latency_seconds=response_latency_seconds, ) if exit_middleware_cb: - with log_catch(logger=log, reraise=False): - await exit_middleware_cb(request, resp) - - if log_exception: - log.error( - 'Unexpected server error "%s" from access: %s "%s %s" done ' - "in %3.2f secs. Responding with status %s", - type(log_exception), - request.remote, - request.method, - request.path, - response_latency_seconds, - resp.status, - exc_info=log_exception, - stack_info=True, - ) - - return resp + with log_catch(logger=_logger, reraise=False): + await exit_middleware_cb(request, response) + + return response setattr( # noqa: B010 middleware_handler, "__middleware_name__", f"{__name__}.monitor_{app_name}" diff --git a/packages/service-library/src/servicelib/aiohttp/observer.py b/packages/service-library/src/servicelib/aiohttp/observer.py index e0dfd6a579e1..7ff3ca4826e4 100644 --- a/packages/service-library/src/servicelib/aiohttp/observer.py +++ b/packages/service-library/src/servicelib/aiohttp/observer.py @@ -6,21 +6,26 @@ import logging from collections import defaultdict from collections.abc import Callable +from typing import Final from aiohttp import web +from ..aiohttp.application_setup import ensure_single_setup from ..utils import logged_gather log = logging.getLogger(__name__) _APP_OBSERVER_EVENTS_REGISTRY_KEY = "{__name__}.event_registry" +APP_FIRE_AND_FORGET_TASKS_KEY: Final = web.AppKey( + "APP_FIRE_AND_FORGET_TASKS_KEY", set[object] +) -class ObserverRegistryNotFoundError(RuntimeError): - ... +class ObserverRegistryNotFoundError(RuntimeError): ... +@ensure_single_setup(__name__, logger=log) def setup_observer_registry(app: web.Application): # only once app.setdefault(_APP_OBSERVER_EVENTS_REGISTRY_KEY, defaultdict(list)) diff --git a/packages/service-library/src/servicelib/aiohttp/profiler_middleware.py b/packages/service-library/src/servicelib/aiohttp/profiler_middleware.py index eab7d1fc5980..07d3c7127297 100644 --- a/packages/service-library/src/servicelib/aiohttp/profiler_middleware.py +++ b/packages/service-library/src/servicelib/aiohttp/profiler_middleware.py @@ -1,9 +1,6 @@ from aiohttp.web import HTTPInternalServerError, Request, StreamResponse, middleware -from servicelib.mimetype_constants import ( - MIMETYPE_APPLICATION_JSON, - MIMETYPE_APPLICATION_ND_JSON, -) +from ..mimetype_constants import MIMETYPE_APPLICATION_JSON, MIMETYPE_APPLICATION_ND_JSON from ..utils_profiling_middleware import _is_profiling, _profiler, append_profile @@ -13,7 +10,7 @@ async def profiling_middleware(request: Request, handler): try: if _profiler.is_running or (_profiler.last_session is not None): raise HTTPInternalServerError( - reason="Profiler is already running. 
Only a single request can be profiled at any given time.", + text="Profiler is already running. Only a single request can be profiled at any given time.", headers={}, ) _profiler.reset() @@ -24,7 +21,7 @@ async def profiling_middleware(request: Request, handler): if response.content_type != MIMETYPE_APPLICATION_JSON: raise HTTPInternalServerError( - reason=f"Profiling middleware is not compatible with {response.content_type=}", + text=f"Profiling middleware is not compatible with {response.content_type=}", headers={}, ) diff --git a/packages/service-library/src/servicelib/request_keys.py b/packages/service-library/src/servicelib/aiohttp/request_keys.py similarity index 62% rename from packages/service-library/src/servicelib/request_keys.py rename to packages/service-library/src/servicelib/aiohttp/request_keys.py index 8322e812557f..3d288b6960ec 100644 --- a/packages/service-library/src/servicelib/request_keys.py +++ b/packages/service-library/src/servicelib/aiohttp/request_keys.py @@ -1,7 +1,5 @@ -""" Storage keys in requests +"""Storage keys in requests""" -""" from typing import Final -# RQT=request RQT_USERID_KEY: Final[str] = f"{__name__}.userid" diff --git a/packages/service-library/src/servicelib/aiohttp/requests_validation.py b/packages/service-library/src/servicelib/aiohttp/requests_validation.py index d555e535fe74..d5717fbdd4d6 100644 --- a/packages/service-library/src/servicelib/aiohttp/requests_validation.py +++ b/packages/service-library/src/servicelib/aiohttp/requests_validation.py @@ -10,10 +10,11 @@ import json.decoder from collections.abc import Iterator from contextlib import contextmanager -from typing import TypeVar +from typing import Final, TypeVar from aiohttp import web -from common_library.json_serialization import json_dumps +from common_library.user_messages import user_message +from models_library.rest_error import EnvelopedError from pydantic import BaseModel, TypeAdapter, ValidationError from ..mimetype_constants import MIMETYPE_APPLICATION_JSON @@ -22,17 +23,20 @@ ModelClass = TypeVar("ModelClass", bound=BaseModel) ModelOrListOrDictType = TypeVar("ModelOrListOrDictType", bound=BaseModel | list | dict) +APP_JSON_SCHEMA_SPECS_KEY: Final = web.AppKey( + "APP_JSON_SCHEMA_SPECS_KEY", dict[str, object] +) + @contextmanager def handle_validation_as_http_error( - *, error_msg_template: str, resource_name: str, use_error_v1: bool + *, error_msg_template: str, resource_name: str ) -> Iterator[None]: """Context manager to handle ValidationError and reraise them as HTTPUnprocessableEntity error Arguments: error_msg_template -- _description_ resource_name -- - use_error_v1 -- If True, it uses new error response Raises: web.HTTPUnprocessableEntity: (422) raised from a ValidationError @@ -43,53 +47,40 @@ def handle_validation_as_http_error( yield except ValidationError as err: - details = [ + # SEE https://github.com/ITISFoundation/osparc-simcore/issues/443 + _details = [ { - "loc": ".".join(map(str, e["loc"])), + "loc": ".".join(map(str, e["loc"])), # e.g. 
"body.name" "msg": e["msg"], "type": e["type"], } for e in err.errors() ] - reason_msg = error_msg_template.format( - failed=", ".join(d["loc"] for d in details) - ) - - if use_error_v1: - # NOTE: keeps backwards compatibility until ligher error response is implemented in the entire API - # Implements servicelib.aiohttp.rest_responses.ErrorItemType - errors = [ - { - "code": e["type"], - "message": e["msg"], - "resource": resource_name, - "field": e["loc"], - } - for e in details - ] - error_str = json_dumps( - { - "error": { - "status": status.HTTP_422_UNPROCESSABLE_ENTITY, - "errors": errors, - } - } - ) - else: - # NEW proposed error for https://github.com/ITISFoundation/osparc-simcore/issues/443 - error_str = json_dumps( - { - "error": { - "msg": reason_msg, - "resource": resource_name, # optional - "details": details, # optional - } + + errors_details = [ + { + "code": e["type"], + "message": e["msg"], + "resource": resource_name, + "field": e["loc"], + } + for e in _details + ] + + error_json_str = EnvelopedError.model_validate( + { + "error": { + "message": error_msg_template.format( + failed=", ".join(e["field"] for e in errors_details) + ), + "status": status.HTTP_422_UNPROCESSABLE_ENTITY, + "errors": errors_details, } - ) + } + ).model_dump_json(exclude_unset=True, exclude_none=True) raise web.HTTPUnprocessableEntity( # 422 - reason=reason_msg, - text=error_str, + text=error_json_str, content_type=MIMETYPE_APPLICATION_JSON, ) from err @@ -105,15 +96,10 @@ def handle_validation_as_http_error( def parse_request_path_parameters_as( parameters_schema_cls: type[ModelClass], request: web.Request, - *, - use_enveloped_error_v1: bool = True, ) -> ModelClass: """Parses path parameters from 'request' and validates against 'parameters_schema' - Keyword Arguments: - use_enveloped_error_v1 -- new enveloped error model (default: {True}) - Raises: web.HTTPUnprocessableEntity: (422) if validation of parameters fail @@ -122,9 +108,10 @@ def parse_request_path_parameters_as( """ with handle_validation_as_http_error( - error_msg_template="Invalid parameter/s '{failed}' in request path", + error_msg_template=user_message( + "Invalid parameter/s '{failed}' in request path" + ), resource_name=request.rel_url.path, - use_error_v1=use_enveloped_error_v1, ): data = dict(request.match_info) return parameters_schema_cls.model_validate(data) @@ -133,15 +120,10 @@ def parse_request_path_parameters_as( def parse_request_query_parameters_as( parameters_schema_cls: type[ModelClass], request: web.Request, - *, - use_enveloped_error_v1: bool = True, ) -> ModelClass: """Parses query parameters from 'request' and validates against 'parameters_schema' - Keyword Arguments: - use_enveloped_error_v1 -- new enveloped error model (default: {True}) - Raises: web.HTTPUnprocessableEntity: (422) if validation of parameters fail @@ -150,9 +132,10 @@ def parse_request_query_parameters_as( """ with handle_validation_as_http_error( - error_msg_template="Invalid parameter/s '{failed}' in request query", + error_msg_template=user_message( + "Invalid parameter/s '{failed}' in request query" + ), resource_name=request.rel_url.path, - use_error_v1=use_enveloped_error_v1, ): # NOTE: Currently, this does not take into consideration cases where there are multiple # query parameters with the same key. However, we are not using such cases anywhere at the moment. 
@@ -167,13 +150,12 @@ def parse_request_query_parameters_as( def parse_request_headers_as( parameters_schema_cls: type[ModelClass], request: web.Request, - *, - use_enveloped_error_v1: bool = True, ) -> ModelClass: with handle_validation_as_http_error( - error_msg_template="Invalid parameter/s '{failed}' in request headers", + error_msg_template=user_message( + "Invalid parameter/s '{failed}' in request headers" + ), resource_name=request.rel_url.path, - use_error_v1=use_enveloped_error_v1, ): data = dict(request.headers) return parameters_schema_cls.model_validate(data) @@ -182,8 +164,6 @@ def parse_request_headers_as( async def parse_request_body_as( model_schema_cls: type[ModelOrListOrDictType], request: web.Request, - *, - use_enveloped_error_v1: bool = True, ) -> ModelOrListOrDictType: """Parses and validates request body against schema @@ -198,9 +178,8 @@ async def parse_request_body_as( Validated model of request body """ with handle_validation_as_http_error( - error_msg_template="Invalid field/s '{failed}' in request body", + error_msg_template=user_message("Invalid field/s '{failed}' in request body"), resource_name=request.rel_url.path, - use_error_v1=use_enveloped_error_v1, ): if not request.can_read_body: # requests w/o body e.g. when model-schema is fully optional diff --git a/packages/service-library/src/servicelib/aiohttp/rest_middlewares.py b/packages/service-library/src/servicelib/aiohttp/rest_middlewares.py index 064355fdbd2e..0c8a6da57cfe 100644 --- a/packages/service-library/src/servicelib/aiohttp/rest_middlewares.py +++ b/packages/service-library/src/servicelib/aiohttp/rest_middlewares.py @@ -5,32 +5,38 @@ import logging from collections.abc import Awaitable, Callable -from typing import Any +from typing import Any, Final from aiohttp import web +from aiohttp.web_exceptions import HTTPError from aiohttp.web_request import Request from aiohttp.web_response import StreamResponse -from common_library.error_codes import create_error_code +from common_library.error_codes import ErrorCodeStr, create_error_code from common_library.json_serialization import json_dumps, json_loads +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs +from common_library.user_messages import user_message +from models_library.basic_types import IDStr from models_library.rest_error import ErrorGet, ErrorItemType, LogMessageType -from ..logging_errors import create_troubleshotting_log_kwargs from ..mimetype_constants import MIMETYPE_APPLICATION_JSON -from ..rest_responses import is_enveloped_from_map, is_enveloped_from_text -from ..utils import is_production_environ +from ..rest_constants import RESPONSE_MODEL_POLICY +from ..rest_responses import is_enveloped_from_text +from ..status_codes_utils import get_code_description, is_5xx_server_error +from . import status from .rest_responses import ( create_data_response, create_http_error, safe_status_message, wrap_as_envelope, ) -from .rest_utils import EnvelopeFactory from .typing_extension import Handler, Middleware +from .web_exceptions_extension import get_http_error_class_or_none DEFAULT_API_VERSION = "v0" -_FMSG_INTERNAL_ERROR_USER_FRIENDLY = ( +_FMSG_INTERNAL_ERROR_USER_FRIENDLY = user_message( "We apologize for the inconvenience. " - "The issue has been recorded, please report it if it persists." 
+ "The issue has been recorded, please report it if it persists.", + _version=1, ) @@ -42,110 +48,198 @@ def is_api_request(request: web.Request, api_version: str) -> bool: return bool(request.path.startswith(base_path)) -def error_middleware_factory( # noqa: C901 - api_version: str, -) -> Middleware: - _is_prod: bool = is_production_environ() - - def _process_and_raise_unexpected_error(request: web.BaseRequest, err: Exception): - error_code = create_error_code(err) - error_context: dict[str, Any] = { - "request.remote": f"{request.remote}", - "request.method": f"{request.method}", - "request.path": f"{request.path}", - } - - user_error_msg = _FMSG_INTERNAL_ERROR_USER_FRIENDLY - http_error = create_http_error( - err, +def _create_error_context( + request: web.BaseRequest, exception: Exception +) -> tuple[ErrorCodeStr, dict[str, Any]]: + """Create error code and context for logging purposes. + + Returns: + Tuple of (error_code, error_context) + """ + error_code = create_error_code(exception) + error_context: dict[str, Any] = { + "request.remote": f"{request.remote}", + "request.method": f"{request.method}", + "request.path": f"{request.path}", + } + return error_code, error_context + + +def _log_5xx_server_error( + request: web.BaseRequest, exception: Exception, user_error_msg: str +) -> ErrorCodeStr: + """Log 5XX server errors with error code and context.""" + error_code, error_context = _create_error_context(request, exception) + + _logger.exception( + **create_troubleshooting_log_kwargs( user_error_msg, - web.HTTPInternalServerError, - skip_internal_error_details=_is_prod, + error=exception, + error_context=error_context, error_code=error_code, ) - _logger.exception( - **create_troubleshotting_log_kwargs( - user_error_msg, - error=err, - error_context=error_context, - error_code=error_code, + ) + return error_code + + +def _handle_unexpected_exception_as_500( + request: web.BaseRequest, exception: Exception +) -> web.HTTPInternalServerError: + """Process unexpected exceptions and return them as HTTP errors with proper formatting. + + IMPORTANT: this function cannot throw exceptions, as it is called + """ + error_code, error_context = _create_error_context(request, exception) + user_error_msg = _FMSG_INTERNAL_ERROR_USER_FRIENDLY + + error_context["http_error"] = http_error = create_http_error( + exception, + user_error_msg, + web.HTTPInternalServerError, + error_code=error_code, + ) + + _log_5xx_server_error(request, exception, user_error_msg) + + return http_error + + +def handle_aiohttp_web_http_error( + request: web.BaseRequest, exception: web.HTTPError +) -> web.HTTPError: + """Handle standard HTTP errors by ensuring they're properly formatted. 
+ + NOTE: this needs further refactoring to avoid code duplication + """ + assert request # nosec + assert not exception.empty_body, "HTTPError should not have an empty body" # nosec + + exception.content_type = MIMETYPE_APPLICATION_JSON + if exception.reason: + exception.set_status( + exception.status, reason=safe_status_message(message=exception.reason) + ) + + if not exception.text or not is_enveloped_from_text(exception.text): + # NOTE: aiohttp.HTTPException creates `text = f"{self.status}: {self.reason}"` + user_error_msg = exception.text or "Unexpected error" + + error_code: IDStr | None = None + if is_5xx_server_error(exception.status): + error_code = IDStr( + _log_5xx_server_error(request, exception, user_error_msg) + ) + + error_model = ErrorGet( + errors=[ + ErrorItemType( + code=exception.__class__.__name__, + message=user_error_msg, + resource=None, + field=None, + ), + ], + status=exception.status, + logs=[ + LogMessageType(message=user_error_msg, level="ERROR"), + ], + message=user_error_msg, + support_id=error_code, + ) + exception.text = json_dumps( + wrap_as_envelope( + error=error_model.model_dump(mode="json", **RESPONSE_MODEL_POLICY) ) ) - raise http_error + return exception + + +def _handle_aiohttp_web_http_successful( + request: web.Request, exception: web.HTTPSuccessful +) -> web.HTTPSuccessful: + """Handle successful HTTP responses, ensuring they're properly enveloped.""" + assert request # nosec + + exception.content_type = MIMETYPE_APPLICATION_JSON + if exception.reason: + exception.set_status( + exception.status, reason=safe_status_message(message=exception.reason) + ) + + if exception.text and not is_enveloped_from_text(exception.text): + # Ensures that the response is enveloped + data = json_loads(exception.text) + exception.text = json_dumps({"data": data}) + + return exception + + +def _handle_exception_as_http_error( + request: web.Request, + exception: NotImplementedError | TimeoutError, + status_code: int, +) -> HTTPError: + """ + Generic handler for exceptions that map to specific HTTP status codes. + Converts the status code to the appropriate HTTP error class and creates a response. + """ + assert request # nosec + + http_error_cls = get_http_error_class_or_none(status_code) + if http_error_cls is None: + msg = ( + f"No HTTP error class found for status code {status_code}, falling back to 500", + ) + raise ValueError(msg) + + user_error_msg = get_code_description(status_code) + + if is_5xx_server_error(status_code): + _log_5xx_server_error(request, exception, user_error_msg) + + return create_http_error(exception, user_error_msg, http_error_cls) + + +def error_middleware_factory(api_version: str) -> Middleware: @web.middleware - async def _middleware_handler(request: web.Request, handler: Handler): # noqa: C901 + async def _middleware_handler(request: web.Request, handler: Handler): """ Ensure all error raised are properly enveloped and json responses """ if not is_api_request(request, api_version): return await handler(request) - # FIXME: review when to send info to client and when not! 
try: - return await handler(request) + try: + result = await handler(request) - except web.HTTPError as err: - - err.content_type = MIMETYPE_APPLICATION_JSON - if err.reason: - err.set_status(err.status, safe_status_message(message=err.reason)) - - if not err.text or not is_enveloped_from_text(err.text): - error_message = err.text or err.reason or "Unexpected error" - error_model = ErrorGet( - errors=[ - ErrorItemType.from_error(err), - ], - status=err.status, - logs=[ - LogMessageType(message=error_message, level="ERROR"), - ], - message=error_message, + except web.HTTPError as exc: # 4XX and 5XX raised as exceptions + result = handle_aiohttp_web_http_error(request, exc) + + except web.HTTPSuccessful as exc: # 2XX rased as exceptions + result = _handle_aiohttp_web_http_successful(request, exc) + + except web.HTTPRedirection as exc: # 3XX raised as exceptions + result = exc + + except NotImplementedError as exc: + result = _handle_exception_as_http_error( + request, exc, status.HTTP_501_NOT_IMPLEMENTED ) - err.text = EnvelopeFactory(error=error_model).as_text() - - raise - - except web.HTTPSuccessful as err: - err.content_type = MIMETYPE_APPLICATION_JSON - if err.reason: - err.set_status(err.status, safe_status_message(message=err.reason)) - - if err.text: - try: - payload = json_loads(err.text) - if not is_enveloped_from_map(payload): - payload = wrap_as_envelope(data=payload) - err.text = json_dumps(payload) - except Exception as other_error: # pylint: disable=broad-except - _process_and_raise_unexpected_error(request, other_error) - raise - - except web.HTTPRedirection as err: - _logger.debug("Redirected to %s", err) - raise - - except NotImplementedError as err: - http_error = create_http_error( - err, - f"{err}", - web.HTTPNotImplemented, - skip_internal_error_details=_is_prod, - ) - raise http_error from err - - except TimeoutError as err: - http_error = create_http_error( - err, - f"{err}", - web.HTTPGatewayTimeout, - skip_internal_error_details=_is_prod, - ) - raise http_error from err - except Exception as err: # pylint: disable=broad-except - _process_and_raise_unexpected_error(request, err) + except TimeoutError as exc: + result = _handle_exception_as_http_error( + request, exc, status.HTTP_504_GATEWAY_TIMEOUT + ) + + except Exception as exc: # pylint: disable=broad-except + # + # Last resort for unexpected exceptions (including those raise by the exception handlers!) + # + result = _handle_unexpected_exception_as_500(request, exc) + + return result # adds identifier (mostly for debugging) setattr( # noqa: B010 @@ -164,7 +258,6 @@ def envelope_middleware_factory( api_version: str, ) -> Callable[..., Awaitable[StreamResponse]]: # FIXME: This data conversion is very error-prone. Use decorators instead! 
- _is_prod: bool = is_production_environ() @web.middleware async def _middleware_handler( @@ -205,3 +298,8 @@ def append_rest_middlewares( """Helper that appends rest-middlewares in the correct order""" app.middlewares.append(error_middleware_factory(api_version)) app.middlewares.append(envelope_middleware_factory(api_version)) + + +APP_JSONSCHEMA_SPECS_KEY: Final = web.AppKey( + "APP_JSONSCHEMA_SPECS_KEY", dict[str, object] +) diff --git a/packages/service-library/src/servicelib/aiohttp/rest_responses.py b/packages/service-library/src/servicelib/aiohttp/rest_responses.py index 3986de59700c..7e3214dac2f2 100644 --- a/packages/service-library/src/servicelib/aiohttp/rest_responses.py +++ b/packages/service-library/src/servicelib/aiohttp/rest_responses.py @@ -1,4 +1,4 @@ -from typing import Any, Final, TypedDict +from typing import Any, Final, TypedDict, TypeVar from aiohttp import web from aiohttp.web_exceptions import HTTPError @@ -10,7 +10,7 @@ from ..mimetype_constants import MIMETYPE_APPLICATION_JSON from ..rest_constants import RESPONSE_MODEL_POLICY from ..rest_responses import is_enveloped -from ..status_codes_utils import get_code_description, is_error +from ..status_codes_utils import get_code_description, get_code_display_name, is_error class EnvelopeDict(TypedDict): @@ -69,32 +69,37 @@ def safe_status_message( return flat_message[: max_length - 3] + "..." +T_HTTPError = TypeVar("T_HTTPError", bound=HTTPError) + + def create_http_error( errors: list[Exception] | Exception, error_message: str | None = None, - http_error_cls: type[HTTPError] = web.HTTPInternalServerError, + http_error_cls: type[ + T_HTTPError + ] = web.HTTPInternalServerError, # type: ignore[assignment] *, status_reason: str | None = None, - skip_internal_error_details: bool = False, error_code: ErrorCodeStr | None = None, -) -> HTTPError: +) -> T_HTTPError: """ - Response body conforms OAS schema model - Can skip internal details when 500 status e.g. to avoid transmitting server exceptions to the client in production """ - if not isinstance(errors, list): - errors = [errors] - - is_internal_error = bool(http_error_cls == web.HTTPInternalServerError) - status_reason = status_reason or get_code_description(http_error_cls.status_code) + status_reason = status_reason or get_code_display_name(http_error_cls.status_code) error_message = error_message or get_code_description(http_error_cls.status_code) assert len(status_reason) < MAX_STATUS_MESSAGE_LENGTH # nosec - if is_internal_error and skip_internal_error_details: - error = ErrorGet.model_validate( + # WARNING: do not refactor too much this function withouth considering how + # front-end handle errors. i.e. please sync with front-end developers before + # changing the workflows in this function + + is_internal_error = bool(http_error_cls == web.HTTPInternalServerError) + if is_internal_error: + error_model = ErrorGet.model_validate( { "status": http_error_cls.status_code, "message": error_message, @@ -102,8 +107,11 @@ def create_http_error( } ) else: + if not isinstance(errors, list): + errors = [errors] + items = [ErrorItemType.from_error(err) for err in errors] - error = ErrorGet.model_validate( + error_model = ErrorGet.model_validate( { "errors": items, # NOTE: deprecated! 
"status": http_error_cls.status_code, @@ -113,26 +121,31 @@ def create_http_error( ) assert not http_error_cls.empty_body # nosec + payload = wrap_as_envelope( - error=error.model_dump(mode="json", **RESPONSE_MODEL_POLICY) + error=error_model.model_dump(mode="json", **RESPONSE_MODEL_POLICY) ) return http_error_cls( reason=safe_status_message(status_reason), - text=json_dumps( - payload, - ), + text=json_dumps(payload), content_type=MIMETYPE_APPLICATION_JSON, ) -def exception_to_response(exc: HTTPError) -> web.Response: +def exception_to_response(exception: HTTPError) -> web.Response: # Returning web.HTTPException is deprecated so here we have a converter to a response # so it can be used as # SEE https://github.com/aio-libs/aiohttp/issues/2415 + + if exception.reason: + reason = safe_status_message(exception.reason) + else: + reason = get_code_description(exception.status) + return web.Response( - status=exc.status, - headers=exc.headers, - reason=exc.reason, - text=exc.text, + status=exception.status, + headers=exception.headers, + reason=reason, + text=exception.text, ) diff --git a/packages/service-library/src/servicelib/aiohttp/status.py b/packages/service-library/src/servicelib/aiohttp/status.py index 2a38913adcc6..695ddb0c6d56 100644 --- a/packages/service-library/src/servicelib/aiohttp/status.py +++ b/packages/service-library/src/servicelib/aiohttp/status.py @@ -17,6 +17,10 @@ from __future__ import annotations +from typing import Final + +from aiohttp import web + __all__ = ( "HTTP_100_CONTINUE", "HTTP_101_SWITCHING_PROTOCOLS", @@ -146,3 +150,5 @@ HTTP_508_LOOP_DETECTED = 508 HTTP_510_NOT_EXTENDED = 510 HTTP_511_NETWORK_AUTHENTICATION_REQUIRED = 511 + +APP_HEALTH_KEY: Final = web.AppKey("APP_HEALTH_KEY", str) diff --git a/packages/service-library/src/servicelib/aiohttp/tracing.py b/packages/service-library/src/servicelib/aiohttp/tracing.py index 1e41aab20f09..140a24852e4c 100644 --- a/packages/service-library/src/servicelib/aiohttp/tracing.py +++ b/packages/service-library/src/servicelib/aiohttp/tracing.py @@ -2,6 +2,7 @@ import logging from collections.abc import AsyncIterator, Callable +from typing import Final from aiohttp import web from opentelemetry import trace @@ -15,12 +16,14 @@ middleware as aiohttp_server_opentelemetry_middleware, # pylint:disable=no-name-in-module ) from opentelemetry.sdk.resources import Resource -from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace import SpanProcessor, TracerProvider from opentelemetry.sdk.trace.export import BatchSpanProcessor -from servicelib.logging_utils import log_context from settings_library.tracing import TracingSettings from yarl import URL +from ..logging_utils import log_context +from ..tracing import get_trace_id_header + _logger = logging.getLogger(__name__) try: from opentelemetry.instrumentation.botocore import ( # type: ignore[import-not-found] @@ -36,6 +39,13 @@ HAS_AIOPG = True except ImportError: HAS_AIOPG = False +try: + from opentelemetry.instrumentation.asyncpg import AsyncPGInstrumentor + + HAS_ASYNCPG = True +except ImportError: + HAS_ASYNCPG = False + try: from opentelemetry.instrumentation.requests import RequestsInstrumentor @@ -50,11 +60,24 @@ except ImportError: HAS_AIO_PIKA = False +APP_OPENTELEMETRY_INSTRUMENTOR_KEY: Final = web.AppKey( + "APP_OPENTELEMETRY_INSTRUMENTOR_KEY", dict[str, object] +) + + +def _create_span_processor(tracing_destination: str) -> SpanProcessor: + otlp_exporter = OTLPSpanExporterHTTP( + endpoint=tracing_destination, + ) + return 
BatchSpanProcessor(otlp_exporter) + def _startup( + *, app: web.Application, tracing_settings: TracingSettings, service_name: str, + add_response_trace_id_header: bool = False, ) -> None: """ Sets up this service for a distributed tracing system (opentelemetry) @@ -90,12 +113,8 @@ def _startup( tracing_destination, ) - otlp_exporter = OTLPSpanExporterHTTP( - endpoint=tracing_destination, - ) - # Add the span processor to the tracer provider - tracer_provider.add_span_processor(BatchSpanProcessor(otlp_exporter)) # type: ignore[attr-defined] # https://github.com/open-telemetry/opentelemetry-python/issues/3713 + tracer_provider.add_span_processor(_create_span_processor(tracing_destination)) # type: ignore[attr-defined] # https://github.com/open-telemetry/opentelemetry-python/issues/3713 # Instrument aiohttp server # Explanation for custom middleware call DK 10/2024: # OpenTelemetry Aiohttp autoinstrumentation is meant to be used by only calling `AioHttpServerInstrumentor().instrument()` @@ -106,6 +125,8 @@ def _startup( # # Since the code that is provided (monkeypatched) in the __init__ that the opentelemetry-autoinstrumentation-library provides is only 4 lines, # just adding a middleware, we are free to simply execute this "missed call" [since we can't call the monkeypatch'ed __init__()] in this following line: + if add_response_trace_id_header: + app.middlewares.insert(0, response_trace_id_header_middleware) app.middlewares.insert(0, aiohttp_server_opentelemetry_middleware) # Code of the aiohttp server instrumentation: github.com/open-telemetry/opentelemetry-python-contrib/blob/eccb05c808a7d797ef5b6ecefed3590664426fbf/instrumentation/opentelemetry-instrumentation-aiohttp-server/src/opentelemetry/instrumentation/aiohttp_server/__init__.py#L246 # For reference, the above statement was written for: @@ -122,6 +143,13 @@ def _startup( msg="Attempting to add aio-pg opentelemetry autoinstrumentation...", ): AiopgInstrumentor().instrument() + if HAS_ASYNCPG: + with log_context( + _logger, + logging.INFO, + msg="Attempting to add asyncpg opentelemetry autoinstrumentation...", + ): + AsyncPGInstrumentor().instrument() if HAS_BOTOCORE: with log_context( _logger, @@ -146,6 +174,21 @@ def _startup( AioPikaInstrumentor().instrument() +@web.middleware +async def response_trace_id_header_middleware(request: web.Request, handler): + headers = get_trace_id_header() + + try: + response = await handler(request) + except web.HTTPException as exc: + if headers: + exc.headers.update(headers) + raise + if headers: + response.headers.update(headers) + return response + + def _shutdown() -> None: """Uninstruments all opentelemetry instrumentors that were instrumented.""" try: @@ -157,6 +200,11 @@ def _shutdown() -> None: AiopgInstrumentor().uninstrument() except Exception: # pylint:disable=broad-exception-caught _logger.exception("Failed to uninstrument AiopgInstrumentor") + if HAS_ASYNCPG: + try: + AsyncPGInstrumentor().uninstrument() + except Exception: # pylint:disable=broad-exception-caught + _logger.exception("Failed to uninstrument AsyncPGInstrumentor") if HAS_BOTOCORE: try: BotocoreInstrumentor().uninstrument() @@ -175,9 +223,18 @@ def _shutdown() -> None: def get_tracing_lifespan( - app: web.Application, tracing_settings: TracingSettings, service_name: str + *, + app: web.Application, + tracing_settings: TracingSettings, + service_name: str, + add_response_trace_id_header: bool = False, ) -> Callable[[web.Application], AsyncIterator]: - _startup(app=app, tracing_settings=tracing_settings, 
service_name=service_name) + _startup( + app=app, + tracing_settings=tracing_settings, + service_name=service_name, + add_response_trace_id_header=add_response_trace_id_header, + ) async def tracing_lifespan(app: web.Application): assert app # nosec diff --git a/packages/service-library/src/servicelib/archiving_utils/_interface_7zip.py b/packages/service-library/src/servicelib/archiving_utils/_interface_7zip.py index 1e642895f1dd..9fab723c1edb 100644 --- a/packages/service-library/src/servicelib/archiving_utils/_interface_7zip.py +++ b/packages/service-library/src/servicelib/archiving_utils/_interface_7zip.py @@ -11,10 +11,10 @@ import tqdm from pydantic import NonNegativeInt -from servicelib.logging_utils import log_catch from tqdm.contrib.logging import tqdm_logging_redirect from ..file_utils import shutil_move +from ..logging_utils import log_catch from ..progress_bar import ProgressBarData from ._errors import ( CouldNotFindValueError, diff --git a/packages/service-library/src/servicelib/async_utils.py b/packages/service-library/src/servicelib/async_utils.py index c6466df0a708..fc84fda55acc 100644 --- a/packages/service-library/src/servicelib/async_utils.py +++ b/packages/service-library/src/servicelib/async_utils.py @@ -1,14 +1,13 @@ import asyncio -import contextlib -import datetime import logging from collections import deque -from collections.abc import Awaitable, Callable, Coroutine -from contextlib import suppress +from collections.abc import Awaitable, Callable from dataclasses import dataclass from functools import wraps from typing import TYPE_CHECKING, Any, ParamSpec, TypeVar +from common_library.async_tools import cancel_wait_task + from . import tracing from .utils_profiling_middleware import dont_profile, is_profiling, profile_context @@ -56,9 +55,7 @@ async def _safe_cancel(context: Context) -> None: try: await context.in_queue.put(None) if context.task is not None: - context.task.cancel() - with suppress(asyncio.CancelledError): - await context.task + await cancel_wait_task(context.task, max_delay=None) except RuntimeError as e: if "Event loop is closed" in f"{e}": _logger.warning("event loop is closed and could not cancel %s", context) @@ -212,40 +209,3 @@ async def worker(in_q: Queue[QueueElement], out_q: Queue) -> None: return wrapper return decorator - - -def delayed_start( - delay: datetime.timedelta, -) -> Callable[ - [Callable[P, Coroutine[Any, Any, R]]], Callable[P, Coroutine[Any, Any, R]] -]: - def _decorator( - func: Callable[P, Coroutine[Any, Any, R]], - ) -> Callable[P, Coroutine[Any, Any, R]]: - @wraps(func) - async def _wrapper(*args: P.args, **kwargs: P.kwargs) -> R: - await asyncio.sleep(delay.total_seconds()) - return await func(*args, **kwargs) - - return _wrapper - - return _decorator - - -async def cancel_wait_task( - task: asyncio.Task, - *, - max_delay: float | None = None, -) -> None: - """Cancel a asyncio.Task and waits for it to finish. - - :param task: task to be canceled - :param max_delay: duration (in seconds) to wait before giving - up the cancellation. If None it waits forever. - :raises TimeoutError: raised if cannot cancel the task. 
- """ - - task.cancel() - async with asyncio.timeout(max_delay): - with contextlib.suppress(asyncio.CancelledError): - await task diff --git a/packages/service-library/src/servicelib/background_task.py b/packages/service-library/src/servicelib/background_task.py index 508f34b99eec..a283c78f6060 100644 --- a/packages/service-library/src/servicelib/background_task.py +++ b/packages/service-library/src/servicelib/background_task.py @@ -6,10 +6,10 @@ from collections.abc import AsyncIterator, Awaitable, Callable, Coroutine from typing import Any, Final, ParamSpec, TypeVar +from common_library.async_tools import cancel_wait_task, delayed_start from tenacity import TryAgain, before_sleep_log, retry, retry_if_exception_type from tenacity.wait import wait_fixed -from .async_utils import cancel_wait_task, delayed_start from .logging_utils import log_catch, log_context _logger = logging.getLogger(__name__) @@ -42,7 +42,7 @@ def periodic( ) -> Callable[ [Callable[P, Coroutine[Any, Any, None]]], Callable[P, Coroutine[Any, Any, None]] ]: - """Calls the function periodically with a given interval. + """Calls the function periodically with a given interval or triggered by an early wake-up event. Arguments: interval -- the interval between calls @@ -58,7 +58,7 @@ def periodic( """ def _decorator( - func: Callable[P, Coroutine[Any, Any, None]], + async_fun: Callable[P, Coroutine[Any, Any, None]], ) -> Callable[P, Coroutine[Any, Any, None]]: class _InternalTryAgain(TryAgain): # Local exception to prevent reacting to similarTryAgain exceptions raised by the wrapped func @@ -82,10 +82,10 @@ class _InternalTryAgain(TryAgain): ), before_sleep=before_sleep_log(_logger, logging.DEBUG), ) - @functools.wraps(func) + @functools.wraps(async_fun) async def _wrapper(*args: P.args, **kwargs: P.kwargs) -> None: with log_catch(_logger, reraise=True): - await func(*args, **kwargs) + await async_fun(*args, **kwargs) raise _InternalTryAgain return _wrapper @@ -142,4 +142,4 @@ async def periodic_task( if asyncio_task is not None: # NOTE: this stopping is shielded to prevent the cancellation to propagate # into the stopping procedure - await asyncio.shield(cancel_wait_task(asyncio_task, max_delay=stop_timeout)) + await cancel_wait_task(asyncio_task, max_delay=stop_timeout) diff --git a/packages/service-library/src/servicelib/background_task_utils.py b/packages/service-library/src/servicelib/background_task_utils.py index 8313f6424303..bd70241b183f 100644 --- a/packages/service-library/src/servicelib/background_task_utils.py +++ b/packages/service-library/src/servicelib/background_task_utils.py @@ -3,11 +3,10 @@ from collections.abc import Callable, Coroutine from typing import Any, ParamSpec, TypeVar -from servicelib.exception_utils import silence_exceptions -from servicelib.redis._errors import CouldNotAcquireLockError - from .background_task import periodic +from .exception_utils import suppress_exceptions from .redis import RedisClientSDK, exclusive +from .redis._errors import CouldNotAcquireLockError P = ParamSpec("P") R = TypeVar("R") @@ -39,10 +38,11 @@ def _decorator( coro: Callable[P, Coroutine[Any, Any, None]], ) -> Callable[P, Coroutine[Any, Any, None]]: @periodic(interval=retry_after) - @silence_exceptions( + @suppress_exceptions( # Replicas will raise CouldNotAcquireLockError # SEE https://github.com/ITISFoundation/osparc-simcore/issues/7574 - (CouldNotAcquireLockError,) + (CouldNotAcquireLockError,), + reason=f"Multiple instances of the periodic task `{coro.__module__}.{coro.__name__}` are running.", ) @exclusive( 
redis_client, @@ -53,6 +53,8 @@ def _decorator( async def _wrapper(*args: P.args, **kwargs: P.kwargs) -> None: return await coro(*args, **kwargs) + # Marks with an identifier (mostly to assert a function has been decorated with this decorator) + setattr(_wrapper, "__exclusive_periodic__", True) # noqa: B010 return _wrapper return _decorator diff --git a/packages/service-library/src/servicelib/celery/__init__.py b/packages/service-library/src/servicelib/celery/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/service-library/src/servicelib/celery/app_server.py b/packages/service-library/src/servicelib/celery/app_server.py new file mode 100644 index 000000000000..c11d4b46acd1 --- /dev/null +++ b/packages/service-library/src/servicelib/celery/app_server.py @@ -0,0 +1,44 @@ +import asyncio +import threading +from abc import ABC, abstractmethod +from asyncio import AbstractEventLoop +from typing import Generic, TypeVar + +from ..celery.task_manager import TaskManager + +T = TypeVar("T") + + +class BaseAppServer(ABC, Generic[T]): + def __init__(self, app: T) -> None: + self._app: T = app + self._shutdown_event: asyncio.Event = asyncio.Event() + + @property + def app(self) -> T: + return self._app + + @property + def event_loop(self) -> AbstractEventLoop: + return self._event_loop + + @event_loop.setter + def event_loop(self, loop: AbstractEventLoop) -> None: + self._event_loop = loop + + @property + def shutdown_event(self) -> asyncio.Event: + return self._shutdown_event + + @property + @abstractmethod + def task_manager(self) -> TaskManager: + raise NotImplementedError + + @abstractmethod + async def run_until_shutdown( + self, + startup_completed_event: threading.Event, + ) -> None: + """Used to initialize the app server until shutdown event is set.""" + raise NotImplementedError diff --git a/packages/service-library/src/servicelib/celery/models.py b/packages/service-library/src/servicelib/celery/models.py new file mode 100644 index 000000000000..2f37e9b70c5d --- /dev/null +++ b/packages/service-library/src/servicelib/celery/models.py @@ -0,0 +1,236 @@ +import datetime +from enum import StrEnum +from typing import Annotated, Final, Literal, Protocol, Self, TypeAlias, TypeVar +from uuid import UUID + +import orjson +from common_library.json_serialization import json_dumps, json_loads +from models_library.progress_bar import ProgressReport +from pydantic import BaseModel, ConfigDict, Field, StringConstraints, model_validator +from pydantic.config import JsonDict + +ModelType = TypeVar("ModelType", bound=BaseModel) + +TaskID: TypeAlias = str +TaskName: TypeAlias = Annotated[ + str, StringConstraints(strip_whitespace=True, min_length=1) +] +TaskUUID: TypeAlias = UUID +_TASK_ID_KEY_DELIMITATOR: Final[str] = ":" +_FORBIDDEN_KEYS = ("*", _TASK_ID_KEY_DELIMITATOR, "=") +_FORBIDDEN_VALUES = (_TASK_ID_KEY_DELIMITATOR, "=") +AllowedTypes = ( + int | float | bool | str | None | list[str] | list[int] | list[float] | list[bool] +) + +Wildcard: TypeAlias = Literal["*"] +WILDCARD: Final[Wildcard] = "*" + + +class OwnerMetadata(BaseModel): + """ + Class for associating metadata with a celery task. The implementation is very flexible and allows the task owner to define their own metadata. + This could be metadata for validating if a user has access to a given task (e.g. user_id or product_name) or metadata for keeping track of how to handle a task, + e.g. which schema will the result of the task have. 
+ + The class exposes a filtering mechanism to list tasks using wildcards. + + Example usage: + class StorageOwnerMetadata(OwnerMetadata): + user_id: int | Wildcard + product_name: int | Wildcard + owner = APP_NAME + + Where APP_NAME is the name of the service. Listing tasks using the filter + `StorageOwnerMetadata(user_id=123, product_name=WILDCARD)` will return all tasks with + user_id 123, any product_name submitted from the service. + + If the metadata schema is known, the class allows deserializing the metadata (recreate_as_model). I.e. one can recover the metadata from the task: + metadata -> task_uuid -> metadata + + """ + + model_config = ConfigDict(extra="allow", frozen=True) + owner: Annotated[ + str, + StringConstraints(min_length=1, pattern=r"^[a-z_-]+$"), + Field( + description='Identifies the service owning the task. Should be the "APP_NAME" of the service.' + ), + ] + + @model_validator(mode="after") + def _check_valid_filters(self) -> Self: + for key, value in self.model_dump().items(): + # forbidden keys + if any(x in key for x in _FORBIDDEN_KEYS): + raise ValueError(f"Invalid filter key: '{key}'") + # forbidden values + if any(x in f"{value}" for x in _FORBIDDEN_VALUES): + raise ValueError(f"Invalid filter value for key '{key}': '{value}'") + + class _TypeValidationModel(BaseModel): + filters: dict[str, AllowedTypes] + + _TypeValidationModel.model_validate({"filters": self.model_dump()}) + return self + + def model_dump_task_id(self, task_uuid: TaskUUID | Wildcard) -> TaskID: + data = self.model_dump(mode="json") + data.update({"task_uuid": f"{task_uuid}"}) + return _TASK_ID_KEY_DELIMITATOR.join( + [f"{k}={json_dumps(v)}" for k, v in sorted(data.items())] + ) + + @classmethod + def model_validate_task_id(cls, task_id: TaskID) -> Self: + data = cls._deserialize_task_id(task_id) + data.pop("task_uuid", None) + return cls.model_validate(data) + + @classmethod + def _deserialize_task_id(cls, task_id: TaskID) -> dict[str, AllowedTypes]: + key_value_pairs = [ + item.split("=") for item in task_id.split(_TASK_ID_KEY_DELIMITATOR) + ] + try: + return {key: json_loads(value) for key, value in key_value_pairs} + except orjson.JSONDecodeError as err: + raise ValueError(f"Invalid task_id format: {task_id}") from err + + @classmethod + def get_task_uuid(cls, task_id: TaskID) -> TaskUUID: + data = cls._deserialize_task_id(task_id) + try: + uuid_string = data["task_uuid"] + if not isinstance(uuid_string, str): + raise ValueError(f"Invalid task_id format: {task_id}") + return TaskUUID(uuid_string) + except ValueError as err: + raise ValueError(f"Invalid task_id format: {task_id}") from err + + +class TaskState(StrEnum): + PENDING = "PENDING" + STARTED = "STARTED" + RETRY = "RETRY" + SUCCESS = "SUCCESS" + FAILURE = "FAILURE" + + +TASK_DONE_STATES: Final[tuple[TaskState, ...]] = ( + TaskState.SUCCESS, + TaskState.FAILURE, +) + + +class TasksQueue(StrEnum): + CPU_BOUND = "cpu_bound" + DEFAULT = "default" + API_WORKER_QUEUE = "api_worker_queue" + + +class ExecutionMetadata(BaseModel): + name: TaskName + ephemeral: bool = True + queue: TasksQueue = TasksQueue.DEFAULT + + +class Task(BaseModel): + uuid: TaskUUID + metadata: ExecutionMetadata + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + "uuid": "123e4567-e89b-12d3-a456-426614174000", + "metadata": { + "name": "task1", + "ephemeral": True, + "queue": "default", + }, + }, + { + "uuid": "223e4567-e89b-12d3-a456-426614174001", + "metadata": { + "name": "task2", + "ephemeral": 
False, + "queue": "cpu_bound", + }, + }, + { + "uuid": "323e4567-e89b-12d3-a456-426614174002", + "metadata": { + "name": "task3", + "ephemeral": True, + "queue": "default", + }, + }, + ] + } + ) + + model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) + + +class TaskInfoStore(Protocol): + async def create_task( + self, + task_id: TaskID, + execution_metadata: ExecutionMetadata, + expiry: datetime.timedelta, + ) -> None: ... + + async def task_exists(self, task_id: TaskID) -> bool: ... + + async def get_task_metadata(self, task_id: TaskID) -> ExecutionMetadata | None: ... + + async def get_task_progress(self, task_id: TaskID) -> ProgressReport | None: ... + + async def list_tasks(self, owner_metadata: OwnerMetadata) -> list[Task]: ... + + async def remove_task(self, task_id: TaskID) -> None: ... + + async def set_task_progress( + self, task_id: TaskID, report: ProgressReport + ) -> None: ... + + +class TaskStatus(BaseModel): + task_uuid: TaskUUID + task_state: TaskState + progress_report: ProgressReport + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + + schema.update( + { + "examples": [ + { + "task_uuid": "123e4567-e89b-12d3-a456-426614174000", + "task_state": "SUCCESS", + "progress_report": { + "actual_value": 0.5, + "total": 1.0, + "attempts": 1, + "unit": "Byte", + "message": { + "description": "some description", + "current": 12.2, + "total": 123, + }, + }, + } + ] + } + ) + + model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) + + @property + def is_done(self) -> bool: + return self.task_state in TASK_DONE_STATES diff --git a/packages/service-library/src/servicelib/celery/task_manager.py b/packages/service-library/src/servicelib/celery/task_manager.py new file mode 100644 index 000000000000..78722dd66454 --- /dev/null +++ b/packages/service-library/src/servicelib/celery/task_manager.py @@ -0,0 +1,43 @@ +from typing import Any, Protocol, runtime_checkable + +from models_library.progress_bar import ProgressReport + +from ..celery.models import ( + ExecutionMetadata, + OwnerMetadata, + Task, + TaskID, + TaskStatus, + TaskUUID, +) + + +@runtime_checkable +class TaskManager(Protocol): + async def submit_task( + self, + execution_metadata: ExecutionMetadata, + *, + owner_metadata: OwnerMetadata, + **task_param + ) -> TaskUUID: ... + + async def cancel_task( + self, owner_metadata: OwnerMetadata, task_uuid: TaskUUID + ) -> None: ... + + async def task_exists(self, task_id: TaskID) -> bool: ... + + async def get_task_result( + self, owner_metadata: OwnerMetadata, task_uuid: TaskUUID + ) -> Any: ... + + async def get_task_status( + self, owner_metadata: OwnerMetadata, task_uuid: TaskUUID + ) -> TaskStatus: ... + + async def list_tasks(self, owner_metadata: OwnerMetadata) -> list[Task]: ... + + async def set_task_progress( + self, task_id: TaskID, report: ProgressReport + ) -> None: ... 
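A hedged usage sketch of the new celery task-manager interfaces: `OwnerMetadata`, `ExecutionMetadata`, `TasksQueue` and the `TaskManager` protocol are the ones introduced above, while `WebServerOwnerMetadata`, the `"export_project"` task name and the extra keyword arguments are purely illustrative.

```python
from servicelib.celery.models import (
    ExecutionMetadata,
    OwnerMetadata,
    TasksQueue,
    Wildcard,
)
from servicelib.celery.task_manager import TaskManager


class WebServerOwnerMetadata(OwnerMetadata):
    # hypothetical service-specific metadata, following the pattern in the
    # OwnerMetadata docstring above
    user_id: int | Wildcard
    product_name: str | Wildcard
    owner: str = "webserver"  # should match the service's APP_NAME


async def submit_export(task_manager: TaskManager, user_id: int) -> None:
    owner_metadata = WebServerOwnerMetadata(user_id=user_id, product_name="osparc")

    # extra keyword arguments are forwarded to the task via **task_param
    task_uuid = await task_manager.submit_task(
        ExecutionMetadata(name="export_project", queue=TasksQueue.DEFAULT),
        owner_metadata=owner_metadata,
        project_id="123",
    )

    # later: poll the status and, once done, fetch the result
    status = await task_manager.get_task_status(owner_metadata, task_uuid)
    if status.is_done:
        _result = await task_manager.get_task_result(owner_metadata, task_uuid)
```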
diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/container_utils.py b/packages/service-library/src/servicelib/container_utils.py similarity index 77% rename from services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/container_utils.py rename to packages/service-library/src/servicelib/container_utils.py index 64b91bf938e2..03d468b4a977 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/container_utils.py +++ b/packages/service-library/src/servicelib/container_utils.py @@ -1,19 +1,35 @@ import asyncio import logging from collections.abc import Sequence -from typing import Any +from typing import Any, Final from aiodocker import Docker, DockerError from aiodocker.execs import Exec from aiodocker.stream import Stream +from common_library.errors_classes import OsparcErrorMixin from pydantic import NonNegativeFloat -from starlette import status -from ..core.errors import ( - ContainerExecCommandFailedError, - ContainerExecContainerNotFoundError, - ContainerExecTimeoutError, -) + +class BaseContainerUtilsError(OsparcErrorMixin, Exception): + pass + + +class ContainerExecContainerNotFoundError(BaseContainerUtilsError): + msg_template = "Container '{container_name}' was not found" + + +class ContainerExecTimeoutError(BaseContainerUtilsError): + msg_template = "Timed out after {timeout} while executing: '{command}'" + + +class ContainerExecCommandFailedError(BaseContainerUtilsError): + msg_template = ( + "Command '{command}' exited with code '{exit_code}'" + "and output: '{command_result}'" + ) + + +_HTTP_404_NOT_FOUND: Final[int] = 404 _logger = logging.getLogger(__name__) @@ -77,10 +93,10 @@ async def run_command_in_container( _execute_command(container_name, command), timeout ) except DockerError as e: - if e.status == status.HTTP_404_NOT_FOUND: + if e.status == _HTTP_404_NOT_FOUND: raise ContainerExecContainerNotFoundError( container_name=container_name ) from e raise - except asyncio.TimeoutError as e: + except TimeoutError as e: raise ContainerExecTimeoutError(timeout=timeout, command=command) from e diff --git a/packages/service-library/src/servicelib/db_asyncpg_utils.py b/packages/service-library/src/servicelib/db_asyncpg_utils.py index f9dfd27c2d8c..d72f77d95159 100644 --- a/packages/service-library/src/servicelib/db_asyncpg_utils.py +++ b/packages/service-library/src/servicelib/db_asyncpg_utils.py @@ -18,7 +18,7 @@ @retry(**PostgresRetryPolicyUponInitialization(_logger).kwargs) async def create_async_engine_and_database_ready( - settings: PostgresSettings, + settings: PostgresSettings, application_name: str ) -> AsyncEngine: """ - creates asyncio engine @@ -30,17 +30,17 @@ async def create_async_engine_and_database_ready( raise_if_migration_not_ready, ) - server_settings = None - if settings.POSTGRES_CLIENT_NAME: - assert isinstance(settings.POSTGRES_CLIENT_NAME, str) # nosec - server_settings = { - "application_name": settings.POSTGRES_CLIENT_NAME, - } + server_settings = { + "jit": "off", + "application_name": settings.client_name( + f"{application_name}", suffix="asyncpg" + ), + } engine = create_async_engine( settings.dsn_with_async_sqlalchemy, - pool_size=settings.POSTGRES_MINSIZE, - max_overflow=settings.POSTGRES_MAXSIZE - settings.POSTGRES_MINSIZE, + pool_size=settings.POSTGRES_MAX_POOLSIZE, + max_overflow=settings.POSTGRES_MAX_OVERFLOW, connect_args={"server_settings": server_settings}, pool_pre_ping=True, # https://docs.sqlalchemy.org/en/14/core/pooling.html#dealing-with-disconnects future=True, # this 
uses sqlalchemy 2.0 API, shall be removed when sqlalchemy 2.0 is released @@ -71,7 +71,7 @@ async def check_postgres_liveness(engine: AsyncEngine) -> LivenessResult: @contextlib.asynccontextmanager async def with_async_pg_engine( - settings: PostgresSettings, + settings: PostgresSettings, *, application_name: str ) -> AsyncIterator[AsyncEngine]: """ Creates an asyncpg engine and ensures it is properly closed after use. @@ -82,14 +82,16 @@ async def with_async_pg_engine( logging.DEBUG, f"connection to db {settings.dsn_with_async_sqlalchemy}", ): - server_settings = None - if settings.POSTGRES_CLIENT_NAME: - assert isinstance(settings.POSTGRES_CLIENT_NAME, str) + server_settings = { + "application_name": settings.client_name( + application_name, suffix="asyncpg" + ), + } engine = create_async_engine( settings.dsn_with_async_sqlalchemy, - pool_size=settings.POSTGRES_MINSIZE, - max_overflow=settings.POSTGRES_MAXSIZE - settings.POSTGRES_MINSIZE, + pool_size=settings.POSTGRES_MAX_POOLSIZE, + max_overflow=settings.POSTGRES_MAX_OVERFLOW, connect_args={"server_settings": server_settings}, pool_pre_ping=True, # https://docs.sqlalchemy.org/en/14/core/pooling.html#dealing-with-disconnects future=True, # this uses sqlalchemy 2.0 API, shall be removed when sqlalchemy 2.0 is released diff --git a/packages/service-library/src/servicelib/deferred_tasks/__init__.py b/packages/service-library/src/servicelib/deferred_tasks/__init__.py index dd57b0838103..be2491ffb6f5 100644 --- a/packages/service-library/src/servicelib/deferred_tasks/__init__.py +++ b/packages/service-library/src/servicelib/deferred_tasks/__init__.py @@ -22,14 +22,16 @@ - `cancel`: (called by the user) [optional]: send a message to cancel the current task. A warning will be logged but no call to either `on_result` or `on_finished_with_error` will occur. +- `on_cancelled` (called by state `ManuallyCancelled`) [optional] {can be overwritten by the user}: + called after the cancellation is handled by the worker executing the `run` ## DeferredHandler lifecycle ```mermaid stateDiagram-v2 - * --> Scheduled: via [start] - ** --> ManuallyCancelled: via [cancel] + (1) --> Scheduled: via [start] + (2) --> ManuallyCancelled: via [cancel] ManuallyCancelled --> Worker: attempts to cancel task in @@ -41,9 +43,10 @@ ErrorResult --> FinishedWithError: gives up when out of retries or if cancelled Worker --> DeferredResult: success - DeferredResult --> °: calls [on_result] - FinishedWithError --> °°: calls [on_finished_with_error] - Worker --> °°°: task cancelled + DeferredResult --> (3): calls [on_result] + FinishedWithError --> (4): calls [on_finished_with_error] + Worker --> Removed*: task cancelled + Removed* --> (5): calls [on_cancelled] ``` ### States @@ -57,6 +60,7 @@ - `FinishedWIthError`: logs error, invokes `on_finished_with_error` and removes the schedule - `DeferredResult`: invokes `on_result` and removes the schedule - `ManuallyCancelled`: sends message to all instances to cancel. 
The instance handling the task will cancel the task and remove the schedule +- `Removed*`: a pseudo-state that does not actually exist; it is used only to convey that the cancellation event is triggered after the schedule has been removed """ from ._base_deferred_handler import ( diff --git a/packages/service-library/src/servicelib/deferred_tasks/_base_deferred_handler.py b/packages/service-library/src/servicelib/deferred_tasks/_base_deferred_handler.py index 3c5110ef8f83..cf42eff26f47 100644 --- a/packages/service-library/src/servicelib/deferred_tasks/_base_deferred_handler.py +++ b/packages/service-library/src/servicelib/deferred_tasks/_base_deferred_handler.py @@ -45,6 +45,21 @@ async def get_retries(cls, context: DeferredContext) -> NonNegativeInt: assert context # nosec return 0 + @classmethod + async def get_retry_delay( + cls, + context: DeferredContext, + remaining_attempts: NonNegativeInt, + total_attempts: NonNegativeInt, + ) -> timedelta: + """ + returns: the delay between each retry attempt (default: 0s) + """ + assert context # nosec + assert remaining_attempts # nosec + assert total_attempts # nosec + return timedelta(seconds=0) + @classmethod @abstractmethod async def get_timeout(cls, context: DeferredContext) -> timedelta: @@ -84,6 +99,11 @@ async def on_finished_with_error( NOTE: by design the default action is to do nothing """ + @classmethod + @abstractmethod + async def on_cancelled(cls, context: DeferredContext) -> None: + """called after the ``cancel`` request has been handled by the copy executing ``run``""" + @classmethod async def cancel(cls, task_uid: TaskUID) -> None: """cancels a deferred""" diff --git a/packages/service-library/src/servicelib/deferred_tasks/_deferred_manager.py b/packages/service-library/src/servicelib/deferred_tasks/_deferred_manager.py index b49990a78341..c1b26a3d8478 100644 --- a/packages/service-library/src/servicelib/deferred_tasks/_deferred_manager.py +++ b/packages/service-library/src/servicelib/deferred_tasks/_deferred_manager.py @@ -2,18 +2,24 @@ import inspect import logging from collections.abc import Awaitable, Callable, Iterable -from datetime import timedelta +from datetime import datetime, timedelta from enum import Enum from typing import Any, Final import arrow from faststream.exceptions import NackMessage, RejectMessage -from faststream.rabbit import ExchangeType, RabbitBroker, RabbitExchange, RabbitRouter +from faststream.rabbit import ( + ExchangeType, + RabbitBroker, + RabbitExchange, + RabbitQueue, + RabbitRouter, +) from pydantic import NonNegativeInt -from servicelib.logging_utils import log_catch, log_context -from servicelib.redis import RedisClientSDK from settings_library.rabbit import RabbitSettings +from ..logging_utils import log_catch, log_context +from ..redis import RedisClientSDK from ._base_deferred_handler import ( BaseDeferredHandler, DeferredContext, @@ -112,6 +118,14 @@ def _raise_if_not_type(task_result: Any, expected_types: Iterable[type]) -> None raise TypeError(msg) +async def _wait_until_future_date(possible_future_date: datetime) -> None: + while True: + now = arrow.utcnow().datetime + if now >= possible_future_date: + return + await asyncio.sleep(1) + + class DeferredManager: # pylint:disable=too-many-instance-attributes def __init__( self, @@ -149,10 +163,14 @@ def __init__( self._global_resources_prefix = f"{calling_module_name}" self.common_exchange = RabbitExchange( - f"{self._global_resources_prefix}_common", type=ExchangeType.DIRECT + f"{self._global_resources_prefix}_common", + durable=True, + type=ExchangeType.DIRECT, )
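Aside (not part of the patch): the two hooks added above, `get_retry_delay` and `on_cancelled`, are easiest to read from a concrete handler's point of view. Below is a minimal sketch; the import path, the generic `[str]` parameter, and the remaining abstract hooks (`start`, `run`, `on_result`, ...) are assumed from the package layout and elided or hypothetical here.

```python
# Illustrative sketch only, not part of this diff.
# Assumes BaseDeferredHandler/DeferredContext are re-exported by servicelib.deferred_tasks.
from datetime import timedelta

from pydantic import NonNegativeInt
from servicelib.deferred_tasks import BaseDeferredHandler, DeferredContext


class ExampleHandler(BaseDeferredHandler[str]):
    # other abstract hooks (start, run, on_result, ...) elided for brevity

    @classmethod
    async def get_timeout(cls, context: DeferredContext) -> timedelta:
        return timedelta(minutes=5)  # `run` is aborted after this

    @classmethod
    async def get_retries(cls, context: DeferredContext) -> NonNegativeInt:
        return 3  # stored as total_attempts in the schedule

    @classmethod
    async def get_retry_delay(
        cls,
        context: DeferredContext,
        remaining_attempts: NonNegativeInt,
        total_attempts: NonNegativeInt,
    ) -> timedelta:
        # new in this PR: back off more the fewer attempts remain (default is 0s)
        return timedelta(seconds=2 * (total_attempts - remaining_attempts + 1))

    @classmethod
    async def on_cancelled(cls, context: DeferredContext) -> None:
        # new in this PR: called on the instance that was executing `run`
        # once the cancel request has been handled
        ...
```

The manager persists the computed delay as `wait_cancellation_until` and sleeps until that deadline before re-submitting the task, as can be seen in `_fs_handle_error_result` further down.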
self.cancellation_exchange = RabbitExchange( - f"{self._global_resources_prefix}_cancellation", type=ExchangeType.FANOUT + f"{self._global_resources_prefix}_cancellation", + durable=True, + type=ExchangeType.FANOUT, ) def patch_based_deferred_handlers(self) -> None: @@ -243,8 +261,10 @@ def un_patch_base_deferred_handlers(cls) -> None: subclass.is_present.original_is_present # type: ignore ) - def _get_global_queue_name(self, queue_name: _FastStreamRabbitQueue) -> str: - return f"{self._global_resources_prefix}_{queue_name}" + def _get_global_queue(self, queue_name: _FastStreamRabbitQueue) -> RabbitQueue: + return RabbitQueue( + f"{self._global_resources_prefix}_{queue_name}", durable=True + ) def __get_subclass( self, class_unique_reference: ClassUniqueReference @@ -259,7 +279,7 @@ async def __publish_to_queue( ) -> None: await self.broker.publish( task_uid, - queue=self._get_global_queue_name(queue), + queue=self._get_global_queue(queue), exchange=( self.cancellation_exchange if queue == _FastStreamRabbitQueue.MANUALLY_CANCELLED @@ -285,18 +305,21 @@ async def __start( subclass = self.__get_subclass(class_unique_reference) deferred_context = self.__get_deferred_context(start_context) + retry_count = await subclass.get_retries(deferred_context) task_schedule = TaskScheduleModel( timeout=await subclass.get_timeout(deferred_context), - execution_attempts=await subclass.get_retries(deferred_context) + 1, + total_attempts=retry_count, + execution_attempts=retry_count + 1, class_unique_reference=class_unique_reference, start_context=start_context, state=TaskState.SCHEDULED, ) + await self._task_tracker.save(task_uid, task_schedule) + with log_catch(_logger, reraise=False): await subclass.on_created(task_uid, deferred_context) - await self._task_tracker.save(task_uid, task_schedule) _logger.debug("Scheduled task '%s' with entry: %s", task_uid, task_schedule) await self.__publish_to_queue(task_uid, _FastStreamRabbitQueue.SCHEDULED) @@ -448,7 +471,29 @@ async def _fs_handle_error_result( # pylint:disable=method-hidden task_schedule.result, TaskResultCancelledError ): _logger.debug("Schedule retry attempt for task_uid '%s'", task_uid) - # does not retry if task was cancelled + + # resilient wait before retrying + if task_schedule.wait_cancellation_until is None: + # compute and save the retry deadline + subclass = self.__get_subclass(task_schedule.class_unique_reference) + deferred_context = self.__get_deferred_context( + task_schedule.start_context ) + sleep_interval = await subclass.get_retry_delay( + context=deferred_context, + remaining_attempts=task_schedule.execution_attempts, + total_attempts=task_schedule.total_attempts, + ) + task_schedule.wait_cancellation_until = ( + arrow.utcnow().datetime + sleep_interval + ) + await self._task_tracker.save(task_uid, task_schedule) + + await _wait_until_future_date(task_schedule.wait_cancellation_until) + task_schedule.wait_cancellation_until = None + await self._task_tracker.save(task_uid, task_schedule) + + # waiting is done, can proceed with retry task_schedule.state = TaskState.SUBMIT_TASK await self._task_tracker.save(task_uid, task_schedule) await self.__publish_to_queue(task_uid, _FastStreamRabbitQueue.SUBMIT_TASK) @@ -558,6 +603,11 @@ async def _fs_handle_manually_cancelled( # pylint:disable=method-hidden _logger.info("Found and cancelled run for '%s'", task_uid) await self.__remove_task(task_uid, task_schedule) + subclass = self.__get_subclass(task_schedule.class_unique_reference) + deferred_context = self.__get_deferred_context(task_schedule.start_context) + 
with log_catch(_logger, reraise=False): + await subclass.on_cancelled(deferred_context) + async def __is_present(self, task_uid: TaskUID) -> bool: task_schedule: TaskScheduleModel | None = await self._task_tracker.get(task_uid) return task_schedule is not None @@ -569,47 +619,43 @@ def _register_subscribers(self) -> None: # pylint:disable=unexpected-keyword-arg # pylint:disable=no-value-for-parameter self._fs_handle_scheduled = self.router.subscriber( - queue=self._get_global_queue_name(_FastStreamRabbitQueue.SCHEDULED), + queue=self._get_global_queue(_FastStreamRabbitQueue.SCHEDULED), exchange=self.common_exchange, retry=True, )(self._fs_handle_scheduled) self._fs_handle_submit_task = self.router.subscriber( - queue=self._get_global_queue_name(_FastStreamRabbitQueue.SUBMIT_TASK), + queue=self._get_global_queue(_FastStreamRabbitQueue.SUBMIT_TASK), exchange=self.common_exchange, retry=True, )(self._fs_handle_submit_task) self._fs_handle_worker = self.router.subscriber( - queue=self._get_global_queue_name(_FastStreamRabbitQueue.WORKER), + queue=self._get_global_queue(_FastStreamRabbitQueue.WORKER), exchange=self.common_exchange, retry=True, )(self._fs_handle_worker) self._fs_handle_error_result = self.router.subscriber( - queue=self._get_global_queue_name(_FastStreamRabbitQueue.ERROR_RESULT), + queue=self._get_global_queue(_FastStreamRabbitQueue.ERROR_RESULT), exchange=self.common_exchange, retry=True, )(self._fs_handle_error_result) self._fs_handle_finished_with_error = self.router.subscriber( - queue=self._get_global_queue_name( - _FastStreamRabbitQueue.FINISHED_WITH_ERROR - ), + queue=self._get_global_queue(_FastStreamRabbitQueue.FINISHED_WITH_ERROR), exchange=self.common_exchange, retry=True, )(self._fs_handle_finished_with_error) self._fs_handle_deferred_result = self.router.subscriber( - queue=self._get_global_queue_name(_FastStreamRabbitQueue.DEFERRED_RESULT), + queue=self._get_global_queue(_FastStreamRabbitQueue.DEFERRED_RESULT), exchange=self.common_exchange, retry=True, )(self._fs_handle_deferred_result) self._fs_handle_manually_cancelled = self.router.subscriber( - queue=self._get_global_queue_name( - _FastStreamRabbitQueue.MANUALLY_CANCELLED - ), + queue=self._get_global_queue(_FastStreamRabbitQueue.MANUALLY_CANCELLED), exchange=self.cancellation_exchange, retry=True, )(self._fs_handle_manually_cancelled) diff --git a/packages/service-library/src/servicelib/deferred_tasks/_task_schedule.py b/packages/service-library/src/servicelib/deferred_tasks/_task_schedule.py index 5a88b99568b3..8d34e1081637 100644 --- a/packages/service-library/src/servicelib/deferred_tasks/_task_schedule.py +++ b/packages/service-library/src/servicelib/deferred_tasks/_task_schedule.py @@ -1,7 +1,9 @@ from datetime import datetime, timedelta from enum import Enum +from typing import Annotated import arrow +from common_library.basic_types import DEFAULT_FACTORY from pydantic import BaseModel, Field, NonNegativeInt from ._base_deferred_handler import StartContext @@ -23,37 +25,63 @@ class TaskState(str, Enum): class TaskScheduleModel(BaseModel): - timeout: timedelta = Field( - ..., description="Amount of time after which the task execution will time out" - ) - class_unique_reference: ClassUniqueReference = Field( - ..., - description="reference to the class containing the code and handlers for the execution of the task", - ) - start_context: StartContext = Field( - ..., - description="data used to assemble the ``StartContext``", - ) - - state: TaskState = Field( - ..., description="represents the execution 
step of the task" - ) - - execution_attempts: NonNegativeInt = Field( - ..., - description="remaining attempts to run the code, only retries if this is > 0", - ) - - time_started: datetime = Field( - default_factory=lambda: arrow.utcnow().datetime, - description="time when task schedule was created, used for statistics", - ) - - result: TaskExecutionResult | None = Field( - default=None, - description=( - f"Populated by {TaskState.WORKER}. It always has a value after worker handles it." - "Will be used " + timeout: Annotated[ + timedelta, + Field( + description="Amount of time after which the task execution will time out" ), - discriminator="result_type", - ) + ] + class_unique_reference: Annotated[ + ClassUniqueReference, + Field( + description="reference to the class containing the code and handlers for the execution of the task", + ), + ] + start_context: Annotated[ + StartContext, + Field( + description="data used to assemble the ``StartContext``", + ), + ] + + state: Annotated[ + TaskState, Field(description="represents the execution step of the task") + ] + + total_attempts: Annotated[ + NonNegativeInt, + Field( + description="maximum number of attempts before giving up (0 means no retries)" + ), + ] + + execution_attempts: Annotated[ + NonNegativeInt, + Field( + description="remaining attempts to run the code, only retries if this is > 0", + ), + ] + + wait_cancellation_until: Annotated[ + datetime | None, + Field(description="when set has to wait till this before cancelling the task"), + ] = None + + time_started: Annotated[ + datetime, + Field( + default_factory=lambda: arrow.utcnow().datetime, + description="time when task schedule was created, used for statistics", + ), + ] = DEFAULT_FACTORY + + result: Annotated[ + TaskExecutionResult | None, + Field( + description=( + f"Populated by {TaskState.WORKER}. It always has a value after worker handles it." 
+ "Will be used " + ), + discriminator="result_type", + ), + ] = None diff --git a/packages/service-library/src/servicelib/deferred_tasks/_worker_tracker.py b/packages/service-library/src/servicelib/deferred_tasks/_worker_tracker.py index bcf9ce5ec2ab..1ac791e1ade5 100644 --- a/packages/service-library/src/servicelib/deferred_tasks/_worker_tracker.py +++ b/packages/service-library/src/servicelib/deferred_tasks/_worker_tracker.py @@ -57,6 +57,12 @@ async def handle_run( result_to_return = TaskResultSuccess(value=task_result) except asyncio.CancelledError: result_to_return = TaskResultCancelledError() + # NOTE: if the task is itself cancelled it shall re-raise: see https://superfastpython.com/asyncio-cancellederror-consumed/ + current_task = asyncio.current_task() + assert current_task is not None # nosec + if current_task.cancelling() > 0: + # owner function is being cancelled -> propagate cancellation + raise except Exception as e: # pylint:disable=broad-exception-caught result_to_return = TaskResultError( error=_format_exception(e), diff --git a/packages/service-library/src/servicelib/docker_utils.py b/packages/service-library/src/servicelib/docker_utils.py index 552a6d936047..a919cb9487d7 100644 --- a/packages/service-library/src/servicelib/docker_utils.py +++ b/packages/service-library/src/servicelib/docker_utils.py @@ -22,6 +22,7 @@ ) from settings_library.docker_registry import RegistrySettings from tenacity import ( + before_sleep_log, retry, retry_if_exception_type, stop_after_attempt, @@ -275,6 +276,7 @@ def _reset_progress_from_previous_attempt() -> None: stop=stop_after_attempt(retry_upon_error_count), reraise=True, retry=retry_if_exception_type(asyncio.TimeoutError), + before_sleep=before_sleep_log(_logger, logging.WARNING), ) async def _pull_image_with_retry() -> None: nonlocal attempt diff --git a/packages/service-library/src/servicelib/exception_utils.py b/packages/service-library/src/servicelib/exception_utils.py index 2de33fd98e65..76bfb149606f 100644 --- a/packages/service-library/src/servicelib/exception_utils.py +++ b/packages/service-library/src/servicelib/exception_utils.py @@ -5,6 +5,7 @@ from functools import wraps from typing import Any, Final, ParamSpec, TypeVar +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from pydantic import BaseModel, Field, NonNegativeFloat, PrivateAttr _logger = logging.getLogger(__name__) @@ -76,9 +77,65 @@ def else_reset(self) -> None: F = TypeVar("F", bound=Callable[..., Any]) -def silence_exceptions(exceptions: tuple[type[BaseException], ...]) -> Callable[[F], F]: - def _decorator(func_or_coro: F) -> F: +def _should_suppress_exception( + exc: BaseException, + predicate: Callable[[BaseException], bool] | None, + func_name: str, +) -> bool: + if predicate is None: + # No predicate provided, suppress all exceptions + return True + + try: + return predicate(exc) + except Exception as predicate_exc: # pylint: disable=broad-except + # the predicate function raised an exception + # log it and do not suppress the original exception + _logger.warning( + **create_troubleshooting_log_kwargs( + f"Predicate function raised exception {type(predicate_exc).__name__}:{predicate_exc} in {func_name}. 
" + f"Original exception will be re-raised: {type(exc).__name__}", + error=predicate_exc, + error_context={ + "func_name": func_name, + "original_exception": f"{type(exc).__name__}", + }, + tip="Predicate raised, please fix it.", + ) + ) + return False + + +def suppress_exceptions( + exceptions: tuple[type[BaseException], ...], + *, + reason: str, + predicate: Callable[[BaseException], bool] | None = None, +) -> Callable[[F], F]: + """ + Decorator to suppress specified exceptions. + + Args: + exceptions: Tuple of exception types to suppress + reason: Reason for suppression (for logging) + predicate: Optional function to check exception attributes. + If provided, exception is only suppressed if predicate returns True. + + Example: + # Suppress all ConnectionError exceptions + @suppress_exceptions((ConnectionError,), reason="Network issues") + def my_func(): ... + + # Suppress only ConnectionError with specific errno + @suppress_exceptions( + (ConnectionError,), + reason="Specific network error", + predicate=lambda e: hasattr(e, 'errno') and e.errno == 104 + ) + def my_func(): ... + """ + def _decorator(func_or_coro: F) -> F: if inspect.iscoroutinefunction(func_or_coro): @wraps(func_or_coro) @@ -86,7 +143,19 @@ async def _async_wrapper(*args, **kwargs) -> Any: try: assert inspect.iscoroutinefunction(func_or_coro) # nosec return await func_or_coro(*args, **kwargs) - except exceptions: + except exceptions as exc: + # Check if exception should be suppressed + if not _should_suppress_exception( + exc, predicate, func_or_coro.__name__ + ): + raise # Re-raise if predicate returns False or fails + + _logger.debug( + "Caught suppressed exception %s in %s: TIP: %s", + exc, + func_or_coro.__name__, + reason, + ) return None return _async_wrapper # type: ignore[return-value] # decorators typing is hard @@ -95,7 +164,19 @@ async def _async_wrapper(*args, **kwargs) -> Any: def _sync_wrapper(*args, **kwargs) -> Any: try: return func_or_coro(*args, **kwargs) - except exceptions: + except exceptions as exc: + # Check if exception should be suppressed + if not _should_suppress_exception( + exc, predicate, func_or_coro.__name__ + ): + raise # Re-raise if predicate returns False or fails + + _logger.debug( + "Caught suppressed exception %s in %s: TIP: %s", + exc, + func_or_coro.__name__, + reason, + ) return None return _sync_wrapper # type: ignore[return-value] # decorators typing is hard diff --git a/packages/service-library/src/servicelib/fastapi/cancellation_middleware.py b/packages/service-library/src/servicelib/fastapi/cancellation_middleware.py index 8116869af5db..0f7003137bbe 100644 --- a/packages/service-library/src/servicelib/fastapi/cancellation_middleware.py +++ b/packages/service-library/src/servicelib/fastapi/cancellation_middleware.py @@ -10,7 +10,7 @@ _logger = logging.getLogger(__name__) -class _TerminateTaskGroupError(Exception): +class _ClientDisconnectedError(Exception): pass @@ -21,9 +21,9 @@ async def _message_poller( message = await receive() if message["type"] == "http.disconnect": _logger.debug( - "client disconnected, terminating request to %s!", request.url + "client disconnected the request to %s!", request.url, stacklevel=2 ) - raise _TerminateTaskGroupError + raise _ClientDisconnectedError # Puts the message in the queue await queue.put(message) @@ -72,9 +72,9 @@ async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: ) await handler_task poller_task.cancel() - except* _TerminateTaskGroupError: + except* _ClientDisconnectedError: if not handler_task.done(): 
_logger.info( - "The client disconnected. request to %s was cancelled.", + "The client disconnected. The request to %s was cancelled.", request.url, ) diff --git a/packages/service-library/src/servicelib/fastapi/celery/__init__.py b/packages/service-library/src/servicelib/fastapi/celery/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/service-library/src/servicelib/fastapi/celery/app_server.py b/packages/service-library/src/servicelib/fastapi/celery/app_server.py new file mode 100644 index 000000000000..3c42aa9144d0 --- /dev/null +++ b/packages/service-library/src/servicelib/fastapi/celery/app_server.py @@ -0,0 +1,35 @@ +import datetime +import logging +import threading +from typing import Final + +from asgi_lifespan import LifespanManager +from fastapi import FastAPI + +from ...celery.app_server import BaseAppServer +from ...celery.task_manager import TaskManager + +_SHUTDOWN_TIMEOUT: Final[float] = datetime.timedelta(seconds=10).total_seconds() + +_logger = logging.getLogger(__name__) + + +class FastAPIAppServer(BaseAppServer[FastAPI]): + @property + def task_manager(self) -> TaskManager: + task_manager = self.app.state.task_manager + assert task_manager, "Task manager is not initialized" # nosec + assert isinstance(task_manager, TaskManager) + return task_manager + + async def run_until_shutdown( + self, startup_completed_event: threading.Event + ) -> None: + async with LifespanManager( + self.app, + startup_timeout=None, # waits for full app initialization (DB migrations, etc.) + shutdown_timeout=_SHUTDOWN_TIMEOUT, + ): + _logger.info("fastapi app initialized") + startup_completed_event.set() + await self.shutdown_event.wait() # NOTE: wait here until shutdown is requested diff --git a/packages/service-library/src/servicelib/fastapi/client_session.py b/packages/service-library/src/servicelib/fastapi/client_session.py index b92dcc2d525c..f9c126272eec 100644 --- a/packages/service-library/src/servicelib/fastapi/client_session.py +++ b/packages/service-library/src/servicelib/fastapi/client_session.py @@ -2,13 +2,17 @@ import httpx from fastapi import FastAPI +from settings_library.tracing import TracingSettings + +from .tracing import setup_httpx_client_tracing def setup_client_session( app: FastAPI, *, default_timeout: datetime.timedelta = datetime.timedelta(seconds=20), - max_keepalive_connections: int = 20 + max_keepalive_connections: int = 20, + tracing_settings: TracingSettings | None, ) -> None: async def on_startup() -> None: session = httpx.AsyncClient( @@ -16,6 +20,8 @@ async def on_startup() -> None: limits=httpx.Limits(max_keepalive_connections=max_keepalive_connections), timeout=default_timeout.total_seconds(), ) + if tracing_settings: + setup_httpx_client_tracing(session) app.state.aiohttp_client_session = session async def on_shutdown() -> None: diff --git a/packages/service-library/src/servicelib/fastapi/db_asyncpg_engine.py b/packages/service-library/src/servicelib/fastapi/db_asyncpg_engine.py index 8f472dc9b518..c089b81e034a 100644 --- a/packages/service-library/src/servicelib/fastapi/db_asyncpg_engine.py +++ b/packages/service-library/src/servicelib/fastapi/db_asyncpg_engine.py @@ -14,7 +14,9 @@ _logger = logging.getLogger(__name__) -async def connect_to_db(app: FastAPI, settings: PostgresSettings) -> None: +async def connect_to_db( + app: FastAPI, settings: PostgresSettings, application_name: str +) -> None: warnings.warn( "The 'connect_to_db' function is deprecated and will be removed in a future release. 
" "Please use 'postgres_lifespan' instead for managing the database connection lifecycle.", @@ -27,7 +29,9 @@ async def connect_to_db(app: FastAPI, settings: PostgresSettings) -> None: logging.DEBUG, f"Connecting and migraging {settings.dsn_with_async_sqlalchemy}", ): - engine = await create_async_engine_and_database_ready(settings) + engine = await create_async_engine_and_database_ready( + settings, application_name + ) app.state.engine = engine _logger.debug( diff --git a/packages/service-library/src/servicelib/fastapi/http_client_thin.py b/packages/service-library/src/servicelib/fastapi/http_client_thin.py index e4806f88bcfb..a62461f00091 100644 --- a/packages/service-library/src/servicelib/fastapi/http_client_thin.py +++ b/packages/service-library/src/servicelib/fastapi/http_client_thin.py @@ -8,7 +8,6 @@ from common_library.errors_classes import OsparcErrorMixin from httpx import AsyncClient, ConnectError, HTTPError, PoolTimeout, Response from httpx._types import TimeoutTypes, URLTypes -from servicelib.fastapi.tracing import setup_httpx_client_tracing from settings_library.tracing import TracingSettings from tenacity import RetryCallState from tenacity.asyncio import AsyncRetrying @@ -18,6 +17,7 @@ from tenacity.wait import wait_exponential from .http_client import BaseHTTPApi +from .tracing import setup_httpx_client_tracing _logger = logging.getLogger(__name__) @@ -128,7 +128,7 @@ def retry_on_errors( """ def decorator( - request_func: Callable[..., Awaitable[Response]] + request_func: Callable[..., Awaitable[Response]], ) -> Callable[..., Awaitable[Response]]: assert asyncio.iscoroutinefunction(request_func) @@ -178,7 +178,7 @@ def expect_status( """ def decorator( - request_func: Callable[..., Awaitable[Response]] + request_func: Callable[..., Awaitable[Response]], ) -> Callable[..., Awaitable[Response]]: assert asyncio.iscoroutinefunction(request_func) diff --git a/packages/service-library/src/servicelib/fastapi/http_error.py b/packages/service-library/src/servicelib/fastapi/http_error.py index 2cc9814dc8fa..1b078111a7d9 100644 --- a/packages/service-library/src/servicelib/fastapi/http_error.py +++ b/packages/service-library/src/servicelib/fastapi/http_error.py @@ -2,6 +2,7 @@ from collections.abc import Awaitable, Callable from typing import TypeVar +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from fastapi import FastAPI, HTTPException, status from fastapi.encoders import jsonable_encoder from fastapi.exceptions import RequestValidationError @@ -11,7 +12,6 @@ from fastapi.responses import JSONResponse from pydantic import ValidationError -from ..logging_errors import create_troubleshotting_log_kwargs from ..status_codes_utils import is_5xx_server_error validation_error_response_definition["properties"] = { @@ -23,7 +23,7 @@ } -TException = TypeVar("TException") +TException = TypeVar("TException", bound=BaseException) _logger = logging.getLogger(__name__) @@ -48,24 +48,35 @@ async def _http_error_handler(request: Request, exc: Exception) -> JSONResponse: "errors": error_extractor(exc) if error_extractor else [f"{exc}"] } + response = JSONResponse( + content=jsonable_encoder( + {"error": error_content} if envelope_error else error_content + ), + status_code=status_code, + ) + if is_5xx_server_error(status_code): _logger.exception( - create_troubleshotting_log_kwargs( - "Unexpected error happened in the Resource Usage Tracker. Please contact support.", + create_troubleshooting_log_kwargs( + f"A 5XX server error happened in current service. 
Responding with {error_content} and {status_code} status code", error=exc, error_context={ "request": request, - "request.method": f"{request.method}", + "request.client_host": ( + request.client.host if request.client else "unknown" + ), + "request.method": request.method, + "request.url_path": request.url.path, + "request.query_params": dict(request.query_params), + "request.headers": dict(request.headers), + "response": response, + "response.error_content": error_content, + "response.status_code": status_code, }, ) ) - return JSONResponse( - content=jsonable_encoder( - {"error": error_content} if envelope_error else error_content - ), - status_code=status_code, - ) + return response return _http_error_handler diff --git a/packages/service-library/src/servicelib/fastapi/lifespan_utils.py b/packages/service-library/src/servicelib/fastapi/lifespan_utils.py index 4ccf04109304..894d91788550 100644 --- a/packages/service-library/src/servicelib/fastapi/lifespan_utils.py +++ b/packages/service-library/src/servicelib/fastapi/lifespan_utils.py @@ -1,6 +1,6 @@ import contextlib -from collections.abc import Iterator -from typing import Final +from collections.abc import AsyncIterator, Callable, Iterator +from typing import Final, TypeAlias from common_library.errors_classes import OsparcErrorMixin from fastapi import FastAPI @@ -8,6 +8,8 @@ from ..logging_utils import log_context +Lifespan: TypeAlias = Callable[[FastAPI], AsyncIterator[None]] + class LifespanError(OsparcErrorMixin, RuntimeError): ... diff --git a/packages/service-library/src/servicelib/fastapi/logging_lifespan.py b/packages/service-library/src/servicelib/fastapi/logging_lifespan.py new file mode 100644 index 000000000000..035d9bc10aa7 --- /dev/null +++ b/packages/service-library/src/servicelib/fastapi/logging_lifespan.py @@ -0,0 +1,78 @@ +import logging +from collections.abc import AsyncIterator, Awaitable, Callable +from contextlib import AsyncExitStack + +from common_library.logging.logging_utils_filtering import LoggerName, MessageSubstring +from fastapi import FastAPI +from settings_library.tracing import TracingSettings + +from ..logging_utils import ( + LogLevelInt, + async_loggers, + log_context, +) +from .lifespan_utils import Lifespan + +_logger = logging.getLogger(__name__) + + +def create_logging_lifespan( + *, + log_format_local_dev_enabled: bool, + logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], + tracing_settings: TracingSettings | None, + log_base_level: LogLevelInt, + noisy_loggers: tuple[str, ...] | None, +) -> Lifespan: + """Returns a FastAPI-compatible lifespan handler to set up async logging.""" + exit_stack = AsyncExitStack() + exit_stack.enter_context( + async_loggers( + log_base_level=log_base_level, + noisy_loggers=noisy_loggers, + log_format_local_dev_enabled=log_format_local_dev_enabled, + logger_filter_mapping=logger_filter_mapping, + tracing_settings=tracing_settings, + ) + ) + + async def _logging_lifespan(app: FastAPI) -> AsyncIterator[None]: + assert app is not None, "app must be provided" + yield + with log_context(_logger, logging.INFO, "Re-enable Blocking logger"): + await exit_stack.aclose() + + return _logging_lifespan + + +def create_logging_shutdown_event( + *, + log_format_local_dev_enabled: bool, + logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], + tracing_settings: TracingSettings | None, + log_base_level: LogLevelInt, + noisy_loggers: tuple[str, ...] 
| None, +) -> Callable[[], Awaitable[None]]: + """returns a fastapi-compatible shutdown event handler to be used with old style lifespan + handlers. This is useful for applications that do not use the new async lifespan + handlers introduced in fastapi 0.100.0. + + Note: This function is for backwards compatibility only and will be removed in the future. + create_logging_lifespan should be used instead for new style lifespan handlers. + """ + exit_stack = AsyncExitStack() + exit_stack.enter_context( + async_loggers( + log_base_level=log_base_level, + noisy_loggers=noisy_loggers, + log_format_local_dev_enabled=log_format_local_dev_enabled, + logger_filter_mapping=logger_filter_mapping, + tracing_settings=tracing_settings, + ) + ) + + async def _on_shutdown_event() -> None: + with log_context(_logger, logging.INFO, "Re-enable Blocking logger"): + await exit_stack.aclose() + + return _on_shutdown_event diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py index 4593bbf7b01d..de44c393d8e5 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_client.py @@ -1,7 +1,6 @@ import asyncio import functools import logging -import warnings from collections.abc import Awaitable, Callable from typing import Any, Final @@ -14,10 +13,10 @@ from tenacity.stop import stop_after_attempt from tenacity.wait import wait_exponential -from ...long_running_tasks._errors import GenericClientError -from ...long_running_tasks._models import ClientConfiguration, TaskId, TaskStatus +from ...long_running_tasks.errors import GenericClientError +from ...long_running_tasks.models import ClientConfiguration, TaskId, TaskStatus -DEFAULT_HTTP_REQUESTS_TIMEOUT: Final[PositiveFloat] = 15 +_DEFAULT_HTTP_REQUESTS_TIMEOUT: Final[PositiveFloat] = 15 logger = logging.getLogger(__name__) @@ -89,7 +88,7 @@ def retry_on_http_errors( assert asyncio.iscoroutinefunction(request_func) @functools.wraps(request_func) - async def request_wrapper(zelf: "Client", *args, **kwargs) -> Any: + async def request_wrapper(zelf: "HttpClient", *args, **kwargs) -> Any: async for attempt in AsyncRetrying( stop=stop_after_attempt(max_attempt_number=3), wait=wait_exponential(min=1), @@ -107,7 +106,7 @@ async def request_wrapper(zelf: "Client", *args, **kwargs) -> Any: return request_wrapper -class Client: +class HttpClient: """ This is a client that aims to simplify the requests to get the status, result and/or cancel of a long running task.
@@ -121,7 +120,7 @@ def __init__(self, app: FastAPI, async_client: AsyncClient, base_url: str): """ self.app = app self._async_client = async_client - self._base_url = base_url + self.base_url = base_url @property def _client_configuration(self) -> ClientConfiguration: @@ -130,7 +129,7 @@ def _client_configuration(self) -> ClientConfiguration: def _get_url(self, path: str) -> str: url_path = f"{self._client_configuration.router_prefix}{path}".lstrip("/") - url = TypeAdapter(AnyHttpUrl).validate_python(f"{self._base_url}{url_path}") + url = TypeAdapter(AnyHttpUrl).validate_python(f"{self.base_url}{url_path}") return f"{url}" @retry_on_http_errors @@ -172,7 +171,7 @@ async def get_task_result( return result.json() @retry_on_http_errors - async def cancel_and_delete_task( + async def remove_task( self, task_id: TaskId, *, timeout: PositiveFloat | None = None # noqa: ASYNC109 ) -> None: timeout = timeout or self._client_configuration.default_timeout @@ -181,16 +180,6 @@ async def cancel_and_delete_task( timeout=timeout, ) - if result.status_code == status.HTTP_200_OK: - warnings.warn( - "returning a 200 when cancelling a task has been deprecated with PR#3236" - "and will be removed after 11.2022" - "please do close your studies at least once before that date, so that the dy-sidecar" - "get replaced", - category=DeprecationWarning, - ) - return - if result.status_code not in ( status.HTTP_204_NO_CONTENT, status.HTTP_404_NOT_FOUND, @@ -207,7 +196,7 @@ def setup( app: FastAPI, *, router_prefix: str = "", - http_requests_timeout: PositiveFloat = DEFAULT_HTTP_REQUESTS_TIMEOUT, + http_requests_timeout: PositiveFloat = _DEFAULT_HTTP_REQUESTS_TIMEOUT, ): """ - `router_prefix` by default it is assumed the server mounts the APIs on diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py index c16fadd8be2b..35e534fcc02a 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_context_manager.py @@ -1,20 +1,24 @@ import asyncio -from asyncio.log import logger +import logging +import warnings from collections.abc import AsyncIterator from contextlib import asynccontextmanager from typing import Any, Final +from common_library.logging.logging_errors import create_troubleshooting_log_message from pydantic import PositiveFloat -from ...long_running_tasks._errors import TaskClientTimeoutError -from ...long_running_tasks._models import ( +from ...long_running_tasks.errors import TaskClientTimeoutError, TaskExceptionError +from ...long_running_tasks.models import ( ProgressCallback, ProgressMessage, ProgressPercent, TaskId, TaskStatus, ) -from ._client import Client +from ._client import HttpClient + +_logger = logging.getLogger(__name__) # NOTE: very short running requests are involved MAX_CONCURRENCY: Final[int] = 10 @@ -66,7 +70,7 @@ async def update( @asynccontextmanager async def periodic_task_result( - client: Client, + client: HttpClient, task_id: TaskId, *, task_timeout: PositiveFloat, @@ -92,11 +96,18 @@ async def periodic_task_result( raises: `asyncio.TimeoutError` NOTE: the remote task will also be removed """ + warnings.warn( + "This context manager is deprecated and will be removed in future releases. 
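Aside (not part of the patch): a rough usage sketch of the renamed `HttpClient` together with `periodic_task_result`, which this PR deprecates in favour of `servicelib.long_running_tasks.lrt_api`. The backend URL, endpoint and `task_id` are placeholders; `setup` is assumed to still register the client configuration on the app, and the optional progress-callback/poll-interval arguments are left at their defaults.

```python
# Illustrative sketch only, not part of this diff; URLs and task_id are placeholders.
import httpx
from fastapi import FastAPI
from servicelib.fastapi.long_running_tasks.client import (
    HttpClient,
    periodic_task_result,
    setup,
)

app = FastAPI()
setup(app)  # registers the long-running-tasks client configuration on app.state


async def wait_for_result(task_id: str) -> None:
    async with httpx.AsyncClient() as session:
        client = HttpClient(app, session, base_url="http://backend:8000/")
        # polls the task status and yields the result; on timeout the remote
        # task is removed via the renamed HttpClient.remove_task
        async with periodic_task_result(client, task_id, task_timeout=60) as result:
            print(result)
```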
" + "Please use the `servicelib.long_running_tasks.lrt_api` instead.", + DeprecationWarning, + stacklevel=2, + ) + progress_manager = _ProgressManager(progress_callback) async def _status_update() -> TaskStatus: task_status: TaskStatus = await client.get_task_status(task_id) - logger.debug("Task status %s", task_status.model_dump_json()) + _logger.debug("Task status %s", task_status.model_dump_json()) await progress_manager.update( task_id=task_id, message=task_status.task_progress.message, @@ -114,13 +125,22 @@ async def _wait_for_task_result() -> Any: try: result = await asyncio.wait_for(_wait_for_task_result(), timeout=task_timeout) - logger.debug("%s, %s", f"{task_id=}", f"{result=}") + _logger.debug("%s, %s", f"{task_id=}", f"{result=}") yield result except TimeoutError as e: - await client.cancel_and_delete_task(task_id) + await client.remove_task(task_id) raise TaskClientTimeoutError( task_id=task_id, timeout=task_timeout, exception=e, ) from e + except Exception as e: + _logger.warning( + create_troubleshooting_log_message( + user_error_msg=f"{task_id=} raised an exception", + error=e, + tip=f"Check the logs of the service responding to '{client.base_url}'", + ) + ) + raise TaskExceptionError(task_id=task_id, exception=e, traceback="") from e diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_dependencies.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_dependencies.py index 937ddcf33d17..ced9efa4f16b 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_dependencies.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_dependencies.py @@ -1,8 +1,10 @@ from fastapi import Request -from ...long_running_tasks._task import TasksManager +from ._manager import FastAPILongRunningManager -def get_tasks_manager(request: Request) -> TasksManager: - output: TasksManager = request.app.state.long_running_task_manager - return output +def get_long_running_manager(request: Request) -> FastAPILongRunningManager: + assert isinstance( + request.app.state.long_running_manager, FastAPILongRunningManager + ) # nosec + return request.app.state.long_running_manager diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_error_handlers.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_error_handlers.py index e5f1ef7d9eea..0214e0092176 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_error_handlers.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_error_handlers.py @@ -5,7 +5,7 @@ from starlette.requests import Request from starlette.responses import JSONResponse -from ...long_running_tasks._errors import ( +from ...long_running_tasks.errors import ( BaseLongRunningError, TaskNotCompletedError, TaskNotFoundError, @@ -18,10 +18,10 @@ async def base_long_running_error_handler( _: Request, exception: BaseLongRunningError ) -> JSONResponse: _logger.debug("%s", exception, stack_info=True) - error_fields = dict(code=exception.code, message=f"{exception}") + error_fields = {"code": exception.code, "message": f"{exception}"} status_code = ( status.HTTP_404_NOT_FOUND - if isinstance(exception, (TaskNotFoundError, TaskNotCompletedError)) + if isinstance(exception, TaskNotFoundError | TaskNotCompletedError) else status.HTTP_400_BAD_REQUEST ) return JSONResponse(content=jsonable_encoder(error_fields), status_code=status_code) diff --git 
a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_manager.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_manager.py new file mode 100644 index 000000000000..535dd97eaf81 --- /dev/null +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_manager.py @@ -0,0 +1,11 @@ +from fastapi import Request + +from ...long_running_tasks.manager import LongRunningManager +from ...long_running_tasks.models import TaskContext + + +class FastAPILongRunningManager(LongRunningManager): + @staticmethod + def get_task_context(request: Request) -> TaskContext: + _ = request + return {} diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_routes.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_routes.py index b56ba3d21ddd..95b28ceec2fb 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_routes.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_routes.py @@ -2,11 +2,11 @@ from fastapi import APIRouter, Depends, Request, status -from ...long_running_tasks._errors import TaskNotCompletedError, TaskNotFoundError -from ...long_running_tasks._models import TaskGet, TaskId, TaskResult, TaskStatus -from ...long_running_tasks._task import TasksManager +from ...long_running_tasks import lrt_api +from ...long_running_tasks.models import TaskGet, TaskId, TaskResult, TaskStatus from ..requests_decorators import cancel_on_disconnect -from ._dependencies import get_tasks_manager +from ._dependencies import get_long_running_manager +from ._manager import FastAPILongRunningManager router = APIRouter(prefix="/task") @@ -14,18 +14,24 @@ @router.get("", response_model=list[TaskGet]) @cancel_on_disconnect async def list_tasks( - request: Request, tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)] + request: Request, + long_running_manager: Annotated[ + FastAPILongRunningManager, Depends(get_long_running_manager) + ], ) -> list[TaskGet]: assert request # nosec return [ TaskGet( task_id=t.task_id, - task_name=t.task_name, - status_href="", - result_href="", - abort_href="", + status_href=str(request.url_for("get_task_status", task_id=t.task_id)), + result_href=str(request.url_for("get_task_result", task_id=t.task_id)), + abort_href=str(request.url_for("remove_task", task_id=t.task_id)), + ) + for t in await lrt_api.list_tasks( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + long_running_manager.get_task_context(request), ) - for t in tasks_manager.list_tasks(with_task_context=None) ] @@ -39,11 +45,18 @@ async def list_tasks( @cancel_on_disconnect async def get_task_status( request: Request, + long_running_manager: Annotated[ + FastAPILongRunningManager, Depends(get_long_running_manager) + ], task_id: TaskId, - tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)], ) -> TaskStatus: assert request # nosec - return tasks_manager.get_task_status(task_id=task_id, with_task_context=None) + return await lrt_api.get_task_status( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + long_running_manager.get_task_context(request), + task_id=task_id, + ) @router.get( @@ -58,29 +71,23 @@ async def get_task_status( @cancel_on_disconnect async def get_task_result( request: Request, + long_running_manager: Annotated[ + FastAPILongRunningManager, Depends(get_long_running_manager) + ], task_id: TaskId, - tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)], ) -> TaskResult | Any: assert 
request # nosec - try: - task_result = tasks_manager.get_task_result(task_id, with_task_context=None) - await tasks_manager.remove_task( - task_id, with_task_context=None, reraise_errors=False - ) - return task_result - except (TaskNotFoundError, TaskNotCompletedError): - raise - except Exception: - # the task shall be removed in this case - await tasks_manager.remove_task( - task_id, with_task_context=None, reraise_errors=False - ) - raise + return await lrt_api.get_task_result( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + long_running_manager.get_task_context(request), + task_id=task_id, + ) @router.delete( "/{task_id}", - summary="Cancel and deletes a task", + summary="Cancels and removes a task", response_model=None, status_code=status.HTTP_204_NO_CONTENT, responses={ @@ -88,10 +95,17 @@ async def get_task_result( }, ) @cancel_on_disconnect -async def cancel_and_delete_task( +async def remove_task( request: Request, + long_running_manager: Annotated[ + FastAPILongRunningManager, Depends(get_long_running_manager) + ], task_id: TaskId, - tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)], ) -> None: assert request # nosec - await tasks_manager.remove_task(task_id, with_task_context=None) + await lrt_api.remove_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + long_running_manager.get_task_context(request), + task_id=task_id, + ) diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_server.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_server.py index e8306b6d1874..9cf4c526acee 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/_server.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/_server.py @@ -1,31 +1,40 @@ -from typing import Final +import datetime from fastapi import APIRouter, FastAPI -from pydantic import PositiveFloat +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings -from ...long_running_tasks._errors import BaseLongRunningError -from ...long_running_tasks._task import TasksManager +from ...long_running_tasks.constants import ( + DEFAULT_STALE_TASK_CHECK_INTERVAL, + DEFAULT_STALE_TASK_DETECT_TIMEOUT, +) +from ...long_running_tasks.errors import BaseLongRunningError +from ...long_running_tasks.models import LRTNamespace from ._error_handlers import base_long_running_error_handler +from ._manager import FastAPILongRunningManager from ._routes import router -_MINUTE: Final[PositiveFloat] = 60 - def setup( app: FastAPI, *, router_prefix: str = "", - stale_task_check_interval_s: PositiveFloat = 1 * _MINUTE, - stale_task_detect_timeout_s: PositiveFloat = 5 * _MINUTE, + redis_settings: RedisSettings, + rabbit_settings: RabbitSettings, + lrt_namespace: LRTNamespace, + stale_task_check_interval: datetime.timedelta = DEFAULT_STALE_TASK_CHECK_INTERVAL, + stale_task_detect_timeout: datetime.timedelta = DEFAULT_STALE_TASK_DETECT_TIMEOUT, ) -> None: """ - - `router_prefix` APIs are mounted on `/task/...`, this - will change them to be mounted as `{router_prefix}/task/...` - - `stale_task_check_interval_s` interval at which the + - `router_prefix` APIs are mounted on `/...`, this + will change them to be mounted as `{router_prefix}/...` + - `redis_settings` settings for Redis connection + - `rabbit_settings` settings for RabbitMQ connection + - `lrt_namespace` namespace for the long-running tasks + - `stale_task_check_interval` interval at which the 
TaskManager checks for tasks which are no longer being actively monitored by a client - - `stale_task_detect_timeout_s` interval after which a - task is considered stale + - `stale_task_detect_timeout` interval after which a task is considered stale """ async def on_startup() -> None: @@ -35,19 +44,27 @@ async def on_startup() -> None: app.include_router(main_router) # add components to state - app.state.long_running_task_manager = TasksManager( - stale_task_check_interval_s=stale_task_check_interval_s, - stale_task_detect_timeout_s=stale_task_detect_timeout_s, + app.state.long_running_manager = long_running_manager = ( + FastAPILongRunningManager( + stale_task_check_interval=stale_task_check_interval, + stale_task_detect_timeout=stale_task_detect_timeout, + redis_settings=redis_settings, + rabbit_settings=rabbit_settings, + lrt_namespace=lrt_namespace, + ) ) + await long_running_manager.setup() async def on_shutdown() -> None: - if app.state.long_running_task_manager: - task_manager: TasksManager = app.state.long_running_task_manager - await task_manager.close() + if app.state.long_running_manager: + long_running_manager: FastAPILongRunningManager = ( + app.state.long_running_manager + ) + await long_running_manager.teardown() app.add_event_handler("startup", on_startup) app.add_event_handler("shutdown", on_shutdown) # add error handlers # NOTE: Exception handler can not be added during the on_startup script, otherwise not working correctly - app.add_exception_handler(BaseLongRunningError, base_long_running_error_handler) # type: ignore[arg-type] + app.add_exception_handler(BaseLongRunningError, base_long_running_error_handler) # type: ignore[arg-type] diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/client.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/client.py index 62b72256000c..ca72f24e4417 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/client.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/client.py @@ -2,164 +2,11 @@ Provides a convenient way to return the result given a TaskId.
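Aside (not part of the patch): with the new `setup` signature above, wiring the server side now requires Redis and RabbitMQ settings plus a namespace. A minimal sketch, assuming the settings classes can be built from environment variables via `create_from_envs` and that `LRTNamespace` accepts a plain string; the `"my-service"` namespace is a placeholder.

```python
# Illustrative sketch only, not part of this diff.
from fastapi import FastAPI
from servicelib.fastapi.long_running_tasks.server import setup
from settings_library.rabbit import RabbitSettings
from settings_library.redis import RedisSettings

app = FastAPI()
setup(
    app,
    redis_settings=RedisSettings.create_from_envs(),    # assumed helper
    rabbit_settings=RabbitSettings.create_from_envs(),  # assumed helper
    lrt_namespace="my-service",  # namespaces this service's long-running tasks
)
```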
""" -import asyncio -import logging -from collections.abc import AsyncGenerator -from typing import Any - -import httpx -from fastapi import status -from models_library.api_schemas_long_running_tasks.base import TaskProgress -from models_library.api_schemas_long_running_tasks.tasks import ( - TaskGet, - TaskResult, - TaskStatus, -) -from tenacity import ( - AsyncRetrying, - TryAgain, - before_sleep_log, - retry, - retry_if_exception_type, - stop_after_delay, - wait_random_exponential, -) -from yarl import URL - -from ...long_running_tasks._constants import DEFAULT_POLL_INTERVAL_S, HOUR -from ...long_running_tasks._models import ( - ClientConfiguration, - LRTask, - ProgressCallback, - ProgressMessage, - ProgressPercent, - RequestBody, -) -from ...long_running_tasks._task import TaskId -from ...rest_responses import unwrap_envelope_if_required -from ._client import DEFAULT_HTTP_REQUESTS_TIMEOUT, Client, setup -from ._context_manager import periodic_task_result - -_logger = logging.getLogger(__name__) - - -_DEFAULT_FASTAPI_RETRY_POLICY: dict[str, Any] = { - "retry": retry_if_exception_type(httpx.RequestError), - "wait": wait_random_exponential(max=20), - "stop": stop_after_delay(60), - "reraise": True, - "before_sleep": before_sleep_log(_logger, logging.INFO), -} - - -@retry(**_DEFAULT_FASTAPI_RETRY_POLICY) -async def _start( - session: httpx.AsyncClient, url: URL, json: RequestBody | None -) -> TaskGet: - response = await session.post(f"{url}", json=json) - response.raise_for_status() - data = unwrap_envelope_if_required(response.json()) - return TaskGet.model_validate(data) - - -@retry(**_DEFAULT_FASTAPI_RETRY_POLICY) -async def _wait_for_completion( - session: httpx.AsyncClient, - task_id: TaskId, - status_url: URL, - client_timeout: int, -) -> AsyncGenerator[TaskProgress, None]: - try: - async for attempt in AsyncRetrying( - stop=stop_after_delay(client_timeout), - reraise=True, - retry=retry_if_exception_type(TryAgain), - before_sleep=before_sleep_log(_logger, logging.DEBUG), - ): - with attempt: - response = await session.get(f"{status_url}") - response.raise_for_status() - data = unwrap_envelope_if_required(response.json()) - task_status = TaskStatus.model_validate(data) - - yield task_status.task_progress - if not task_status.done: - await asyncio.sleep( - float( - response.headers.get("retry-after", DEFAULT_POLL_INTERVAL_S) - ) - ) - msg = f"{task_id=}, {task_status.started=} has status: '{task_status.task_progress.message}' {task_status.task_progress.percent}%" - raise TryAgain(msg) # noqa: TRY301 - - except TryAgain as exc: - # this is a timeout - msg = f"Long running task {task_id}, calling to {status_url} timed-out after {client_timeout} seconds" - raise TimeoutError(msg) from exc - - -@retry(**_DEFAULT_FASTAPI_RETRY_POLICY) -async def _task_result(session: httpx.AsyncClient, result_url: URL) -> Any: - response = await session.get(f"{result_url}") - response.raise_for_status() - if response.status_code != status.HTTP_204_NO_CONTENT: - return unwrap_envelope_if_required(response.json()) - return None - - -@retry(**_DEFAULT_FASTAPI_RETRY_POLICY) -async def _abort_task(session: httpx.AsyncClient, abort_url: URL) -> None: - response = await session.delete(f"{abort_url}") - response.raise_for_status() - - -async def long_running_task_request( - session: httpx.AsyncClient, - url: URL, - json: RequestBody | None = None, - client_timeout: int = 1 * HOUR, -) -> AsyncGenerator[LRTask, None]: - """Will use the passed `httpx.AsyncClient` to call an oSparc long - running task `url` passing 
`json` as request body. - NOTE: this follows the usual aiohttp client syntax, and will raise the same errors - - Raises: - [https://docs.aiohttp.org/en/stable/client_reference.html#hierarchy-of-exceptions] - """ - task = None - try: - task = await _start(session, url, json) - last_progress = None - async for task_progress in _wait_for_completion( - session, - task.task_id, - URL(task.status_href), - client_timeout, - ): - last_progress = task_progress - yield LRTask(progress=task_progress) - assert last_progress # nosec - yield LRTask( - progress=last_progress, - _result=_task_result(session, URL(task.result_href)), - ) - - except (TimeoutError, asyncio.CancelledError): - if task: - await _abort_task(session, URL(task.abort_href)) - raise - +from ._client import HttpClient, setup +from ._context_manager import periodic_task_result # attach to the same object! __all__: tuple[str, ...] = ( - "DEFAULT_HTTP_REQUESTS_TIMEOUT", - "Client", - "ClientConfiguration", - "LRTask", - "ProgressCallback", - "ProgressMessage", - "ProgressPercent", - "TaskId", - "TaskResult", + "HttpClient", "periodic_task_result", "setup", ) diff --git a/packages/service-library/src/servicelib/fastapi/long_running_tasks/server.py b/packages/service-library/src/servicelib/fastapi/long_running_tasks/server.py index b9a29d1d90a4..b7cf0fba60ad 100644 --- a/packages/service-library/src/servicelib/fastapi/long_running_tasks/server.py +++ b/packages/service-library/src/servicelib/fastapi/long_running_tasks/server.py @@ -6,30 +6,12 @@ running task. The client will take care of recovering the result from it. """ -from models_library.api_schemas_long_running_tasks.tasks import TaskResult - -from ...long_running_tasks._errors import TaskAlreadyRunningError, TaskCancelledError -from ...long_running_tasks._task import ( - TaskId, - TaskProgress, - TasksManager, - TaskStatus, - start_task, -) -from ._dependencies import get_tasks_manager +from ._dependencies import get_long_running_manager from ._server import setup __all__: tuple[str, ...] 
= ( - "get_tasks_manager", + "get_long_running_manager", "setup", - "start_task", - "TaskAlreadyRunningError", - "TaskCancelledError", - "TaskId", - "TasksManager", - "TaskProgress", - "TaskResult", - "TaskStatus", ) # nopycln: file diff --git a/packages/service-library/src/servicelib/fastapi/monitoring.py b/packages/service-library/src/servicelib/fastapi/monitoring.py index 32dd26f53d6b..a9c33f0d2162 100644 --- a/packages/service-library/src/servicelib/fastapi/monitoring.py +++ b/packages/service-library/src/servicelib/fastapi/monitoring.py @@ -13,12 +13,6 @@ CONTENT_TYPE_LATEST, generate_latest, ) -from servicelib.prometheus_metrics import ( - PrometheusMetrics, - get_prometheus_metrics, - record_request_metrics, - record_response_metrics, -) from starlette.middleware.base import BaseHTTPMiddleware, RequestResponseEndpoint from starlette.types import ASGIApp @@ -26,6 +20,12 @@ UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, X_SIMCORE_USER_AGENT, ) +from ..prometheus_metrics import ( + PrometheusMetrics, + get_prometheus_metrics, + record_request_metrics, + record_response_metrics, +) _logger = logging.getLogger(__name__) _PROMETHEUS_METRICS = "prometheus_metrics" diff --git a/packages/service-library/src/servicelib/fastapi/postgres_lifespan.py b/packages/service-library/src/servicelib/fastapi/postgres_lifespan.py index 319a7121896a..e532d4a435fc 100644 --- a/packages/service-library/src/servicelib/fastapi/postgres_lifespan.py +++ b/packages/service-library/src/servicelib/fastapi/postgres_lifespan.py @@ -28,7 +28,9 @@ def create_postgres_database_input_state(settings: PostgresSettings) -> State: return {PostgresLifespanState.POSTGRES_SETTINGS: settings} -async def postgres_database_lifespan(_: FastAPI, state: State) -> AsyncIterator[State]: +async def postgres_database_lifespan( + app: FastAPI, state: State +) -> AsyncIterator[State]: _lifespan_name = f"{__name__}.{postgres_database_lifespan.__name__}" @@ -43,7 +45,7 @@ async def postgres_database_lifespan(_: FastAPI, state: State) -> AsyncIterator[ # connect to database async_engine: AsyncEngine = await create_async_engine_and_database_ready( - settings + settings, app.title ) try: diff --git a/packages/service-library/src/servicelib/fastapi/profiler.py b/packages/service-library/src/servicelib/fastapi/profiler.py index cb3e7c5c0840..9010c6296f09 100644 --- a/packages/service-library/src/servicelib/fastapi/profiler.py +++ b/packages/service-library/src/servicelib/fastapi/profiler.py @@ -1,11 +1,11 @@ from typing import Any, Final from fastapi import FastAPI -from servicelib.aiohttp import status -from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from starlette.requests import Request from starlette.types import ASGIApp, Receive, Scope, Send +from ..aiohttp import status +from ..mimetype_constants import MIMETYPE_APPLICATION_JSON from ..utils_profiling_middleware import ( _is_profiling, _profiler, diff --git a/packages/service-library/src/servicelib/fastapi/redis_lifespan.py b/packages/service-library/src/servicelib/fastapi/redis_lifespan.py index b1ac98e9d6ca..b8955d2c8ae8 100644 --- a/packages/service-library/src/servicelib/fastapi/redis_lifespan.py +++ b/packages/service-library/src/servicelib/fastapi/redis_lifespan.py @@ -51,6 +51,7 @@ async def redis_client_sdk_lifespan(_: FastAPI, state: State) -> AsyncIterator[S redis_dsn_with_secrets, client_name=redis_state.REDIS_CLIENT_NAME, ) + await redis_client.setup() try: yield {"REDIS_CLIENT_SDK": redis_client, **called_state} diff --git 
a/packages/service-library/src/servicelib/fastapi/requests_decorators.py b/packages/service-library/src/servicelib/fastapi/requests_decorators.py index ae5f1ea047c6..b9116d9c1d5a 100644 --- a/packages/service-library/src/servicelib/fastapi/requests_decorators.py +++ b/packages/service-library/src/servicelib/fastapi/requests_decorators.py @@ -4,6 +4,7 @@ from functools import wraps from typing import Any, Protocol +from common_library.async_tools import cancel_wait_task from fastapi import Request, status from fastapi.exceptions import HTTPException @@ -13,8 +14,7 @@ class _HandlerWithRequestArg(Protocol): __name__: str - async def __call__(self, request: Request, *args: Any, **kwargs: Any) -> Any: - ... + async def __call__(self, request: Request, *args: Any, **kwargs: Any) -> Any: ... def _validate_signature(handler: _HandlerWithRequestArg): @@ -75,13 +75,8 @@ async def wrapper(request: Request, *args, **kwargs): # One has completed, cancel the other for t in pending: - t.cancel() - try: - await asyncio.wait_for(t, timeout=3) - - except asyncio.CancelledError: - pass + await cancel_wait_task(t, max_delay=3) except Exception: # pylint: disable=broad-except if t is handler_task: raise diff --git a/packages/service-library/src/servicelib/fastapi/rest_pagination.py b/packages/service-library/src/servicelib/fastapi/rest_pagination.py index 0a199152acea..0ef84d61ca7c 100644 --- a/packages/service-library/src/servicelib/fastapi/rest_pagination.py +++ b/packages/service-library/src/servicelib/fastapi/rest_pagination.py @@ -4,6 +4,7 @@ from fastapi_pagination.cursor import CursorPage # type: ignore[import-not-found] from fastapi_pagination.customization import ( # type: ignore[import-not-found] CustomizedPage, + UseIncludeTotal, UseParamsFields, ) from models_library.api_schemas_storage.storage_schemas import ( @@ -24,5 +25,8 @@ description="Page size", ) ), + UseIncludeTotal( + include_total=False + ), # make total field optional as S3 does not provide that ] CustomizedPathsCursorPageParams: TypeAlias = CustomizedPathsCursorPage.__params_type__ # type: ignore diff --git a/packages/service-library/src/servicelib/fastapi/tracing.py b/packages/service-library/src/servicelib/fastapi/tracing.py index 5b2cba5434d6..50c8aeab1d7d 100644 --- a/packages/service-library/src/servicelib/fastapi/tracing.py +++ b/packages/service-library/src/servicelib/fastapi/tracing.py @@ -3,7 +3,7 @@ import logging from collections.abc import AsyncIterator -from fastapi import FastAPI +from fastapi import FastAPI, Request from fastapi_lifespan_manager import State from httpx import AsyncClient, Client from opentelemetry import trace @@ -13,16 +13,19 @@ from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor from opentelemetry.sdk.resources import Resource -from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace import SpanProcessor, TracerProvider from opentelemetry.sdk.trace.export import BatchSpanProcessor -from servicelib.logging_utils import log_context from settings_library.tracing import TracingSettings +from starlette.middleware.base import BaseHTTPMiddleware from yarl import URL +from ..logging_utils import log_context +from ..tracing import get_trace_id_header + _logger = logging.getLogger(__name__) try: - from opentelemetry.instrumentation.asyncpg import ( # type: ignore[import-not-found] + from opentelemetry.instrumentation.asyncpg import ( AsyncPGInstrumentor, ) @@ -70,6 +73,11 @@ 
HAS_AIOPIKA_INSTRUMENTOR = False +def _create_span_processor(tracing_destination: str) -> SpanProcessor: + otlp_exporter = OTLPSpanExporterHTTP(endpoint=tracing_destination) + return BatchSpanProcessor(otlp_exporter) + + def _startup(tracing_settings: TracingSettings, service_name: str) -> None: if ( not tracing_settings.TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT @@ -96,10 +104,10 @@ def _startup(tracing_settings: TracingSettings, service_name: str) -> None: service_name, tracing_destination, ) - # Configure OTLP exporter to send spans to the collector - otlp_exporter = OTLPSpanExporterHTTP(endpoint=tracing_destination) - span_processor = BatchSpanProcessor(otlp_exporter) - global_tracer_provider.add_span_processor(span_processor) + # Add the span processor to the tracer provider + global_tracer_provider.add_span_processor( + _create_span_processor(tracing_destination) + ) if HAS_AIOPG: with log_context( @@ -180,7 +188,11 @@ def _shutdown() -> None: _logger.exception("Failed to uninstrument RequestsInstrumentor") -def initialize_fastapi_app_tracing(app: FastAPI): +def initialize_fastapi_app_tracing( + app: FastAPI, *, add_response_trace_id_header: bool = False +): + if add_response_trace_id_header: + app.add_middleware(ResponseTraceIdHeaderMiddleware) FastAPIInstrumentor.instrument_app(app) @@ -216,3 +228,13 @@ async def tracing_instrumentation_lifespan( _shutdown() return tracing_instrumentation_lifespan + + +class ResponseTraceIdHeaderMiddleware(BaseHTTPMiddleware): + + async def dispatch(self, request: Request, call_next): + response = await call_next(request) + trace_id_header = get_trace_id_header() + if trace_id_header: + response.headers.update(trace_id_header) + return response diff --git a/packages/service-library/src/servicelib/logging_utils.py b/packages/service-library/src/servicelib/logging_utils.py index 7ef3bc28e94f..27bd08dd87f6 100644 --- a/packages/service-library/src/servicelib/logging_utils.py +++ b/packages/service-library/src/servicelib/logging_utils.py @@ -8,22 +8,34 @@ import asyncio import functools import logging +import logging.handlers +import queue from asyncio import iscoroutinefunction from collections.abc import Callable, Iterator from contextlib import contextmanager +from dataclasses import dataclass from datetime import datetime from inspect import getframeinfo, stack from pathlib import Path -from typing import Any, NotRequired, TypeAlias, TypedDict, TypeVar - +from typing import Any, Final, TypeAlias, TypedDict, TypeVar + +from common_library.json_serialization import json_dumps +from common_library.logging.logging_base import LogExtra +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs +from common_library.logging.logging_utils_filtering import ( + GeneralLogFilter, + LoggerName, + MessageSubstring, +) from settings_library.tracing import TracingSettings -from .logging_utils_filtering import GeneralLogFilter, LoggerName, MessageSubstring from .tracing import setup_log_tracing from .utils_secrets import mask_sensitive_data _logger = logging.getLogger(__name__) +LogLevelInt: TypeAlias = int +LogMessageStr: TypeAlias = str BLACK = "\033[0;30m" BLUE = "\033[0;34m" @@ -54,27 +66,6 @@ } -class LogExtra(TypedDict): - log_uid: NotRequired[str] - log_oec: NotRequired[str] - - -def get_log_record_extra( - *, - user_id: int | str | None = None, - error_code: str | None = None, -) -> LogExtra | None: - extra: LogExtra = {} - - if user_id: - assert int(user_id) > 0 # nosec - extra["log_uid"] = f"{user_id}" - if error_code: - 
extra["log_oec"] = error_code - - return extra or None - - class CustomFormatter(logging.Formatter): """Custom Formatter does these 2 things: 1. Overrides 'funcName' with the value of 'func_name_override', if it exists. @@ -87,11 +78,19 @@ def __init__(self, fmt: str, *, log_format_local_dev_enabled: bool) -> None: def format(self, record) -> str: if hasattr(record, "func_name_override"): - record.funcName = record.func_name_override + record.funcName = ( + record.func_name_override + ) # pyright: ignore[reportAttributeAccessIssue] if hasattr(record, "file_name_override"): - record.filename = record.file_name_override + record.filename = ( + record.file_name_override + ) # pyright: ignore[reportAttributeAccessIssue] - for name in LogExtra.__optional_keys__: # pylint: disable=no-member + # pylint: disable=no-member + optional_keys = LogExtra.__optional_keys__ | frozenset( + ["otelTraceID", "otelSpanID"] + ) + for name in optional_keys: if not hasattr(record, name): setattr(record, name, None) @@ -106,68 +105,51 @@ def format(self, record) -> str: # SEE https://docs.python.org/3/library/logging.html#logrecord-attributes -DEFAULT_FORMATTING = ( - "log_level=%(levelname)s " - "| log_timestamp=%(asctime)s " - "| log_source=%(name)s:%(funcName)s(%(lineno)d) " - "| log_uid=%(log_uid)s " - "| log_oec=%(log_oec)s" - "| log_msg=%(message)s" +_DEFAULT_FORMATTING: Final[str] = " | ".join( + [ + "log_level=%(levelname)s", + "log_timestamp=%(asctime)s", + "log_source=%(name)s:%(funcName)s(%(lineno)d)", + "log_uid=%(log_uid)s", + "log_oec=%(log_oec)s", + "log_trace_id=%(otelTraceID)s", + "log_span_id=%(otelSpanID)s", + "log_msg=%(message)s", + ] +) + +_LOCAL_FORMATTING: Final[str] = ( + "%(levelname)s: [%(asctime)s/%(processName)s] " + "[log_trace_id=%(otelTraceID)s|log_span_id=%(otelSpanID)s] " + "[%(name)s:%(funcName)s(%(lineno)d)] - %(message)s" ) -LOCAL_FORMATTING = "%(levelname)s: [%(asctime)s/%(processName)s] [%(name)s:%(funcName)s(%(lineno)d)] - %(message)s" # Graylog Grok pattern extractor: -# log_level=%{WORD:log_level} \| log_timestamp=%{TIMESTAMP_ISO8601:log_timestamp} \| log_source=%{DATA:log_source} \| (log_uid=%{WORD:log_uid} \| )?log_msg=%{GREEDYDATA:log_msg} +# log_level=%{WORD:log_level} \| log_timestamp=%{TIMESTAMP_ISO8601:log_timestamp} \| log_source=%{NOTSPACE:log_source} \| log_uid=%{NOTSPACE:log_uid} \| log_oec=%{NOTSPACE:log_oec} \| log_trace_id=%{NOTSPACE:log_trace_id} \| log_span_id=%{NOTSPACE:log_span_id} \| log_msg=%{GREEDYDATA:log_msg} -def config_all_loggers( +def _setup_logging_formatter( *, log_format_local_dev_enabled: bool, - logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], - tracing_settings: TracingSettings | None, -) -> None: - """ - Applies common configuration to ALL registered loggers - """ - the_manager: logging.Manager = logging.Logger.manager - root_logger = logging.getLogger() +) -> logging.Formatter: + fmt = _LOCAL_FORMATTING if log_format_local_dev_enabled else _DEFAULT_FORMATTING - loggers = [root_logger] + [ - logging.getLogger(name) for name in the_manager.loggerDict - ] + return CustomFormatter( + fmt, log_format_local_dev_enabled=log_format_local_dev_enabled + ) - fmt = DEFAULT_FORMATTING - if tracing_settings is not None: - fmt = ( - "log_level=%(levelname)s " - "| log_timestamp=%(asctime)s " - "| log_source=%(name)s:%(funcName)s(%(lineno)d) " - "| log_uid=%(log_uid)s " - "| log_oec=%(log_oec)s" - "| log_trace_id=%(otelTraceID)s " - "| log_span_id=%(otelSpanID)s " - "| log_resource.service.name=%(otelServiceName)s " - "| 
log_trace_sampled=%(otelTraceSampled)s] " - "| log_msg=%(message)s" - ) - setup_log_tracing(tracing_settings=tracing_settings) - if log_format_local_dev_enabled: - fmt = LOCAL_FORMATTING - if tracing_settings is not None: - fmt = ( - "%(levelname)s: [%(asctime)s/%(processName)s] " - "[log_trace_id=%(otelTraceID)s log_span_id=%(otelSpanID)s log_resource.service.name=%(otelServiceName)s log_trace_sampled=%(otelTraceSampled)s] " - "[%(name)s:%(funcName)s(%(lineno)d)] - %(message)s" - ) - for logger in loggers: - _set_logging_handler( - logger, fmt=fmt, log_format_local_dev_enabled=log_format_local_dev_enabled - ) +def _get_all_loggers() -> list[logging.Logger]: + manager = logging.Logger.manager + root_logger = logging.getLogger() + return [root_logger] + [logging.getLogger(name) for name in manager.loggerDict] + +def _apply_logger_filters( + logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], +) -> None: for logger_name, filtered_routes in logger_filter_mapping.items(): logger = logging.getLogger(logger_name) - # Check if the logger has any handlers or is in active use if not logger.hasHandlers(): _logger.warning( "Logger %s does not have any handlers. Filter will not be added.", @@ -179,43 +161,239 @@ def config_all_loggers( logger.addFilter(log_filter) -def _set_logging_handler( - logger: logging.Logger, +def _setup_base_logging_level(log_level: LogLevelInt) -> None: + logging.basicConfig(level=log_level) + logging.root.setLevel(log_level) + + +def _dampen_noisy_loggers( + noisy_loggers: tuple[str, ...], +) -> None: + """Sets a less verbose level for noisy loggers.""" + quiet_level: int = max( + min(logging.root.level + logging.CRITICAL - logging.ERROR, logging.CRITICAL), + logging.WARNING, + ) + + for name in noisy_loggers: + logging.getLogger(name).setLevel(quiet_level) + + +def _configure_common_logging_settings( *, - fmt: str, log_format_local_dev_enabled: bool, + tracing_settings: TracingSettings | None, + log_base_level: LogLevelInt, + noisy_loggers: tuple[str, ...] | None, +) -> logging.Formatter: + """ + Common configuration logic shared by both sync and async logging setups. + + Returns the configured formatter to be used with the appropriate handler. + """ + _setup_base_logging_level(log_base_level) + if noisy_loggers is not None: + _dampen_noisy_loggers(noisy_loggers) + if tracing_settings is not None: + setup_log_tracing(tracing_settings=tracing_settings) + + return _setup_logging_formatter( + log_format_local_dev_enabled=log_format_local_dev_enabled, + ) + + +def _apply_logging_configuration( + handler: logging.Handler, + logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], ) -> None: - for handler in logger.handlers: - handler.setFormatter( - CustomFormatter( - fmt, log_format_local_dev_enabled=log_format_local_dev_enabled - ) - ) + """ + Apply the logging configuration with the given handler. + """ + _clean_all_handlers() + _set_root_handler(handler) + if logger_filter_mapping: + _apply_logger_filters(logger_filter_mapping) -def test_logger_propagation(logger: logging.Logger) -> None: - """log propagation and levels can sometimes be daunting to get it right. - This function uses the `logger`` passed as argument to log the same message at different levels +def setup_loggers( + *, + log_format_local_dev_enabled: bool, + logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], + tracing_settings: TracingSettings | None, + log_base_level: LogLevelInt, + noisy_loggers: tuple[str, ...] 
| None, +) -> None: + """ + Applies comprehensive configuration to ALL registered loggers. + + Flow Diagram (Synchronous Logging): + ┌─────────────────┐ ┌─────────────────┐ + │ Application │ │ Root Logger │ + │ Thread │───────────────────▶│ StreamHandler │ + │ │ │ ├─ Formatter │ + │ logger.info() │ │ └─ Output │ + │ logger.error() │ │ │ + │ (blocking I/O) │ │ │ + └─────────────────┘ └─────────────────┘ + │ │ + │ ▼ + │ ┌─────────────┐ + │ │ Console/ │ + │ │ Terminal │ + │ └─────────────┘ + │ + └─ Blocks until I/O completes + + This function uses a comprehensive approach: + - Removes all handlers from all loggers + - Ensures all loggers propagate to root + - Sets up root logger with properly formatted handler + - All logging calls are synchronous and may block on I/O + + For async/non-blocking logging, use `async_loggers` context manager instead. + + Args: + log_format_local_dev_enabled: Enable local development formatting + logger_filter_mapping: Mapping of logger names to filtered message substrings + tracing_settings: OpenTelemetry tracing configuration + log_base_level: Base logging level to set + noisy_loggers: Loggers to set to a quieter level + """ + formatter = _configure_common_logging_settings( + log_format_local_dev_enabled=log_format_local_dev_enabled, + tracing_settings=tracing_settings, + log_base_level=log_base_level, + noisy_loggers=noisy_loggers, + ) + + # Create a properly formatted handler for the root logger + stream_handler = logging.StreamHandler() + stream_handler.setFormatter(formatter) - This should help to visually test a given configuration + _store_logger_state(_get_all_loggers()) + _apply_logging_configuration(stream_handler, logger_filter_mapping) + + +@contextmanager +def _queued_logging_handler( + log_formatter: logging.Formatter, +) -> Iterator[logging.Handler]: + log_queue: queue.Queue[logging.LogRecord] = queue.Queue() + # Create handler with proper formatting + handler = logging.StreamHandler() + handler.setFormatter(log_formatter) + + # Create and start the queue listener + listener = logging.handlers.QueueListener( + log_queue, handler, respect_handler_level=True + ) + listener.start() - USAGE: - from .logging_utils import test_logger_propagation - for n in ("aiohttp.access", "gunicorn.access"): - test_logger_propagation(logging.getLogger(n)) + queue_handler = logging.handlers.QueueHandler(log_queue) + + yield queue_handler + + # cleanup + with log_context( + _logger, + level=logging.DEBUG, + msg="Shutdown async logging listener", + ): + listener.stop() + + +def _clean_all_handlers() -> None: + """ + Cleans all handlers from all loggers. + This is useful for resetting the logging configuration. 
""" - msg = f"TESTING %s log using {logger=}" - logger.critical(msg, "critical") - logger.error(msg, "error") - logger.info(msg, "info") - logger.warning(msg, "warning") - logger.debug(msg, "debug") + root_logger = logging.getLogger() + all_loggers = _get_all_loggers() + for logger in all_loggers: + if logger is root_logger: + continue + logger.handlers.clear() + logger.propagate = True # Ensure propagation is enabled + + +def _set_root_handler(handler: logging.Handler) -> None: + root_logger = logging.getLogger() + root_logger.handlers.clear() # Clear existing handlers + root_logger.addHandler(handler) # Add the new handler + + +@contextmanager +def async_loggers( + *, + log_format_local_dev_enabled: bool, + logger_filter_mapping: dict[LoggerName, list[MessageSubstring]], + tracing_settings: TracingSettings | None, + log_base_level: LogLevelInt, + noisy_loggers: tuple[str, ...] | None, +) -> Iterator[None]: + """ + Context manager for non-blocking logging infrastructure. + + Flow Diagram: + ┌─────────────────┐ ┌──────────────┐ ┌─────────────────┐ + │ Application │ │ Queue │ │ Background │ + │ Thread │───▶│ (unlimited) │───▶│ Listener Thread │ + │ │ │ │ │ │ + │ logger.info() │ │ LogRecord │ │ StreamHandler │ + │ logger.error() │ │ LogRecord │ │ ├─ Formatter │ + │ (non-blocking) │ │ LogRecord │ │ └─ Output │ + └─────────────────┘ └──────────────┘ └─────────────────┘ + │ │ │ + │ │ ▼ + │ │ ┌─────────────┐ + │ │ │ Console/ │ + │ │ │ Terminal │ + │ │ └─────────────┘ + │ │ + └───────────────────────┴─ No blocking, immediate return + + The async logging setup ensures that: + 1. All log calls return immediately (non-blocking) + 2. Log records are queued in an unlimited queue + 3. A background thread processes the queue and handles actual I/O + 4. All loggers propagate to root for centralized handling + + For more details on the underlying implementation, see: + https://docs.python.org/3/library/logging.handlers.html#queuehandler + + Usage: + with async_loggers(log_format_local_dev_enabled=True, logger_filter_mapping={}, tracing_settings=None): + # Your async application code here + logger.info("This is non-blocking!") + + Args: + log_format_local_dev_enabled: Enable local development formatting + logger_filter_mapping: Mapping of logger names to filtered message substrings + tracing_settings: OpenTelemetry tracing configuration + log_base_level: Base logging level to set + noisy_loggers: Loggers to set to a quieter level + """ + formatter = _configure_common_logging_settings( + log_format_local_dev_enabled=log_format_local_dev_enabled, + tracing_settings=tracing_settings, + log_base_level=log_base_level, + noisy_loggers=noisy_loggers, + ) + + with ( + _queued_logging_handler(formatter) as queue_handler, + _stored_logger_states(_get_all_loggers()), + ): + _apply_logging_configuration(queue_handler, logger_filter_mapping) + + with log_context(_logger, logging.INFO, "Asynchronous logging"): + yield class LogExceptionsKwargsDict(TypedDict, total=True): logger: logging.Logger - level: int + level: LogLevelInt msg_prefix: str exc_info: bool stack_info: bool @@ -224,7 +402,7 @@ class LogExceptionsKwargsDict(TypedDict, total=True): @contextmanager def log_exceptions( logger: logging.Logger, - level: int, + level: LogLevelInt, msg_prefix: str = "", *, exc_info: bool = False, @@ -264,7 +442,7 @@ def log_exceptions( def _log_before_call( - logger_obj: logging.Logger, level: int, func: Callable, *args, **kwargs + logger_obj: logging.Logger, level: LogLevelInt, func: Callable, *args, **kwargs ) -> dict[str, 
str]: # NOTE: We should avoid logging arguments but in the meantime, we are trying to # avoid exposing sensitive data in the logs. For `args` is more difficult. We could eventually @@ -302,7 +480,7 @@ def _log_before_call( def _log_after_call( logger_obj: logging.Logger, - level: int, + level: LogLevelInt, func: Callable, result: Any, extra_args: dict[str, str], @@ -322,7 +500,7 @@ def _log_after_call( def log_decorator( logger: logging.Logger | None, - level: int = logging.DEBUG, + level: LogLevelInt = logging.DEBUG, *, # NOTE: default defined by legacy: ANE defined full stack tracebacks # on exceptions @@ -339,7 +517,6 @@ def log_decorator( logger_obj = logger or _logger def _decorator(func_or_coro: F) -> F: - _log_exc_kwargs = LogExceptionsKwargsDict( logger=logger_obj, level=level, @@ -385,15 +562,16 @@ def log_catch(logger: logging.Logger, *, reraise: bool = True) -> Iterator[None] logger.debug("call was cancelled") raise except Exception as exc: # pylint: disable=broad-except - logger.exception("Unhandled exception:") + logger.exception( + **create_troubleshooting_log_kwargs( + "Caught unhandled exception", + error=exc, + ) + ) if reraise: raise exc from exc -LogLevelInt: TypeAlias = int -LogMessageStr: TypeAlias = str - - def _un_capitalize(s: str) -> str: return s[:1].lower() + s[1:] if s else "" @@ -420,7 +598,7 @@ def log_context( logger.log(level, log_msg, *args, **kwargs, stacklevel=stackelvel) yield duration = ( - f" in {(datetime.now() - start ).total_seconds()}s" # noqa: DTZ005 + f" in {(datetime.now() - start).total_seconds()}s" # noqa: DTZ005 if log_duration else "" ) @@ -456,6 +634,58 @@ def guess_message_log_level(message: str) -> LogLevelInt: return logging.INFO -def set_parent_module_log_level(current_module: str, desired_log_level: int) -> None: +def set_parent_module_log_level( + current_module: str, desired_log_level: LogLevelInt +) -> None: parent_module = ".".join(current_module.split(".")[:-1]) logging.getLogger(parent_module).setLevel(desired_log_level) + + +@dataclass(frozen=True) +class _LoggerState: + logger: logging.Logger + handlers: list[logging.Handler] + propagate: bool + + +@contextmanager +def _stored_logger_states( + loggers: list[logging.Logger], +) -> Iterator[list[_LoggerState]]: + """ + Context manager to store and restore the state of loggers. + It captures the current handlers and propagation state of each logger. + """ + original_state = _store_logger_state(loggers) + + try: + yield original_state + finally: + _restore_logger_state(original_state) + + +def _store_logger_state(loggers: list[logging.Logger]) -> list[_LoggerState]: + logger_states = [ + _LoggerState(logger, logger.handlers.copy(), logger.propagate) + for logger in loggers + if logger.handlers or not logger.propagate + ] + # log which loggers states were stored + _logger.info( + "Stored logger states: %s. 
TIP: these loggers configuration will be restored later.", + json_dumps( + [ + f"{state.logger.name}(handlers={len(state.handlers)}, propagate={state.propagate})" + for state in logger_states + ] + ), + ) + return logger_states + + +def _restore_logger_state(original_state: list[_LoggerState]) -> None: + for state in original_state: + logger = state.logger + logger.handlers.clear() + logger.handlers.extend(state.handlers) + logger.propagate = state.propagate diff --git a/packages/service-library/src/servicelib/long_running_tasks/_constants.py b/packages/service-library/src/servicelib/long_running_tasks/_constants.py deleted file mode 100644 index 5cc87208a369..000000000000 --- a/packages/service-library/src/servicelib/long_running_tasks/_constants.py +++ /dev/null @@ -1,5 +0,0 @@ -from typing import Final - -MINUTE: Final[int] = 60 # in secs -HOUR: Final[int] = 60 * MINUTE # in secs -DEFAULT_POLL_INTERVAL_S: Final[float] = 1 diff --git a/packages/service-library/src/servicelib/long_running_tasks/_models.py b/packages/service-library/src/servicelib/long_running_tasks/_models.py deleted file mode 100644 index 89fb8b1b3997..000000000000 --- a/packages/service-library/src/servicelib/long_running_tasks/_models.py +++ /dev/null @@ -1,91 +0,0 @@ -# mypy: disable-error-code=truthy-function -from asyncio import Task -from collections.abc import Awaitable, Callable, Coroutine -from dataclasses import dataclass -from datetime import datetime -from typing import Any, TypeAlias - -from models_library.api_schemas_long_running_tasks.base import ( - ProgressMessage, - ProgressPercent, - TaskId, - TaskProgress, -) -from models_library.api_schemas_long_running_tasks.tasks import ( - TaskGet, - TaskResult, - TaskStatus, -) -from pydantic import BaseModel, ConfigDict, Field, PositiveFloat - -TaskName: TypeAlias = str - -TaskType: TypeAlias = Callable[..., Coroutine[Any, Any, Any]] - -ProgressCallback: TypeAlias = Callable[ - [ProgressMessage, ProgressPercent | None, TaskId], Awaitable[None] -] - -RequestBody: TypeAlias = Any - - -class TrackedTask(BaseModel): - task_id: str - task: Task - task_name: TaskName - task_progress: TaskProgress - # NOTE: this context lifetime is with the tracked task (similar to aiohttp storage concept) - task_context: dict[str, Any] - fire_and_forget: bool = Field( - ..., - description="if True then the task will not be auto-cancelled if no one enquires of its status", - ) - - started: datetime = Field(default_factory=datetime.utcnow) - last_status_check: datetime | None = Field( - default=None, - description=( - "used to detect when if the task is not actively " - "polled by the client who created it" - ), - ) - model_config = ConfigDict( - arbitrary_types_allowed=True, - ) - - -class ClientConfiguration(BaseModel): - router_prefix: str - default_timeout: PositiveFloat - - -@dataclass(frozen=True) -class LRTask: - progress: TaskProgress - _result: Coroutine[Any, Any, Any] | None = None - - def done(self) -> bool: - return self._result is not None - - async def result(self) -> Any: - if not self._result: - msg = "No result ready!" - raise ValueError(msg) - return await self._result - - -# explicit export of models for api-schemas - -assert TaskResult # nosec -assert TaskGet # nosec -assert TaskStatus # nosec - -__all__: tuple[str, ...] 
= ( - "ProgressMessage", - "ProgressPercent", - "TaskGet", - "TaskId", - "TaskProgress", - "TaskResult", - "TaskStatus", -) diff --git a/packages/service-library/src/servicelib/long_running_tasks/_rabbit_namespace.py b/packages/service-library/src/servicelib/long_running_tasks/_rabbit_namespace.py new file mode 100644 index 000000000000..7ace2e53a3dd --- /dev/null +++ b/packages/service-library/src/servicelib/long_running_tasks/_rabbit_namespace.py @@ -0,0 +1,8 @@ +from models_library.rabbitmq_basic_types import RPCNamespace +from pydantic import TypeAdapter + +from .models import LRTNamespace + + +def get_rabbit_namespace(namespace: LRTNamespace) -> RPCNamespace: + return TypeAdapter(RPCNamespace).validate_python(f"lrt-{namespace}") diff --git a/packages/service-library/src/servicelib/long_running_tasks/_redis_store.py b/packages/service-library/src/servicelib/long_running_tasks/_redis_store.py new file mode 100644 index 000000000000..fbed41205a95 --- /dev/null +++ b/packages/service-library/src/servicelib/long_running_tasks/_redis_store.py @@ -0,0 +1,127 @@ +from typing import Any, Final + +import redis.asyncio as aioredis +from common_library.json_serialization import json_dumps, json_loads +from pydantic import TypeAdapter +from settings_library.redis import RedisDatabase, RedisSettings + +from ..redis._client import RedisClientSDK +from ..redis._utils import handle_redis_returns_union_types +from ..utils import limited_gather +from .models import LRTNamespace, TaskData, TaskId + +_STORE_TYPE_TASK_DATA: Final[str] = "TD" +_LIST_CONCURRENCY: Final[int] = 3 +_MARKED_FOR_REMOVAL_FIELD: Final[str] = "marked_for_removal" + + +def _to_redis_hash_mapping(data: dict[str, Any]) -> dict[str, str]: + return {k: json_dumps(v) for k, v in data.items()} + + +def _load_from_redis_hash(data: dict[str, str]) -> dict[str, Any]: + return {k: json_loads(v) for k, v in data.items()} + + +class RedisStore: + def __init__(self, redis_settings: RedisSettings, namespace: LRTNamespace): + self.redis_settings = redis_settings + self.namespace: LRTNamespace = namespace.upper() + + self._client: RedisClientSDK | None = None + + async def setup(self) -> None: + self._client = RedisClientSDK( + self.redis_settings.build_redis_dsn(RedisDatabase.LONG_RUNNING_TASKS), + client_name=f"long_running_tasks_store_{self.namespace}", + ) + await self._client.setup() + + async def shutdown(self) -> None: + if self._client: + await self._client.shutdown() + + @property + def _redis(self) -> aioredis.Redis: + assert self._client # nosec + return self._client.redis + + def _get_redis_key_task_data_match(self) -> str: + return f"{self.namespace}:{_STORE_TYPE_TASK_DATA}*" + + def _get_redis_task_data_key(self, task_id: TaskId) -> str: + return f"{self.namespace}:{_STORE_TYPE_TASK_DATA}:{task_id}" + + async def get_task_data(self, task_id: TaskId) -> TaskData | None: + result: dict[str, Any] = await handle_redis_returns_union_types( + self._redis.hgetall( + self._get_redis_task_data_key(task_id), + ) + ) + return ( + TypeAdapter(TaskData).validate_python(_load_from_redis_hash(result)) + if result and len(result) + else None + ) + + async def add_task_data(self, task_id: TaskId, value: TaskData) -> None: + await handle_redis_returns_union_types( + self._redis.hset( + self._get_redis_task_data_key(task_id), + mapping=_to_redis_hash_mapping(value.model_dump()), + ) + ) + + async def update_task_data( + self, + task_id: TaskId, + *, + updates: dict[str, Any], + ) -> None: + await handle_redis_returns_union_types( + self._redis.hset( + 
self._get_redis_task_data_key(task_id), + mapping=_to_redis_hash_mapping(updates), + ) + ) + + async def list_tasks_data(self) -> list[TaskData]: + hash_keys: list[str] = [ + x + async for x in self._redis.scan_iter(self._get_redis_key_task_data_match()) + ] + + result = await limited_gather( + *[ + handle_redis_returns_union_types(self._redis.hgetall(key)) + for key in hash_keys + ], + limit=_LIST_CONCURRENCY, + ) + + return [ + TypeAdapter(TaskData).validate_python(_load_from_redis_hash(item)) + for item in result + if item + ] + + async def delete_task_data(self, task_id: TaskId) -> None: + await handle_redis_returns_union_types( + self._redis.delete(self._get_redis_task_data_key(task_id)) + ) + + async def mark_for_removal(self, task_id: TaskId) -> None: + await handle_redis_returns_union_types( + self._redis.hset( + self._get_redis_task_data_key(task_id), + mapping=_to_redis_hash_mapping({_MARKED_FOR_REMOVAL_FIELD: True}), + ) + ) + + async def is_marked_for_removal(self, task_id: TaskId) -> bool: + result = await handle_redis_returns_union_types( + self._redis.hget( + self._get_redis_task_data_key(task_id), _MARKED_FOR_REMOVAL_FIELD + ) + ) + return False if result is None else json_loads(result) diff --git a/packages/service-library/src/servicelib/long_running_tasks/_rpc_client.py b/packages/service-library/src/servicelib/long_running_tasks/_rpc_client.py new file mode 100644 index 000000000000..6ad3fe9785ba --- /dev/null +++ b/packages/service-library/src/servicelib/long_running_tasks/_rpc_client.py @@ -0,0 +1,130 @@ +import logging +from datetime import timedelta +from typing import Any, Final + +from models_library.rabbitmq_basic_types import RPCMethodName +from pydantic import PositiveInt, TypeAdapter + +from ..logging_utils import log_decorator +from ..rabbitmq._client_rpc import RabbitMQRPCClient +from ._rabbit_namespace import get_rabbit_namespace +from ._serialization import loads +from .errors import RPCTransferrableTaskError +from .models import ( + LRTNamespace, + RegisteredTaskName, + TaskBase, + TaskContext, + TaskId, + TaskStatus, +) + +_logger = logging.getLogger(__name__) + +_RPC_TIMEOUT_SHORT_REQUESTS: Final[PositiveInt] = int( + timedelta(seconds=20).total_seconds() +) + + +@log_decorator(_logger, level=logging.DEBUG) +async def start_task( + rabbitmq_rpc_client: RabbitMQRPCClient, + namespace: LRTNamespace, + *, + registered_task_name: RegisteredTaskName, + unique: bool = False, + task_context: TaskContext | None = None, + task_name: str | None = None, + fire_and_forget: bool = False, + **task_kwargs: Any, +) -> TaskId: + result = await rabbitmq_rpc_client.request( + get_rabbit_namespace(namespace), + TypeAdapter(RPCMethodName).validate_python("start_task"), + registered_task_name=registered_task_name, + unique=unique, + task_context=task_context, + task_name=task_name, + fire_and_forget=fire_and_forget, + **task_kwargs, + timeout_s=_RPC_TIMEOUT_SHORT_REQUESTS, + ) + assert isinstance(result, TaskId) # nosec + return result + + +@log_decorator(_logger, level=logging.DEBUG) +async def list_tasks( + rabbitmq_rpc_client: RabbitMQRPCClient, + namespace: LRTNamespace, + *, + task_context: TaskContext, +) -> list[TaskBase]: + result = await rabbitmq_rpc_client.request( + get_rabbit_namespace(namespace), + TypeAdapter(RPCMethodName).validate_python("list_tasks"), + task_context=task_context, + timeout_s=_RPC_TIMEOUT_SHORT_REQUESTS, + ) + return TypeAdapter(list[TaskBase]).validate_python(result) + + +@log_decorator(_logger, level=logging.DEBUG) +async def 
get_task_status( + rabbitmq_rpc_client: RabbitMQRPCClient, + namespace: LRTNamespace, + *, + task_context: TaskContext, + task_id: TaskId, +) -> TaskStatus: + result = await rabbitmq_rpc_client.request( + get_rabbit_namespace(namespace), + TypeAdapter(RPCMethodName).validate_python("get_task_status"), + task_context=task_context, + task_id=task_id, + timeout_s=_RPC_TIMEOUT_SHORT_REQUESTS, + ) + assert isinstance(result, TaskStatus) # nosec + return result + + +@log_decorator(_logger, level=logging.DEBUG) +async def get_task_result( + rabbitmq_rpc_client: RabbitMQRPCClient, + namespace: LRTNamespace, + *, + task_context: TaskContext, + task_id: TaskId, +) -> Any: + try: + serialized_result = await rabbitmq_rpc_client.request( + get_rabbit_namespace(namespace), + TypeAdapter(RPCMethodName).validate_python("get_task_result"), + task_context=task_context, + task_id=task_id, + timeout_s=_RPC_TIMEOUT_SHORT_REQUESTS, + ) + assert isinstance(serialized_result, str) # nosec + return loads(serialized_result) + except RPCTransferrableTaskError as e: + decoded_error = loads(f"{e}") + raise decoded_error from e + + +@log_decorator(_logger, level=logging.DEBUG) +async def remove_task( + rabbitmq_rpc_client: RabbitMQRPCClient, + namespace: LRTNamespace, + *, + task_context: TaskContext, + task_id: TaskId, +) -> None: + + result = await rabbitmq_rpc_client.request( + get_rabbit_namespace(namespace), + TypeAdapter(RPCMethodName).validate_python("remove_task"), + task_context=task_context, + task_id=task_id, + timeout_s=_RPC_TIMEOUT_SHORT_REQUESTS, + ) + assert result is None # nosec diff --git a/packages/service-library/src/servicelib/long_running_tasks/_rpc_server.py b/packages/service-library/src/servicelib/long_running_tasks/_rpc_server.py new file mode 100644 index 000000000000..2d7ff79ac087 --- /dev/null +++ b/packages/service-library/src/servicelib/long_running_tasks/_rpc_server.py @@ -0,0 +1,104 @@ +import logging +from contextlib import suppress +from typing import TYPE_CHECKING, Any + +from ..rabbitmq import RPCRouter +from .errors import BaseLongRunningError, RPCTransferrableTaskError, TaskNotFoundError +from .models import ( + RegisteredTaskName, + TaskBase, + TaskContext, + TaskId, + TaskStatus, +) + +_logger = logging.getLogger(__name__) + +if TYPE_CHECKING: + from .manager import LongRunningManager + + +router = RPCRouter() + + +@router.expose(reraise_if_error_type=(BaseLongRunningError,)) +async def start_task( + long_running_manager: "LongRunningManager", + *, + registered_task_name: RegisteredTaskName, + unique: bool = False, + task_context: TaskContext | None = None, + task_name: str | None = None, + fire_and_forget: bool = False, + **task_kwargs: Any, +) -> TaskId: + return await long_running_manager.tasks_manager.start_task( + registered_task_name, + unique=unique, + task_context=task_context, + task_name=task_name, + fire_and_forget=fire_and_forget, + **task_kwargs, + ) + + +@router.expose(reraise_if_error_type=(BaseLongRunningError,)) +async def list_tasks( + long_running_manager: "LongRunningManager", *, task_context: TaskContext +) -> list[TaskBase]: + return await long_running_manager.tasks_manager.list_tasks( + with_task_context=task_context + ) + + +@router.expose(reraise_if_error_type=(BaseLongRunningError,)) +async def get_task_status( + long_running_manager: "LongRunningManager", + *, + task_context: TaskContext, + task_id: TaskId, +) -> TaskStatus: + return await long_running_manager.tasks_manager.get_task_status( + task_id=task_id, with_task_context=task_context + ) + + 
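For illustration, a minimal sketch (not part of this PR) of driving a long-running task through the RPC helpers introduced in `_rpc_client.py` above. It assumes an already-connected `RabbitMQRPCClient`, an agreed `lrt_namespace`, and a task registered under the hypothetical name `"export_project"`; real callers would presumably go through the public `lrt_api` facade added further down in this diff rather than the private module.

```python
import asyncio
from typing import Any

from servicelib.long_running_tasks import _rpc_client
from servicelib.long_running_tasks.constants import DEFAULT_POLL_INTERVAL_S
from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient


async def run_long_running_task(
    rpc_client: RabbitMQRPCClient, lrt_namespace: str
) -> Any:
    task_context = {"user_id": 42}  # hypothetical caller context
    task_id = await _rpc_client.start_task(
        rpc_client,
        lrt_namespace,
        registered_task_name="export_project",  # hypothetical registered task
        task_context=task_context,
    )
    # poll the status until the task reports completion
    while True:
        status = await _rpc_client.get_task_status(
            rpc_client, lrt_namespace, task_context=task_context, task_id=task_id
        )
        if status.done:
            break
        await asyncio.sleep(DEFAULT_POLL_INTERVAL_S)
    # re-raises the task's original exception if it failed (decoded from
    # RPCTransferrableTaskError inside get_task_result)
    return await _rpc_client.get_task_result(
        rpc_client, lrt_namespace, task_context=task_context, task_id=task_id
    )
```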
+@router.expose(reraise_if_error_type=(BaseLongRunningError, RPCTransferrableTaskError)) +async def get_task_result( + long_running_manager: "LongRunningManager", + *, + task_context: TaskContext, + task_id: TaskId, +) -> str: + try: + result_field = await long_running_manager.tasks_manager.get_task_result( + task_id, with_task_context=task_context + ) + if result_field.str_error is not None: + raise RPCTransferrableTaskError(result_field.str_error) + + if result_field.str_result is not None: + return result_field.str_result + + msg = f"Please check {result_field=}, both fields should never be None" + raise ValueError(msg) + finally: + # Ensure the task is removed regardless of the result + with suppress(TaskNotFoundError): + await long_running_manager.tasks_manager.remove_task( + task_id, + with_task_context=task_context, + wait_for_removal=False, + ) + + +@router.expose(reraise_if_error_type=(BaseLongRunningError,)) +async def remove_task( + long_running_manager: "LongRunningManager", + *, + task_context: TaskContext, + task_id: TaskId, +) -> None: + await long_running_manager.tasks_manager.remove_task( + task_id, with_task_context=task_context, wait_for_removal=False + ) diff --git a/packages/service-library/src/servicelib/long_running_tasks/_serialization.py b/packages/service-library/src/servicelib/long_running_tasks/_serialization.py new file mode 100644 index 000000000000..16460f7ceeed --- /dev/null +++ b/packages/service-library/src/servicelib/long_running_tasks/_serialization.py @@ -0,0 +1,83 @@ +import base64 +import pickle +from abc import ABC, abstractmethod +from typing import Any, Final, Generic, TypeVar + +T = TypeVar("T") + + +class BaseObjectSerializer(ABC, Generic[T]): + + @classmethod + @abstractmethod + def get_init_kwargs_from_object(cls, obj: T) -> dict: + """dictionary reppreseting the kwargs passed to the __init__ method""" + + @classmethod + @abstractmethod + def prepare_object_init_kwargs(cls, data: dict) -> dict: + """cleanup data to be used as kwargs for the __init__ method if required""" + + +_SERIALIZERS: Final[dict[type, type[BaseObjectSerializer]]] = {} + + +def register_custom_serialization( + object_type: type, object_serializer: type[BaseObjectSerializer] +) -> None: + """Register a custom serializer for a specific object type. 
+ + Arguments: + object_type -- the type or parent class of the object to be serialized + object_serializer -- custom implementation of BaseObjectSerializer for the object type + """ + _SERIALIZERS[object_type] = object_serializer + + +_TYPE_FIELD: Final[str] = "__pickle__type__field__" +_MODULE_FIELD: Final[str] = "__pickle__module__field__" + + +def dumps(obj: Any) -> str: + """Serialize object to base64-encoded string.""" + to_serialize: Any | dict = obj + object_class = type(obj) + + for registered_class, object_serializer in _SERIALIZERS.items(): + if issubclass(object_class, registered_class): + to_serialize = { + _TYPE_FIELD: type(obj).__name__, + _MODULE_FIELD: type(obj).__module__, + **object_serializer.get_init_kwargs_from_object(obj), + } + break + + return base64.b85encode(pickle.dumps(to_serialize)).decode("utf-8") + + +def loads(obj_str: str) -> Any: + """Deserialize object from base64-encoded string.""" + data = pickle.loads(base64.b85decode(obj_str)) # noqa: S301 + + if isinstance(data, dict) and _TYPE_FIELD in data and _MODULE_FIELD in data: + try: + # Import the module and get the exception class + module = __import__(data[_MODULE_FIELD], fromlist=[data[_TYPE_FIELD]]) + exception_class = getattr(module, data[_TYPE_FIELD]) + + for registered_class, object_serializer in _SERIALIZERS.items(): + if issubclass(exception_class, registered_class): + # remove unrequired + data.pop(_TYPE_FIELD) + data.pop(_MODULE_FIELD) + + raise exception_class( + **object_serializer.prepare_object_init_kwargs(data) + ) + except (ImportError, AttributeError, TypeError) as e: + msg = f"Could not reconstruct object from data: {data}" + raise ValueError(msg) from e + + if isinstance(data, Exception): + raise data + return data diff --git a/packages/service-library/src/servicelib/long_running_tasks/_task.py b/packages/service-library/src/servicelib/long_running_tasks/_task.py deleted file mode 100644 index b1b0bedfcc05..000000000000 --- a/packages/service-library/src/servicelib/long_running_tasks/_task.py +++ /dev/null @@ -1,423 +0,0 @@ -import asyncio -import inspect -import logging -import traceback -import urllib.parse -from collections import deque -from contextlib import suppress -from datetime import datetime -from typing import Any, Protocol -from uuid import uuid4 - -from models_library.api_schemas_long_running_tasks.base import ( - ProgressPercent, - TaskProgress, -) -from pydantic import PositiveFloat - -from ._errors import ( - TaskAlreadyRunningError, - TaskCancelledError, - TaskExceptionError, - TaskNotCompletedError, - TaskNotFoundError, -) -from ._models import TaskId, TaskName, TaskStatus, TrackedTask - -logger = logging.getLogger(__name__) - - -async def _await_task(task: asyncio.Task) -> None: - await task - - -def _mark_task_to_remove_if_required( - task_id: TaskId, - tasks_to_remove: list[TaskId], - tracked_task: TrackedTask, - utc_now: datetime, - stale_timeout_s: float, -) -> None: - if tracked_task.fire_and_forget: - return - - if tracked_task.last_status_check is None: - # the task just added or never received a poll request - elapsed_from_start = (utc_now - tracked_task.started).seconds - if elapsed_from_start > stale_timeout_s: - tasks_to_remove.append(task_id) - else: - # the task status was already queried by the client - elapsed_from_last_poll = (utc_now - tracked_task.last_status_check).seconds - if elapsed_from_last_poll > stale_timeout_s: - tasks_to_remove.append(task_id) - - -TrackedTaskGroupDict = dict[TaskId, TrackedTask] -TaskContext = dict[str, Any] - - -class 
TasksManager: - """ - Monitors execution and results retrieval of a collection of asyncio.Tasks - """ - - def __init__( - self, - stale_task_check_interval_s: PositiveFloat, - stale_task_detect_timeout_s: PositiveFloat, - ): - # Task groups: Every taskname maps to multiple asyncio.Task within TrackedTask model - self._tasks_groups: dict[TaskName, TrackedTaskGroupDict] = {} - - self._cancel_task_timeout_s: PositiveFloat = 1.0 - - self.stale_task_check_interval_s = stale_task_check_interval_s - self.stale_task_detect_timeout_s = stale_task_detect_timeout_s - self._stale_tasks_monitor_task: asyncio.Task = asyncio.create_task( - self._stale_tasks_monitor_worker(), - name=f"{__name__}.stale_task_monitor_worker", - ) - - def get_task_group(self, task_name: TaskName) -> TrackedTaskGroupDict: - return self._tasks_groups[task_name] - - async def _stale_tasks_monitor_worker(self) -> None: - """ - A task is considered stale, if the task status is not queried - in the last `stale_task_detect_timeout_s` and it is not a fire and forget type of task. - - This helps detect clients who: - - started tasks and did not remove them - - crashed without removing the task - - did not fetch the result - """ - # NOTE: - # When a task has finished with a result or error and its - # status is being polled it would appear that there is - # an issue with the client. - # Since we own the client, we assume (for now) this - # will not be the case. - - while await asyncio.sleep(self.stale_task_check_interval_s, result=True): - utc_now = datetime.utcnow() - - tasks_to_remove: list[TaskId] = [] - for tasks in self._tasks_groups.values(): - for task_id, tracked_task in tasks.items(): - _mark_task_to_remove_if_required( - task_id, - tasks_to_remove, - tracked_task, - utc_now, - self.stale_task_detect_timeout_s, - ) - - # finally remove tasks and warn - for task_id in tasks_to_remove: - # NOTE: task can be in the following cases: - # - still ongoing - # - finished with a result - # - finished with errors - # we just print the status from where one can infer the above - logger.warning( - "Removing stale task '%s' with status '%s'", - task_id, - self.get_task_status( - task_id, with_task_context=None - ).model_dump_json(), - ) - await self.remove_task( - task_id, with_task_context=None, reraise_errors=False - ) - - @staticmethod - def create_task_id(task_name: TaskName) -> str: - assert len(task_name) > 0 - return f"{task_name}.{uuid4()}" - - def is_task_running(self, task_name: TaskName) -> bool: - """returns True if a task named `task_name` is running""" - if task_name not in self._tasks_groups: - return False - - managed_tasks_ids = list(self._tasks_groups[task_name].keys()) - return len(managed_tasks_ids) > 0 - - def list_tasks(self, with_task_context: TaskContext | None) -> list[TrackedTask]: - tasks: list[TrackedTask] = [] - for task_group in self._tasks_groups.values(): - if not with_task_context: - tasks.extend(task_group.values()) - else: - tasks.extend( - [ - task - for task in task_group.values() - if task.task_context == with_task_context - ] - ) - return tasks - - def add_task( - self, - task_name: TaskName, - task: asyncio.Task, - task_progress: TaskProgress, - task_context: TaskContext, - task_id: TaskId, - *, - fire_and_forget: bool, - ) -> TrackedTask: - if task_name not in self._tasks_groups: - self._tasks_groups[task_name] = {} - - tracked_task = TrackedTask( - task_id=task_id, - task=task, - task_name=task_name, - task_progress=task_progress, - task_context=task_context, - fire_and_forget=fire_and_forget, - ) 
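The `_serialization` helpers added above let services register serializers for exception types whose constructors cannot be rebuilt from a plain pickle, which is how `get_task_result` can transport a task's original error across the RabbitMQ boundary. A sketch of such a registration follows; the exception class and its fields are entirely hypothetical.

```python
from servicelib.long_running_tasks._serialization import (
    BaseObjectSerializer,
    dumps,
    loads,
    register_custom_serialization,
)


class ProjectLockedError(Exception):
    """Hypothetical error with keyword-only init arguments."""

    def __init__(self, *, project_id: str, owner: str) -> None:
        super().__init__(f"project {project_id} is locked by {owner}")
        self.project_id = project_id
        self.owner = owner


class ProjectLockedErrorSerializer(BaseObjectSerializer[ProjectLockedError]):
    @classmethod
    def get_init_kwargs_from_object(cls, obj: ProjectLockedError) -> dict:
        # captured on dumps() and stored alongside the type/module markers
        return {"project_id": obj.project_id, "owner": obj.owner}

    @classmethod
    def prepare_object_init_kwargs(cls, data: dict) -> dict:
        return data  # nothing to clean up in this toy example


register_custom_serialization(ProjectLockedError, ProjectLockedErrorSerializer)

payload = dumps(ProjectLockedError(project_id="p1", owner="alice"))
try:
    loads(payload)  # re-raises the reconstructed exception
except ProjectLockedError as err:
    assert err.owner == "alice"
```

Exception types that are not registered still round-trip as long as they pickle cleanly; the registry is only needed for the ones that do not.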
- self._tasks_groups[task_name][task_id] = tracked_task - - return tracked_task - - def _get_tracked_task( - self, task_id: TaskId, with_task_context: TaskContext | None - ) -> TrackedTask: - for tasks in self._tasks_groups.values(): - if task_id in tasks: - if with_task_context and ( - tasks[task_id].task_context != with_task_context - ): - raise TaskNotFoundError(task_id=task_id) - return tasks[task_id] - - raise TaskNotFoundError(task_id=task_id) - - def get_task_status( - self, task_id: TaskId, with_task_context: TaskContext | None - ) -> TaskStatus: - """ - returns: the status of the task, along with updates - form the progress - - raises TaskNotFoundError if the task cannot be found - """ - tracked_task: TrackedTask = self._get_tracked_task(task_id, with_task_context) - tracked_task.last_status_check = datetime.utcnow() - - task = tracked_task.task - done = task.done() - - return TaskStatus.model_validate( - { - "task_progress": tracked_task.task_progress, - "done": done, - "started": tracked_task.started, - } - ) - - def get_task_result( - self, task_id: TaskId, with_task_context: TaskContext | None - ) -> Any: - """ - returns: the result of the task - - raises TaskNotFoundError if the task cannot be found - raises TaskCancelledError if the task was cancelled - raises TaskNotCompletedError if the task is not completed - """ - tracked_task = self._get_tracked_task(task_id, with_task_context) - - try: - return tracked_task.task.result() - except asyncio.InvalidStateError as exc: - # the task is not ready - raise TaskNotCompletedError(task_id=task_id) from exc - except asyncio.CancelledError as exc: - # the task was cancelled - raise TaskCancelledError(task_id=task_id) from exc - - async def cancel_task( - self, task_id: TaskId, with_task_context: TaskContext | None - ) -> None: - """ - cancels the task - - raises TaskNotFoundError if the task cannot be found - """ - tracked_task = self._get_tracked_task(task_id, with_task_context) - await self._cancel_tracked_task(tracked_task.task, task_id, reraise_errors=True) - - async def _cancel_asyncio_task( - self, task: asyncio.Task, reference: str, *, reraise_errors: bool - ) -> None: - task.cancel() - with suppress(asyncio.CancelledError): - try: - try: - await asyncio.wait_for( - _await_task(task), timeout=self._cancel_task_timeout_s - ) - except asyncio.TimeoutError: - logger.warning( - "Timed out while awaiting for cancellation of '%s'", reference - ) - except Exception: # pylint:disable=broad-except - if reraise_errors: - raise - - async def _cancel_tracked_task( - self, task: asyncio.Task, task_id: TaskId, *, reraise_errors: bool - ) -> None: - try: - await self._cancel_asyncio_task( - task, task_id, reraise_errors=reraise_errors - ) - except Exception as e: # pylint:disable=broad-except - formatted_traceback = "".join(traceback.format_exception(e)) - raise TaskExceptionError( - task_id=task_id, exception=e, traceback=formatted_traceback - ) from e - - async def remove_task( - self, - task_id: TaskId, - with_task_context: TaskContext | None, - *, - reraise_errors: bool = True, - ) -> None: - """cancels and removes task""" - try: - tracked_task = self._get_tracked_task(task_id, with_task_context) - except TaskNotFoundError: - if reraise_errors: - raise - return - try: - await self._cancel_tracked_task( - tracked_task.task, task_id, reraise_errors=reraise_errors - ) - finally: - del self._tasks_groups[tracked_task.task_name][task_id] - - async def close(self) -> None: - """ - cancels all pending tasks and removes them before closing - """ 
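Looping back to the Redis-backed store added in `_redis_store.py` above, a small lifecycle sketch. The fields of `TaskData` are not visible in this diff, so an already-built instance is assumed, and the namespace value is purely illustrative.

```python
from servicelib.long_running_tasks._redis_store import RedisStore
from servicelib.long_running_tasks.models import TaskData
from settings_library.redis import RedisSettings


async def persist_task(
    task_id: str, task_data: TaskData, redis_settings: RedisSettings
) -> None:
    store = RedisStore(redis_settings, namespace="dynamic-services")  # illustrative namespace
    await store.setup()
    try:
        await store.add_task_data(task_id, task_data)
        # the data round-trips through a Redis hash keyed by namespace and task_id
        assert await store.get_task_data(task_id) is not None
        await store.mark_for_removal(task_id)
        assert await store.is_marked_for_removal(task_id)
    finally:
        await store.shutdown()
```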
- task_ids_to_remove: deque[TaskId] = deque() - - for tasks_dict in self._tasks_groups.values(): - for tracked_task in tasks_dict.values(): - task_ids_to_remove.append(tracked_task.task_id) - - for task_id in task_ids_to_remove: - # when closing we do not care about pending errors - await self.remove_task(task_id, None, reraise_errors=False) - - await self._cancel_asyncio_task( - self._stale_tasks_monitor_task, "stale_monitor", reraise_errors=False - ) - - -class TaskProtocol(Protocol): - async def __call__( - self, progress: TaskProgress, *args: Any, **kwargs: Any - ) -> Any: ... - - @property - def __name__(self) -> str: ... - - -def start_task( - tasks_manager: TasksManager, - task: TaskProtocol, - *, - unique: bool = False, - task_context: TaskContext | None = None, - task_name: str | None = None, - fire_and_forget: bool = False, - **task_kwargs: Any, -) -> TaskId: - """ - Creates a background task from an async function. - - An asyncio task will be created out of it by injecting a `TaskProgress` as the first - positional argument and adding all `handler_kwargs` as named parameters. - - NOTE: the progress is automatically bounded between 0 and 1 - NOTE: the `task` name must be unique in the module, otherwise when using - the unique parameter is True, it will not be able to distinguish between - them. - - Args: - tasks_manager (TasksManager): the tasks manager - task (TaskProtocol): the tasks to be run in the background - unique (bool, optional): If True, then only one such named task may be run. Defaults to False. - task_context (Optional[TaskContext], optional): a task context storage can be retrieved during the task lifetime. Defaults to None. - task_name (Optional[str], optional): optional task name. Defaults to None. - fire_and_forget: if True, then the task will not be cancelled if the status is never called - - Raises: - TaskAlreadyRunningError: if unique is True, will raise if more than 1 such named task is started - - Returns: - TaskId: the task unique identifier - """ - - # NOTE: If not task name is given, it will be composed of the handler's module and it's name - # to keep the urls shorter and more meaningful. 
- handler_module = inspect.getmodule(task) - handler_module_name = handler_module.__name__ if handler_module else "" - task_name = task_name or f"{handler_module_name}.{task.__name__}" - task_name = urllib.parse.quote(task_name, safe="") - - # only one unique task can be running - if unique and tasks_manager.is_task_running(task_name): - managed_tasks_ids = list(tasks_manager.get_task_group(task_name).keys()) - assert len(managed_tasks_ids) == 1 # nosec - managed_task: TrackedTask = tasks_manager.get_task_group(task_name)[ - managed_tasks_ids[0] - ] - raise TaskAlreadyRunningError(task_name=task_name, managed_task=managed_task) - - task_id = tasks_manager.create_task_id(task_name=task_name) - task_progress = TaskProgress.create(task_id=task_id) - - # bind the task with progress 0 and 1 - async def _progress_task(progress: TaskProgress, handler: TaskProtocol): - progress.update(message="starting", percent=ProgressPercent(0)) - try: - return await handler(progress, **task_kwargs) - finally: - progress.update(message="finished", percent=ProgressPercent(1)) - - async_task = asyncio.create_task( - _progress_task(task_progress, task), name=f"{task_name}" - ) - - tracked_task = tasks_manager.add_task( - task_name=task_name, - task=async_task, - task_progress=task_progress, - task_context=task_context or {}, - fire_and_forget=fire_and_forget, - task_id=task_id, - ) - - return tracked_task.task_id - - -__all__: tuple[str, ...] = ( - "TaskAlreadyRunningError", - "TaskCancelledError", - "TaskId", - "TasksManager", - "TaskProgress", - "TaskProtocol", - "TaskStatus", - "TrackedTask", -) diff --git a/packages/service-library/src/servicelib/long_running_tasks/constants.py b/packages/service-library/src/servicelib/long_running_tasks/constants.py new file mode 100644 index 000000000000..b5e729665ccd --- /dev/null +++ b/packages/service-library/src/servicelib/long_running_tasks/constants.py @@ -0,0 +1,7 @@ +from datetime import timedelta +from typing import Final + +DEFAULT_POLL_INTERVAL_S: Final[float] = 1 + +DEFAULT_STALE_TASK_CHECK_INTERVAL: Final[timedelta] = timedelta(minutes=1) +DEFAULT_STALE_TASK_DETECT_TIMEOUT: Final[timedelta] = timedelta(minutes=5) diff --git a/packages/service-library/src/servicelib/long_running_tasks/_errors.py b/packages/service-library/src/servicelib/long_running_tasks/errors.py similarity index 60% rename from packages/service-library/src/servicelib/long_running_tasks/_errors.py rename to packages/service-library/src/servicelib/long_running_tasks/errors.py index 33439c6436f3..127a7ae1d5a3 100644 --- a/packages/service-library/src/servicelib/long_running_tasks/_errors.py +++ b/packages/service-library/src/servicelib/long_running_tasks/errors.py @@ -5,6 +5,13 @@ class BaseLongRunningError(OsparcErrorMixin, Exception): """base exception for this module""" +class TaskNotRegisteredError(BaseLongRunningError): + msg_template: str = ( + "no task with task_name='{task_name}' was found in the task registry tasks={tasks}. " + "Make sure it's registered before starting it." 
+ ) + + class TaskAlreadyRunningError(BaseLongRunningError): msg_template: str = "{task_name} must be unique, found: '{managed_task}'" @@ -27,6 +34,14 @@ class TaskExceptionError(BaseLongRunningError): ) +class TaskRaisedUnserializableError(BaseLongRunningError): + msg_template: str = ( + "Task {task_id} raised an exception that could not be serialized.\n" + "Original exception: '{original_exception_str}'\n" + "As a consequence, the following error was raised: '{exception}'" + ) + + class TaskClientTimeoutError(BaseLongRunningError): msg_template: str = ( "Timed out after {timeout} seconds while awaiting '{task_id}' to complete" @@ -37,3 +52,10 @@ class GenericClientError(BaseLongRunningError): msg_template: str = ( "Unexpected error while '{action}' for '{task_id}': status={status} body={body}" ) + + +class RPCTransferrableTaskError(Exception): + """ + The message contains the task's exception serialized as string. + Decode it and raise to obtain the task's original exception. + """ diff --git a/packages/service-library/src/servicelib/long_running_tasks/long_running_client_helper.py b/packages/service-library/src/servicelib/long_running_tasks/long_running_client_helper.py new file mode 100644 index 000000000000..b51acaf1b862 --- /dev/null +++ b/packages/service-library/src/servicelib/long_running_tasks/long_running_client_helper.py @@ -0,0 +1,46 @@ +import logging + +import redis.asyncio as aioredis +from settings_library.redis import RedisDatabase, RedisSettings + +from ..logging_utils import log_context +from ..redis._client import RedisClientSDK +from .models import LRTNamespace + +_logger = logging.getLogger(__name__) + + +class LongRunningClientHelper: + def __init__(self, redis_settings: RedisSettings): + self.redis_settings = redis_settings + + self._client: RedisClientSDK | None = None + + async def setup(self) -> None: + self._client = RedisClientSDK( + self.redis_settings.build_redis_dsn(RedisDatabase.LONG_RUNNING_TASKS), + client_name="long_running_tasks_cleanup_client", + ) + await self._client.setup() + + async def shutdown(self) -> None: + if self._client: + await self._client.shutdown() + + @property + def _redis(self) -> aioredis.Redis: + assert self._client # nosec + return self._client.redis + + async def cleanup(self, lrt_namespace: LRTNamespace) -> None: + """removes Redis keys associated to the LRTNamespace if they exist""" + keys_to_remove: list[str] = [ + x async for x in self._redis.scan_iter(f"{lrt_namespace}*") + ] + with log_context( + _logger, + logging.DEBUG, + msg=f"Removing {keys_to_remove=} from Redis for {lrt_namespace=}", + ): + if len(keys_to_remove) > 0: + await self._redis.delete(*keys_to_remove) diff --git a/packages/service-library/src/servicelib/long_running_tasks/lrt_api.py b/packages/service-library/src/servicelib/long_running_tasks/lrt_api.py new file mode 100644 index 000000000000..02f4c5265f38 --- /dev/null +++ b/packages/service-library/src/servicelib/long_running_tasks/lrt_api.py @@ -0,0 +1,115 @@ +from typing import Any + +from ..rabbitmq._client_rpc import RabbitMQRPCClient +from . 
import _rpc_client +from .models import ( + LRTNamespace, + RegisteredTaskName, + TaskBase, + TaskContext, + TaskId, + TaskStatus, +) + + +async def start_task( + rabbitmq_rpc_client: RabbitMQRPCClient, + lrt_namespace: LRTNamespace, + registered_task_name: RegisteredTaskName, + *, + unique: bool = False, + task_context: TaskContext | None = None, + task_name: str | None = None, + fire_and_forget: bool = False, + **task_kwargs: Any, +) -> TaskId: + """ + Creates a background task from an async function. + + An asyncio task will be created out of it by injecting a `TaskProgress` as the first + positional argument and adding all `handler_kwargs` as named parameters. + + NOTE: the progress is automatically bounded between 0 and 1 + NOTE: the `task` name must be unique in the module, otherwise when using + the unique parameter is True, it will not be able to distinguish between + them. + + Args: + tasks_manager (TasksManager): the tasks manager + task (TaskProtocol): the tasks to be run in the background + unique (bool, optional): If True, then only one such named task may be run. Defaults to False. + task_context (Optional[TaskContext], optional): a task context storage can be retrieved during the task lifetime. Defaults to None. + task_name (Optional[str], optional): optional task name. Defaults to None. + fire_and_forget: if True, then the task will not be cancelled if the status is never called + + Raises: + TaskAlreadyRunningError: if unique is True, will raise if more than 1 such named task is started + + Returns: + TaskId: the task unique identifier + """ + + return await _rpc_client.start_task( + rabbitmq_rpc_client, + lrt_namespace, + registered_task_name=registered_task_name, + unique=unique, + task_context=task_context, + task_name=task_name, + fire_and_forget=fire_and_forget, + **task_kwargs, + ) + + +async def list_tasks( + rabbitmq_rpc_client: RabbitMQRPCClient, + lrt_namespace: LRTNamespace, + task_context: TaskContext, +) -> list[TaskBase]: + return await _rpc_client.list_tasks( + rabbitmq_rpc_client, lrt_namespace, task_context=task_context + ) + + +async def get_task_status( + rabbitmq_rpc_client: RabbitMQRPCClient, + lrt_namespace: LRTNamespace, + task_context: TaskContext, + task_id: TaskId, +) -> TaskStatus: + """returns the status of a task""" + return await _rpc_client.get_task_status( + rabbitmq_rpc_client, lrt_namespace, task_id=task_id, task_context=task_context + ) + + +async def get_task_result( + rabbitmq_rpc_client: RabbitMQRPCClient, + lrt_namespace: LRTNamespace, + task_context: TaskContext, + task_id: TaskId, +) -> Any: + return await _rpc_client.get_task_result( + rabbitmq_rpc_client, + lrt_namespace, + task_context=task_context, + task_id=task_id, + ) + + +async def remove_task( + rabbitmq_rpc_client: RabbitMQRPCClient, + lrt_namespace: LRTNamespace, + task_context: TaskContext, + task_id: TaskId, +) -> None: + """cancels and removes a task + + When `wait_for_removal` is True, `cancellationt_timeout` is set to _RPC_TIMEOUT_SHORT_REQUESTS + """ + await _rpc_client.remove_task( + rabbitmq_rpc_client, + lrt_namespace, + task_id=task_id, + task_context=task_context, + ) diff --git a/packages/service-library/src/servicelib/long_running_tasks/manager.py b/packages/service-library/src/servicelib/long_running_tasks/manager.py new file mode 100644 index 000000000000..6e6ebe6a820f --- /dev/null +++ b/packages/service-library/src/servicelib/long_running_tasks/manager.py @@ -0,0 +1,87 @@ +import datetime +from abc import ABC, abstractmethod + +from 
settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings + +from ..rabbitmq._client_rpc import RabbitMQRPCClient +from ._rabbit_namespace import get_rabbit_namespace +from ._rpc_server import router +from .models import LRTNamespace, TaskContext +from .task import TasksManager + + +class LongRunningManager(ABC): + """ + Provides a commond inteface for aiohttp and fastapi services + """ + + def __init__( + self, + stale_task_check_interval: datetime.timedelta, + stale_task_detect_timeout: datetime.timedelta, + redis_settings: RedisSettings, + rabbit_settings: RabbitSettings, + lrt_namespace: LRTNamespace, + ): + self._tasks_manager = TasksManager( + stale_task_check_interval=stale_task_check_interval, + stale_task_detect_timeout=stale_task_detect_timeout, + redis_settings=redis_settings, + lrt_namespace=lrt_namespace, + ) + self._lrt_namespace = lrt_namespace + self.rabbit_settings = rabbit_settings + self._rpc_server: RabbitMQRPCClient | None = None + self._rpc_client: RabbitMQRPCClient | None = None + + @property + def tasks_manager(self) -> TasksManager: + return self._tasks_manager + + @property + def rpc_server(self) -> RabbitMQRPCClient: + assert self._rpc_server is not None # nosec + return self._rpc_server + + @property + def rpc_client(self) -> RabbitMQRPCClient: + assert self._rpc_client is not None # nosec + return self._rpc_client + + @property + def lrt_namespace(self) -> LRTNamespace: + return self._lrt_namespace + + async def setup(self) -> None: + await self._tasks_manager.setup() + self._rpc_server = await RabbitMQRPCClient.create( + client_name=f"lrt-server-{self.lrt_namespace}", + settings=self.rabbit_settings, + ) + self._rpc_client = await RabbitMQRPCClient.create( + client_name=f"lrt-client-{self.lrt_namespace}", + settings=self.rabbit_settings, + ) + + await self.rpc_server.register_router( + router, + get_rabbit_namespace(self.lrt_namespace), + self, + ) + + async def teardown(self) -> None: + await self._tasks_manager.teardown() + + if self._rpc_server is not None: + await self._rpc_server.close() + self._rpc_server = None + + if self._rpc_client is not None: + await self._rpc_client.close() + self._rpc_client = None + + @staticmethod + @abstractmethod + def get_task_context(request) -> TaskContext: + """return the task context based on the current request""" diff --git a/packages/service-library/src/servicelib/long_running_tasks/models.py b/packages/service-library/src/servicelib/long_running_tasks/models.py new file mode 100644 index 000000000000..193c5eadbde3 --- /dev/null +++ b/packages/service-library/src/servicelib/long_running_tasks/models.py @@ -0,0 +1,137 @@ +# mypy: disable-error-code=truthy-function +from collections.abc import Awaitable, Callable, Coroutine +from dataclasses import dataclass +from datetime import UTC, datetime +from typing import Annotated, Any, TypeAlias + +from common_library.basic_types import DEFAULT_FACTORY +from models_library.api_schemas_long_running_tasks.base import ( + ProgressMessage, + ProgressPercent, + TaskId, + TaskProgress, +) +from models_library.api_schemas_long_running_tasks.tasks import ( + TaskBase, + TaskGet, + TaskResult, + TaskStatus, +) +from pydantic import BaseModel, ConfigDict, Field, PositiveFloat, model_validator + +TaskType: TypeAlias = Callable[..., Coroutine[Any, Any, Any]] + +ProgressCallback: TypeAlias = Callable[ + [ProgressMessage, ProgressPercent | None, TaskId], Awaitable[None] +] + +RequestBody: TypeAlias = Any +TaskContext: TypeAlias = dict[str, Any] + 
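
To see how the new RPC-facing `lrt_api` module is meant to be consumed, here is a minimal client-side sketch. It assumes a reachable RabbitMQ broker, that the server side runs a `LongRunningManager` for the (hypothetical) namespace `"my-service"`, and that a task named `my_example_task` has been registered there; the polling interval reuses `DEFAULT_POLL_INTERVAL_S` from the new `constants` module.

```python
import asyncio

from servicelib.long_running_tasks import lrt_api
from servicelib.long_running_tasks.constants import DEFAULT_POLL_INTERVAL_S
from servicelib.long_running_tasks.models import LRTNamespace, TaskContext
from servicelib.rabbitmq import RabbitMQRPCClient
from settings_library.rabbit import RabbitSettings


async def run_remote_task(rabbit_settings: RabbitSettings) -> None:
    # "my-service" and "my_example_task" are placeholders for a real LRT
    # namespace and a task registered server-side via TaskRegistry.register()
    lrt_namespace: LRTNamespace = "my-service"
    task_context: TaskContext = {"user_id": 42}

    rpc_client = await RabbitMQRPCClient.create(
        client_name="lrt-usage-example", settings=rabbit_settings
    )
    try:
        task_id = await lrt_api.start_task(
            rpc_client,
            lrt_namespace,
            "my_example_task",
            task_context=task_context,
            items_to_process=10,  # forwarded to the task via **task_kwargs
        )

        # poll the status until the task reports it is done
        while not (
            await lrt_api.get_task_status(
                rpc_client, lrt_namespace, task_context, task_id
            )
        ).done:
            await asyncio.sleep(DEFAULT_POLL_INTERVAL_S)

        # retrieve whatever the task produced once it is done
        result = await lrt_api.get_task_result(
            rpc_client, lrt_namespace, task_context, task_id
        )
        print("task finished with:", result)
    finally:
        await rpc_client.close()
```

`list_tasks` and `remove_task` follow the same calling convention, always scoped by the LRT namespace and the task context.
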
+LRTNamespace: TypeAlias = str + +RegisteredTaskName: TypeAlias = str + + +class ResultField(BaseModel): + str_result: str | None = None + str_error: str | None = None + + @model_validator(mode="after") + def validate_mutually_exclusive(self) -> "ResultField": + if self.str_result is not None and self.str_error is not None: + msg = "Cannot set both 'result' and 'error' - they are mutually exclusive" + raise ValueError(msg) + return self + + +class TaskData(BaseModel): + registered_task_name: RegisteredTaskName + task_id: str + task_progress: TaskProgress + # NOTE: this context lifetime is with the tracked task (similar to aiohttp storage concept) + task_context: TaskContext + fire_and_forget: Annotated[ + bool, + Field( + description="if True then the task will not be auto-cancelled if no one enquires of its status" + ), + ] + + started: Annotated[datetime, Field(default_factory=lambda: datetime.now(UTC))] = ( + DEFAULT_FACTORY + ) + last_status_check: Annotated[ + datetime | None, + Field( + description=( + "used to detect when if the task is not actively " + "polled by the client who created it" + ) + ), + ] = None + + is_done: Annotated[ + bool, + Field(description="True when the task finished running with or without errors"), + ] = False + result_field: Annotated[ + ResultField | None, Field(description="the result of the task") + ] = None + marked_for_removal: Annotated[ + bool, + Field(description=("if True, indicates the task is marked for removal")), + ] = False + + model_config = ConfigDict( + arbitrary_types_allowed=True, + json_schema_extra={ + "examples": [ + { + "registered_task_name": "a-task-name", + "task_id": "1a119618-7186-4bc1-b8de-7e3ff314cb7e", + "task_name": "running-task", + "task_status": "running", + "task_progress": { + "task_id": "1a119618-7186-4bc1-b8de-7e3ff314cb7e" + }, + "task_context": {"key": "value"}, + "fire_and_forget": False, + } + ] + }, + ) + + +class ClientConfiguration(BaseModel): + router_prefix: str + default_timeout: PositiveFloat + + +@dataclass(frozen=True) +class LRTask: + progress: TaskProgress + _result: Coroutine[Any, Any, Any] | None = None + + def done(self) -> bool: + return self._result is not None + + async def result(self) -> Any: + if not self._result: + msg = "No result ready!" + raise ValueError(msg) + return await self._result + + +__all__: tuple[str, ...] 
= ( + "ProgressMessage", + "ProgressPercent", + "TaskBase", + "TaskGet", + "TaskId", + "TaskProgress", + "TaskResult", + "TaskStatus", +) + +# nopycln: file diff --git a/packages/service-library/src/servicelib/long_running_tasks/task.py b/packages/service-library/src/servicelib/long_running_tasks/task.py new file mode 100644 index 000000000000..adb76dc0699e --- /dev/null +++ b/packages/service-library/src/servicelib/long_running_tasks/task.py @@ -0,0 +1,628 @@ +import asyncio +import datetime +import functools +import inspect +import logging +import urllib.parse +from contextlib import suppress +from typing import Any, ClassVar, Final, Protocol, TypeAlias +from uuid import uuid4 + +from common_library.async_tools import cancel_wait_task +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs +from models_library.api_schemas_long_running_tasks.base import TaskProgress +from pydantic import NonNegativeFloat, PositiveFloat +from settings_library.redis import RedisDatabase, RedisSettings +from tenacity import ( + AsyncRetrying, + retry_unless_exception_type, + stop_after_delay, + wait_exponential, +) + +from ..background_task import create_periodic_task +from ..logging_utils import log_catch, log_context +from ..redis import RedisClientSDK, exclusive +from ..utils import limited_gather +from ._redis_store import RedisStore +from ._serialization import dumps +from .errors import ( + TaskAlreadyRunningError, + TaskCancelledError, + TaskNotCompletedError, + TaskNotFoundError, + TaskNotRegisteredError, + TaskRaisedUnserializableError, +) +from .models import ( + LRTNamespace, + RegisteredTaskName, + ResultField, + TaskBase, + TaskContext, + TaskData, + TaskId, + TaskStatus, +) + +_logger = logging.getLogger(__name__) + + +_CANCEL_TASKS_CHECK_INTERVAL: Final[datetime.timedelta] = datetime.timedelta(seconds=5) +_STATUS_UPDATE_CHECK_INTERNAL: Final[datetime.timedelta] = datetime.timedelta(seconds=1) +_MAX_EXCLUSIVE_TASK_CANCEL_TIMEOUT: Final[NonNegativeFloat] = 5 +_TASK_REMOVAL_MAX_WAIT: Final[NonNegativeFloat] = 60 +_PARALLEL_TASKS_CANCELLATION: Final[int] = 5 + +AllowedErrrors: TypeAlias = tuple[type[BaseException], ...] + + +class TaskProtocol(Protocol): + async def __call__( + self, progress: TaskProgress, *args: Any, **kwargs: Any + ) -> Any: ... + + @property + def __name__(self) -> str: ... 
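
Any coroutine whose first positional argument is a `TaskProgress` (and that exposes a `__name__`, as plain functions do) satisfies `TaskProtocol`. Below is a small sketch of such a task and its registration through the `TaskRegistry` introduced next; the task body, its parameter, and `MyDomainError` are illustrative only, and the awaited `progress.update(...)` call mirrors the `_task_with_progress` wrapper further down in this file.

```python
from models_library.api_schemas_long_running_tasks.base import TaskProgress

from servicelib.long_running_tasks.task import TaskRegistry


class MyDomainError(Exception):
    """raised by the task and expected by its callers"""


async def sleep_and_count(progress: TaskProgress, *, up_to: int) -> int:
    # the first positional argument is the injected TaskProgress; everything
    # else arrives as keyword arguments passed through start_task(**task_kwargs)
    for i in range(up_to):
        await progress.update(
            message=f"counted {i + 1}/{up_to}", percent=(i + 1) / up_to
        )
    return up_to


# make the task discoverable by name for TasksManager.start_task();
# exceptions listed in allowed_errors are treated as expected outcomes
# and are not logged as unexpected by the tasks monitor
TaskRegistry.register(sleep_and_count, allowed_errors=(MyDomainError,))
```
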
+ + +class TaskRegistry: + _REGISTERED_TASKS: ClassVar[ + dict[RegisteredTaskName, tuple[AllowedErrrors, TaskProtocol]] + ] = {} + + @classmethod + def register( + cls, + task: TaskProtocol, + allowed_errors: AllowedErrrors = (), + **partial_kwargs, + ) -> None: + partial_task = functools.partial(task, **partial_kwargs) + # allows to call the partial via it's original name + partial_task.__name__ = task.__name__ # type: ignore[attr-defined] + cls._REGISTERED_TASKS[task.__name__] = [allowed_errors, partial_task] # type: ignore[assignment] + + @classmethod + def get_registered_tasks( + cls, + ) -> dict[RegisteredTaskName, tuple[AllowedErrrors, TaskProtocol]]: + return cls._REGISTERED_TASKS + + @classmethod + def get_task(cls, task_name: RegisteredTaskName) -> TaskProtocol: + return cls._REGISTERED_TASKS[task_name][1] + + @classmethod + def get_allowed_errors(cls, task_name: RegisteredTaskName) -> AllowedErrrors: + return cls._REGISTERED_TASKS[task_name][0] + + @classmethod + def unregister(cls, task: TaskProtocol) -> None: + if task.__name__ in cls._REGISTERED_TASKS: + del cls._REGISTERED_TASKS[task.__name__] + + +async def _get_tasks_to_remove( + tracked_tasks: RedisStore, + stale_task_detect_timeout_s: PositiveFloat, +) -> list[tuple[TaskId, TaskContext]]: + utc_now = datetime.datetime.now(tz=datetime.UTC) + + tasks_to_remove: list[tuple[TaskId, TaskContext]] = [] + + for tracked_task in await tracked_tasks.list_tasks_data(): + if tracked_task.fire_and_forget: + continue + + if tracked_task.last_status_check is None: + # the task just added or never received a poll request + elapsed_from_start = (utc_now - tracked_task.started).total_seconds() + if elapsed_from_start > stale_task_detect_timeout_s: + tasks_to_remove.append( + (tracked_task.task_id, tracked_task.task_context) + ) + else: + # the task status was already queried by the client + elapsed_from_last_poll = ( + utc_now - tracked_task.last_status_check + ).total_seconds() + if elapsed_from_last_poll > stale_task_detect_timeout_s: + tasks_to_remove.append( + (tracked_task.task_id, tracked_task.task_context) + ) + return tasks_to_remove + + +class TasksManager: # pylint:disable=too-many-instance-attributes + """ + Monitors execution and results retrieval of a collection of asyncio.Tasks + """ + + def __init__( + self, + redis_settings: RedisSettings, + stale_task_check_interval: datetime.timedelta, + stale_task_detect_timeout: datetime.timedelta, + lrt_namespace: LRTNamespace, + ): + # Task groups: Every taskname maps to multiple asyncio.Task within TrackedTask model + self._tasks_data = RedisStore(redis_settings, lrt_namespace) + self._created_tasks: dict[TaskId, asyncio.Task] = {} + + self.stale_task_check_interval = stale_task_check_interval + self.stale_task_detect_timeout_s: PositiveFloat = ( + stale_task_detect_timeout.total_seconds() + ) + self.lrt_namespace = lrt_namespace + self.redis_settings = redis_settings + + self.locks_redis_client_sdk: RedisClientSDK | None = None + + # stale_tasks_monitor + self._task_stale_tasks_monitor: asyncio.Task | None = None + self._started_event_task_stale_tasks_monitor = asyncio.Event() + + # cancelled_tasks_removal + self._task_cancelled_tasks_removal: asyncio.Task | None = None + self._started_event_task_cancelled_tasks_removal = asyncio.Event() + + # tasks_monitor + self._task_tasks_monitor: asyncio.Task | None = None + self._started_event_task_tasks_monitor = asyncio.Event() + + async def setup(self) -> None: + await self._tasks_data.setup() + + self.locks_redis_client_sdk = 
RedisClientSDK( + self.redis_settings.build_redis_dsn(RedisDatabase.LOCKS), + client_name=f"{__name__}_{self.lrt_namespace}_lock", + ) + await self.locks_redis_client_sdk.setup() + + # stale_tasks_monitor + self._task_stale_tasks_monitor = create_periodic_task( + task=exclusive( + self.locks_redis_client_sdk, + lock_key=f"{__name__}_{self.lrt_namespace}_stale_tasks_monitor", + )(self._stale_tasks_monitor), + interval=self.stale_task_check_interval, + task_name=f"{__name__}.{self._stale_tasks_monitor.__name__}", + ) + await self._started_event_task_stale_tasks_monitor.wait() + + # cancelled_tasks_removal + self._task_cancelled_tasks_removal = create_periodic_task( + task=self._cancelled_tasks_removal, + interval=_CANCEL_TASKS_CHECK_INTERVAL, + task_name=f"{__name__}.{self._cancelled_tasks_removal.__name__}", + ) + await self._started_event_task_cancelled_tasks_removal.wait() + + # tasks_monitor + self._task_tasks_monitor = create_periodic_task( + task=self._tasks_monitor, + interval=_STATUS_UPDATE_CHECK_INTERNAL, + task_name=f"{__name__}.{self._tasks_monitor.__name__}", + ) + await self._started_event_task_tasks_monitor.wait() + + async def teardown(self) -> None: + # stop cancelled_tasks_removal + if self._task_cancelled_tasks_removal: + await cancel_wait_task(self._task_cancelled_tasks_removal) + + # stopping only tasks that are handled by this manager + # otherwise it will cancel long running tasks that were running in diffierent processes + async def _remove_local_task(task_data: TaskData) -> None: + with log_catch(_logger, reraise=False): + await self.remove_task( + task_data.task_id, + task_data.task_context, + wait_for_removal=False, + ) + await self._attempt_to_remove_local_task(task_data.task_id) + + await limited_gather( + *[ + _remove_local_task(tracked_task) + for task_id in self._created_tasks + if (tracked_task := await self._tasks_data.get_task_data(task_id)) + is not None + ], + log=_logger, + limit=_PARALLEL_TASKS_CANCELLATION, + ) + + # stop stale_tasks_monitor + if self._task_stale_tasks_monitor: + await cancel_wait_task( + self._task_stale_tasks_monitor, + max_delay=_MAX_EXCLUSIVE_TASK_CANCEL_TIMEOUT, + ) + + # stop tasks_monitor + if self._task_tasks_monitor: + await cancel_wait_task(self._task_tasks_monitor) + + if self.locks_redis_client_sdk is not None: + await self.locks_redis_client_sdk.shutdown() + + await self._tasks_data.shutdown() + + async def _stale_tasks_monitor(self) -> None: + """ + A task is considered stale, if the task status is not queried + in the last `stale_task_detect_timeout_s` and it is not a fire and forget type of task. + + This helps detect clients who: + - started tasks and did not remove them + - crashed without removing the task + - did not fetch the result + """ + # NOTE: + # When a task has finished with a result or error and its + # status is being polled it would appear that there is + # an issue with the client. + # Since we own the client, we assume (for now) this + # will not be the case. 
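
The staleness rule applied by `_get_tasks_to_remove` boils down to a single predicate: ignore fire-and-forget tasks, otherwise compare "now" against the last poll (or the start time if the task was never polled). The sketch below restates that rule as a self-contained function, using the 5-minute `DEFAULT_STALE_TASK_DETECT_TIMEOUT` from the new `constants` module as the default.

```python
from datetime import UTC, datetime, timedelta


def is_stale(
    *,
    fire_and_forget: bool,
    started: datetime,
    last_status_check: datetime | None,
    detect_timeout: timedelta = timedelta(minutes=5),  # DEFAULT_STALE_TASK_DETECT_TIMEOUT
) -> bool:
    # fire-and-forget tasks are never reclaimed by the stale-tasks monitor
    if fire_and_forget:
        return False
    # fall back to the start time when the client never asked for the status
    reference = last_status_check or started
    return (datetime.now(tz=UTC) - reference) > detect_timeout


# a task started 10 minutes ago and never polled is a removal candidate
assert is_stale(
    fire_and_forget=False,
    started=datetime.now(tz=UTC) - timedelta(minutes=10),
    last_status_check=None,
)
```
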
+ + self._started_event_task_stale_tasks_monitor.set() + + tasks_to_remove = await _get_tasks_to_remove( + self._tasks_data, self.stale_task_detect_timeout_s + ) + + # finally remove tasks and warn + for task_id, task_context in tasks_to_remove: + # NOTE: task can be in the following cases: + # - still ongoing + # - finished with a result + # - finished with errors + # we just print the status from where one can infer the above + with suppress(TaskNotFoundError): + task_status = await self.get_task_status( + task_id, with_task_context=task_context, exclude_to_remove=False + ) + with log_context( + _logger, + logging.WARNING, + f"Removing stale task '{task_id}' with status '{task_status.model_dump_json()}'", + ): + await self.remove_task( + task_id, with_task_context=task_context, wait_for_removal=True + ) + + async def _cancelled_tasks_removal(self) -> None: + """ + Periodically checks which tasks are marked for removal and attempts to remove the + task if it's handled by this process. + """ + self._started_event_task_cancelled_tasks_removal.set() + + tasks_data = await self._tasks_data.list_tasks_data() + await limited_gather( + *( + self._attempt_to_remove_local_task(x.task_id) + for x in tasks_data + if x.marked_for_removal is True + ), + limit=_PARALLEL_TASKS_CANCELLATION, + ) + + async def _tasks_monitor(self) -> None: # noqa: C901 + """ + A task which monitors locally running tasks and updates their status + in the Redis store when they are done. + """ + self._started_event_task_tasks_monitor.set() + task_id: TaskId + for task_id in set(self._created_tasks.keys()): + if task := self._created_tasks.get(task_id, None): + is_done = task.done() + if not is_done: + # task is still running, do not update + continue + + # write to redis only when done + task_data = await self._tasks_data.get_task_data(task_id) + if task_data is None or task_data.is_done: + # already done and updatet data in redis + continue + + result_field: ResultField | None = None + # get task result + try: + result_field = ResultField(str_result=dumps(task.result())) + except asyncio.InvalidStateError: + # task was not completed try again next time and see if it is done + continue + except asyncio.CancelledError: + result_field = ResultField( + str_error=dumps(TaskCancelledError(task_id=task_id)) + ) + # NOTE: if the task is itself cancelled it shall re-raise: see https://superfastpython.com/asyncio-cancellederror-consumed/ + current_task = asyncio.current_task() + assert current_task is not None # nosec + if current_task.cancelling() > 0: + # owner function is being cancelled -> propagate cancellation + raise + except Exception as e: # pylint:disable=broad-except + allowed_errors = TaskRegistry.get_allowed_errors( + task_data.registered_task_name + ) + if type(e) not in allowed_errors: + _logger.exception( + **create_troubleshooting_log_kwargs( + ( + f"Execution of {task_id=} finished with unexpected error, " + f"only the following {allowed_errors=} are permitted" + ), + error=e, + error_context={ + "task_id": task_id, + "task_data": task_data, + "namespace": self.lrt_namespace, + }, + ), + ) + try: + result_field = ResultField(str_error=dumps(e)) + except ( + Exception # pylint:disable=broad-except + ) as serialization_error: + _logger.exception( + **create_troubleshooting_log_kwargs( + ( + f"Execution of {task_id=} finished with an error " + f"which could not be serialized" + ), + error=serialization_error, + tip="Check the error above for more details", + ), + ) + result_field = ResultField( + str_error=dumps( + 
TaskRaisedUnserializableError( + task_id=task_id, + exception=serialization_error, + original_exception_str=f"{e}", + ) + ) + ) + + # update and store in Redis + updates = {"is_done": is_done, "result_field": task_data.result_field} + if result_field is not None: + updates["result_field"] = result_field + await self._tasks_data.update_task_data(task_id, updates=updates) + + async def list_tasks(self, with_task_context: TaskContext | None) -> list[TaskBase]: + if not with_task_context: + return [ + TaskBase(task_id=task.task_id) + for task in (await self._tasks_data.list_tasks_data()) + if task.marked_for_removal is False + ] + + return [ + TaskBase(task_id=task.task_id) + for task in (await self._tasks_data.list_tasks_data()) + if task.task_context == with_task_context + and task.marked_for_removal is False + ] + + async def _get_tracked_task( + self, task_id: TaskId, with_task_context: TaskContext + ) -> TaskData: + task_data = await self._tasks_data.get_task_data(task_id) + + if task_data is None: + raise TaskNotFoundError(task_id=task_id) + + if with_task_context and task_data.task_context != with_task_context: + raise TaskNotFoundError(task_id=task_id) + + return task_data + + async def get_task_status( + self, + task_id: TaskId, + with_task_context: TaskContext, + *, + exclude_to_remove: bool = True, + ) -> TaskStatus: + """ + returns: the status of the task, along with updates + form the progress + + raises TaskNotFoundError if the task cannot be found + """ + if exclude_to_remove and await self._tasks_data.is_marked_for_removal(task_id): + raise TaskNotFoundError(task_id=task_id) + + task_data = await self._get_tracked_task(task_id, with_task_context) + + await self._tasks_data.update_task_data( + task_id, + updates={"last_status_check": datetime.datetime.now(tz=datetime.UTC)}, + ) + return TaskStatus.model_validate( + { + "task_progress": task_data.task_progress, + "done": task_data.is_done, + "started": task_data.started, + } + ) + + async def get_allowed_errors( + self, task_id: TaskId, with_task_context: TaskContext + ) -> AllowedErrrors: + """ + returns: the allowed errors for the task + + raises TaskNotFoundError if the task cannot be found + """ + task_data = await self._get_tracked_task(task_id, with_task_context) + return TaskRegistry.get_allowed_errors(task_data.registered_task_name) + + async def get_task_result( + self, task_id: TaskId, with_task_context: TaskContext + ) -> ResultField: + """ + returns: the result of the task wrapped in ResultField + + raises TaskNotFoundError if the task cannot be found + raises TaskNotCompletedError if the task is not completed + """ + if await self._tasks_data.is_marked_for_removal(task_id): + raise TaskNotFoundError(task_id=task_id) + + tracked_task = await self._get_tracked_task(task_id, with_task_context) + + if not tracked_task.is_done or tracked_task.result_field is None: + raise TaskNotCompletedError(task_id=task_id) + + return tracked_task.result_field + + async def _attempt_to_remove_local_task(self, task_id: TaskId) -> None: + """if task is running in the local process, try to remove it""" + + task_to_cancel = self._created_tasks.pop(task_id, None) + if task_to_cancel is not None: + await cancel_wait_task(task_to_cancel) + await self._tasks_data.delete_task_data(task_id) + + async def remove_task( + self, + task_id: TaskId, + with_task_context: TaskContext, + *, + wait_for_removal: bool, + ) -> None: + """ + cancels and removes task + raises TaskNotFoundError if the task cannot be found + """ + if await 
self._tasks_data.is_marked_for_removal(task_id): + raise TaskNotFoundError(task_id=task_id) + + tracked_task = await self._get_tracked_task(task_id, with_task_context) + + await self._tasks_data.mark_for_removal(tracked_task.task_id) + + if not wait_for_removal: + return + + # wait for task to be removed since it might not have been running + # in this process + with suppress(TaskNotFoundError): + async for attempt in AsyncRetrying( + wait=wait_exponential(max=1), + stop=stop_after_delay(_TASK_REMOVAL_MAX_WAIT), + retry=retry_unless_exception_type(TaskNotFoundError), + ): + with attempt: + await self._get_tracked_task( + tracked_task.task_id, tracked_task.task_context + ) + + def _get_task_id(self, task_name: str, *, is_unique: bool) -> TaskId: + suffix = "unique" if is_unique else f"{uuid4()}" + return f"{self.lrt_namespace}.{task_name}.{suffix}" + + async def _update_progress( + self, + task_id: TaskId, + task_context: TaskContext, + task_progress: TaskProgress, + ) -> None: + # NOTE: avoids errors while updating progress, since the task could have been + # cancelled and it's data removed + try: + tracked_data = await self._get_tracked_task(task_id, task_context) + tracked_data.task_progress = task_progress + await self._tasks_data.update_task_data( + task_id, updates={"task_progress": task_progress.model_dump()} + ) + except TaskNotFoundError: + _logger.debug( + "Task '%s' not found while updating progress %s", + task_id, + task_progress, + ) + + async def start_task( + self, + registered_task_name: RegisteredTaskName, + *, + unique: bool, + task_context: TaskContext | None, + task_name: str | None, + fire_and_forget: bool, + **task_kwargs: Any, + ) -> TaskId: + registered_tasks = TaskRegistry.get_registered_tasks() + if registered_task_name not in registered_tasks: + raise TaskNotRegisteredError( + task_name=registered_task_name, tasks=registered_tasks + ) + + task = TaskRegistry.get_task(registered_task_name) + + # NOTE: If not task name is given, it will be composed of the handler's module and it's name + # to keep the urls shorter and more meaningful. 
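
For reference, the resulting identifiers combine the LRT namespace, the URL-quoted handler path, and a uniqueness suffix, as sketched here with hypothetical namespace and module names (`_get_task_id` above performs the actual composition):

```python
import urllib.parse
from uuid import uuid4

lrt_namespace = "my-service"  # placeholder namespace
# default task name: "<handler module>.<handler name>", URL-quoted into one token
default_task_name = urllib.parse.quote(
    "simcore_service_example.tasks.export_data", safe=""
)

unique_task_id = f"{lrt_namespace}.{default_task_name}.unique"
regular_task_id = f"{lrt_namespace}.{default_task_name}.{uuid4()}"

print(unique_task_id)   # my-service.simcore_service_example.tasks.export_data.unique
print(regular_task_id)  # my-service.simcore_service_example.tasks.export_data.<uuid4>
```
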
+ handler_module = inspect.getmodule(task) + handler_module_name = handler_module.__name__ if handler_module else "" + task_name = task_name or f"{handler_module_name}.{task.__name__}" + task_name = urllib.parse.quote(task_name, safe="") + + task_id = self._get_task_id(task_name, is_unique=unique) + + # only one unique task can be running + queried_task = await self._tasks_data.get_task_data(task_id) + if unique and queried_task is not None: + raise TaskAlreadyRunningError( + task_name=task_name, managed_task=queried_task + ) + + context_to_use = task_context or {} + task_progress = TaskProgress.create(task_id=task_id) + # set update callback + task_progress.set_update_callback( + functools.partial(self._update_progress, task_id, context_to_use) + ) + + async def _task_with_progress(progress: TaskProgress, handler: TaskProtocol): + # bind the task with progress 0 and 1 + await progress.update(message="starting", percent=0) + try: + return await handler(progress, **task_kwargs) + finally: + await progress.update(message="finished", percent=1) + + self._created_tasks[task_id] = asyncio.create_task( + _task_with_progress(task_progress, task), name=task_name + ) + + tracked_task = TaskData( + registered_task_name=registered_task_name, + task_id=task_id, + task_progress=task_progress, + task_context=context_to_use, + fire_and_forget=fire_and_forget, + ) + await self._tasks_data.add_task_data(task_id, tracked_task) + return tracked_task.task_id + + +__all__: tuple[str, ...] = ( + "TaskAlreadyRunningError", + "TaskCancelledError", + "TaskData", + "TaskId", + "TaskProgress", + "TaskProtocol", + "TaskStatus", + "TasksManager", +) diff --git a/packages/service-library/src/servicelib/rabbitmq/_client.py b/packages/service-library/src/servicelib/rabbitmq/_client.py index ccf1445c231d..097da98b8063 100644 --- a/packages/service-library/src/servicelib/rabbitmq/_client.py +++ b/packages/service-library/src/servicelib/rabbitmq/_client.py @@ -6,6 +6,8 @@ from uuid import uuid4 import aio_pika +from aiormq import ChannelInvalidStateError +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from pydantic import NonNegativeInt from ..logging_utils import log_catch, log_context @@ -46,14 +48,13 @@ def _get_x_death_count(message: aio_pika.abc.AbstractIncomingMessage) -> int: and isinstance(x_death[0], dict) and "count" in x_death[0] ): - assert isinstance(x_death[0]["count"], int) # nosec count = x_death[0]["count"] return count -async def _safe_nack( +async def _nack_message( message_handler: MessageHandler, max_retries_upon_error: int, message: aio_pika.abc.AbstractIncomingMessage, @@ -73,7 +74,7 @@ async def _safe_nack( # NOTE: puts message to the Dead Letter Exchange await message.nack(requeue=False) else: - _logger.exception( + _logger.error( "Handler '%s' is giving up on message '%s' with body '%s'", message_handler, message, @@ -86,19 +87,49 @@ async def _on_message( max_retries_upon_error: int, message: aio_pika.abc.AbstractIncomingMessage, ) -> None: - async with message.process(requeue=True, ignore_processed=True): - try: - with log_context( - _logger, - logging.DEBUG, - msg=f"Received message from {message.exchange=}, {message.routing_key=}", - ): - if not await message_handler(message.body): - await _safe_nack(message_handler, max_retries_upon_error, message) - except Exception: # pylint: disable=broad-exception-caught - _logger.exception("Exception raised when handling message") - with log_catch(_logger, reraise=False): - await _safe_nack(message_handler, 
max_retries_upon_error, message) + log_error_context = { + "message_id": message.message_id, + "message_body": message.body, + "message_handler": f"{message_handler}", + } + try: + async with message.process(requeue=True, ignore_processed=True): + try: + with log_context( + _logger, + logging.DEBUG, + msg=f"Received message from {message.exchange=}, {message.routing_key=}", + ): + if not await message_handler(message.body): + await _nack_message( + message_handler, max_retries_upon_error, message + ) + except Exception as exc: # pylint: disable=broad-exception-caught + _logger.exception( + **create_troubleshooting_log_kwargs( + "Unhandled exception raised in message handler or when nacking message", + error=exc, + error_context=log_error_context, + tip="This could indicate an error in the message handler, please check the message handler code", + ) + ) + with log_catch(_logger, reraise=False): + await _nack_message( + message_handler, max_retries_upon_error, message + ) + except ChannelInvalidStateError as exc: + # NOTE: this error can happen as can be seen in aio-pika code + # see https://github.com/mosquito/aio-pika/blob/master/aio_pika/robust_queue.py + _logger.exception( + **create_troubleshooting_log_kwargs( + "Cannot process message because channel is closed. Message will be requeued by RabbitMQ", + error=exc, + error_context=log_error_context, + tip="This could indicate the message handler takes > 30 minutes to complete " + "(default time the RabbitMQ broker waits to close a channel when a " + "message is not acknowledged) or an issue in RabbitMQ broker itself.", + ) + ) @dataclass @@ -143,6 +174,7 @@ async def close(self) -> None: async def _get_channel(self) -> aio_pika.abc.AbstractChannel: assert self._connection_pool # nosec async with self._connection_pool.acquire() as connection: + assert isinstance(connection, aio_pika.RobustConnection) # nosec channel: aio_pika.abc.AbstractChannel = await connection.channel() channel.close_callbacks.add(self._channel_close_callback) return channel diff --git a/packages/service-library/src/servicelib/rabbitmq/_client_base.py b/packages/service-library/src/servicelib/rabbitmq/_client_base.py index 69720659e503..ecc483f784b5 100644 --- a/packages/service-library/src/servicelib/rabbitmq/_client_base.py +++ b/packages/service-library/src/servicelib/rabbitmq/_client_base.py @@ -6,9 +6,11 @@ import aio_pika import aiormq -from servicelib.logging_utils import log_catch +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from settings_library.rabbit import RabbitSettings +from ..logging_utils import log_catch + _DEFAULT_RABBITMQ_SERVER_HEARTBEAT_S: Final[int] = 60 _logger = logging.getLogger(__name__) @@ -28,33 +30,49 @@ def _connection_close_callback( exc: BaseException | None, ) -> None: if exc: - if isinstance(exc, asyncio.CancelledError): - _logger.info("Rabbit connection cancelled") - elif isinstance(exc, aiormq.exceptions.ConnectionClosed): - _logger.info("Rabbit connection closed: %s", exc) + if isinstance( + exc, asyncio.CancelledError | aiormq.exceptions.ConnectionClosed + ): + _logger.info( + **create_troubleshooting_log_kwargs( + "RabbitMQ connection closed", + error=exc, + error_context={"sender": sender}, + ) + ) else: _logger.error( - "Rabbit connection closed with exception from %s:%s", - type(exc), - exc, + **create_troubleshooting_log_kwargs( + "RabbitMQ connection closed with unexpected error", + error=exc, + error_context={"sender": sender}, + ) ) self._healthy_state = False def 
_channel_close_callback( self, - sender: Any, # pylint: disable=unused-argument # noqa: ARG002 + sender: Any, exc: BaseException | None, ) -> None: if exc: - if isinstance(exc, asyncio.CancelledError): - _logger.info("Rabbit channel cancelled") - elif isinstance(exc, aiormq.exceptions.ChannelClosed): - _logger.info("Rabbit channel closed") + if isinstance( + exc, asyncio.CancelledError | aiormq.exceptions.ChannelClosed + ): + _logger.info( + **create_troubleshooting_log_kwargs( + "RabbitMQ channel closed", + error=exc, + error_context={"sender": sender}, + ) + ) else: _logger.error( - "Rabbit channel closed with exception from %s:%s", - type(exc), - exc, + **create_troubleshooting_log_kwargs( + "RabbitMQ channel closed with unexpected error", + error=exc, + error_context={"sender": sender}, + ) ) self._healthy_state = False diff --git a/packages/service-library/src/servicelib/rabbitmq/_client_rpc.py b/packages/service-library/src/servicelib/rabbitmq/_client_rpc.py index 53d9f1326585..6064be929230 100644 --- a/packages/service-library/src/servicelib/rabbitmq/_client_rpc.py +++ b/packages/service-library/src/servicelib/rabbitmq/_client_rpc.py @@ -49,7 +49,7 @@ async def _rpc_initialize(self) -> None: self._channel = await self._connection.channel() self._rpc = aio_pika.patterns.RPC(self._channel) - await self._rpc.initialize() + await self._rpc.initialize(durable=True, auto_delete=True) async def close(self) -> None: with log_context( @@ -134,6 +134,7 @@ async def register_handler( RPCNamespacedMethodName.from_namespace_and_method(namespace, method_name), handler, auto_delete=True, + durable=True, ) async def register_router( diff --git a/packages/service-library/src/servicelib/rabbitmq/_errors.py b/packages/service-library/src/servicelib/rabbitmq/_errors.py index ce58b62fd5cb..9705173203d5 100644 --- a/packages/service-library/src/servicelib/rabbitmq/_errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/_errors.py @@ -24,7 +24,7 @@ class RemoteMethodNotRegisteredError(BaseRPCError): class RPCServerError(BaseRPCError): msg_template = ( "While running method '{method_name}' raised " - "'{exc_type}': '{exc_message}'\n{traceback}" + "'{exc_type}' [{error_code}]: '{exc_message}'\n{traceback}" ) diff --git a/packages/service-library/src/servicelib/rabbitmq/_rpc_router.py b/packages/service-library/src/servicelib/rabbitmq/_rpc_router.py index 49cab08f79b2..b7902bbd409e 100644 --- a/packages/service-library/src/servicelib/rabbitmq/_rpc_router.py +++ b/packages/service-library/src/servicelib/rabbitmq/_rpc_router.py @@ -6,6 +6,8 @@ from dataclasses import dataclass, field from typing import Any, TypeVar +from common_library.error_codes import create_error_code +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from models_library.rabbitmq_basic_types import RPCMethodName from ..logging_utils import log_context @@ -13,8 +15,11 @@ DecoratedCallable = TypeVar("DecoratedCallable", bound=Callable[..., Any]) -# NOTE: this is equivalent to http access logs -_logger = logging.getLogger("rpc.access") + +_logger = logging.getLogger( + # NOTE: this logger is equivalent to http access logs + "rpc.access" +) def _create_func_msg(func, args: tuple[Any, ...], kwargs: dict[str, Any]) -> str: @@ -64,9 +69,19 @@ async def _wrapper(*args, **kwargs): ): raise + error_code = create_error_code(exc) _logger.exception( - "Unhandled exception on the rpc-server side." - " Re-raising as RPCServerError." 
+ # NOTE: equivalent to a 500 http status code error + **create_troubleshooting_log_kwargs( + f"Unhandled exception on the rpc-server side for '{func.__name__}'", + error=exc, + error_code=error_code, + error_context={ + "rpc_method": func.__name__, + "args": args, + "kwargs": kwargs, + }, + ) ) # NOTE: we do not return internal exceptions over RPC formatted_traceback = "\n".join( @@ -77,6 +92,7 @@ async def _wrapper(*args, **kwargs): exc_type=f"{exc.__class__.__module__}.{exc.__class__.__name__}", exc_message=f"{exc}", traceback=f"{formatted_traceback}", + error_code=error_code, ) from None self.routes[RPCMethodName(func.__name__)] = _wrapper diff --git a/packages/service-library/src/servicelib/rabbitmq/_utils.py b/packages/service-library/src/servicelib/rabbitmq/_utils.py index 404adb1b6525..2fae056f38c5 100644 --- a/packages/service-library/src/servicelib/rabbitmq/_utils.py +++ b/packages/service-library/src/servicelib/rabbitmq/_utils.py @@ -85,6 +85,19 @@ async def declare_queue( # NOTE: setting a name will ensure multiple instance will take their data here queue_parameters |= {"name": queue_name} + # avoids deprecated `transient_nonexcl_queues` warning in RabbitMQ + if ( + queue_parameters.get("durable", False) is False + and queue_parameters.get("exclusive", False) is False + ): + msg = ( + "Queue must be `durable` or `exclusive`, but not both. " + "This is to avoid the `transient_nonexcl_queues` warning. " + "NOTE: if both `durable` and `exclusive` are missing they are considered False. " + f"{queue_parameters=}" + ) + raise ValueError(msg) + # NOTE: if below line raises something similar to ``ChannelPreconditionFailed: PRECONDITION_FAILED`` # most likely someone changed the signature of the queues (parameters etc...) # Safest way to deal with it: diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/containers.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/containers.py index 2049f0a409f7..0e64ff625061 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/containers.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/containers.py @@ -6,8 +6,9 @@ from models_library.projects_nodes_io import NodeID from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace from pydantic import NonNegativeInt, TypeAdapter -from servicelib.logging_utils import log_decorator -from servicelib.rabbitmq import RabbitMQRPCClient + +from ....logging_utils import log_decorator +from ....rabbitmq import RabbitMQRPCClient _logger = logging.getLogger(__name__) diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/volumes.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/volumes.py index 41cf2ffd8b84..07f8f9617508 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/volumes.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/agent/volumes.py @@ -6,8 +6,9 @@ from models_library.projects_nodes_io import NodeID from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace from pydantic import NonNegativeInt, TypeAdapter -from servicelib.logging_utils import log_decorator -from servicelib.rabbitmq import RabbitMQRPCClient + +from ....logging_utils import log_decorator +from ....rabbitmq import RabbitMQRPCClient _logger = logging.getLogger(__name__) diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/async_jobs/async_jobs.py 
b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/async_jobs/async_jobs.py index f6e1954c9368..1d2da04185c8 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/async_jobs/async_jobs.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/async_jobs/async_jobs.py @@ -8,7 +8,6 @@ from models_library.api_schemas_rpc_async_jobs.async_jobs import ( AsyncJobGet, AsyncJobId, - AsyncJobNameData, AsyncJobResult, AsyncJobStatus, ) @@ -27,6 +26,7 @@ wait_random_exponential, ) +from ....celery.models import OwnerMetadata from ....rabbitmq import RemoteMethodNotRegisteredError from ... import RabbitMQRPCClient @@ -41,13 +41,13 @@ async def cancel( *, rpc_namespace: RPCNamespace, job_id: AsyncJobId, - job_id_data: AsyncJobNameData, + owner_metadata: OwnerMetadata, ) -> None: await rabbitmq_rpc_client.request( rpc_namespace, TypeAdapter(RPCMethodName).validate_python("cancel"), job_id=job_id, - job_id_data=job_id_data, + owner_metadata=owner_metadata, timeout_s=_DEFAULT_TIMEOUT_S, ) @@ -57,13 +57,13 @@ async def status( *, rpc_namespace: RPCNamespace, job_id: AsyncJobId, - job_id_data: AsyncJobNameData, + owner_metadata: OwnerMetadata, ) -> AsyncJobStatus: _result = await rabbitmq_rpc_client.request( rpc_namespace, TypeAdapter(RPCMethodName).validate_python("status"), job_id=job_id, - job_id_data=job_id_data, + owner_metadata=owner_metadata, timeout_s=_DEFAULT_TIMEOUT_S, ) assert isinstance(_result, AsyncJobStatus) @@ -75,13 +75,13 @@ async def result( *, rpc_namespace: RPCNamespace, job_id: AsyncJobId, - job_id_data: AsyncJobNameData, + owner_metadata: OwnerMetadata, ) -> AsyncJobResult: _result = await rabbitmq_rpc_client.request( rpc_namespace, TypeAdapter(RPCMethodName).validate_python("result"), job_id=job_id, - job_id_data=job_id_data, + owner_metadata=owner_metadata, timeout_s=_DEFAULT_TIMEOUT_S, ) assert isinstance(_result, AsyncJobResult) @@ -92,14 +92,12 @@ async def list_jobs( rabbitmq_rpc_client: RabbitMQRPCClient, *, rpc_namespace: RPCNamespace, - filter_: str, - job_id_data: AsyncJobNameData, + owner_metadata: OwnerMetadata, ) -> list[AsyncJobGet]: _result: list[AsyncJobGet] = await rabbitmq_rpc_client.request( rpc_namespace, TypeAdapter(RPCMethodName).validate_python("list_jobs"), - filter_=filter_, - job_id_data=job_id_data, + owner_metadata=owner_metadata, timeout_s=_DEFAULT_TIMEOUT_S, ) return _result @@ -110,13 +108,13 @@ async def submit( *, rpc_namespace: RPCNamespace, method_name: str, - job_id_data: AsyncJobNameData, + owner_metadata: OwnerMetadata, **kwargs, ) -> AsyncJobGet: _result = await rabbitmq_rpc_client.request( rpc_namespace, TypeAdapter(RPCMethodName).validate_python(method_name), - job_id_data=job_id_data, + owner_metadata=owner_metadata, **kwargs, timeout_s=_DEFAULT_TIMEOUT_S, ) @@ -140,7 +138,7 @@ async def _wait_for_completion( rpc_namespace: RPCNamespace, method_name: RPCMethodName, job_id: AsyncJobId, - job_id_data: AsyncJobNameData, + owner_metadata: OwnerMetadata, client_timeout: datetime.timedelta, ) -> AsyncGenerator[AsyncJobStatus, None]: try: @@ -156,7 +154,7 @@ async def _wait_for_completion( rabbitmq_rpc_client, rpc_namespace=rpc_namespace, job_id=job_id, - job_id_data=job_id_data, + owner_metadata=owner_metadata, ) yield job_status if not job_status.done: @@ -191,7 +189,7 @@ async def wait_and_get_result( rpc_namespace: RPCNamespace, method_name: str, job_id: AsyncJobId, - job_id_data: AsyncJobNameData, + owner_metadata: OwnerMetadata, client_timeout: datetime.timedelta, ) -> 
AsyncGenerator[AsyncJobComposedResult, None]: """when a job is already submitted this will wait for its completion @@ -203,7 +201,7 @@ async def wait_and_get_result( rpc_namespace=rpc_namespace, method_name=method_name, job_id=job_id, - job_id_data=job_id_data, + owner_metadata=owner_metadata, client_timeout=client_timeout, ): assert job_status is not None # nosec @@ -217,7 +215,7 @@ async def wait_and_get_result( rabbitmq_rpc_client, rpc_namespace=rpc_namespace, job_id=job_id, - job_id_data=job_id_data, + owner_metadata=owner_metadata, ), ) except (TimeoutError, CancelledError) as error: @@ -226,7 +224,7 @@ async def wait_and_get_result( rabbitmq_rpc_client, rpc_namespace=rpc_namespace, job_id=job_id, - job_id_data=job_id_data, + owner_metadata=owner_metadata, ) except Exception as exc: raise exc from error # NOSONAR @@ -238,7 +236,7 @@ async def submit_and_wait( *, rpc_namespace: RPCNamespace, method_name: str, - job_id_data: AsyncJobNameData, + owner_metadata: OwnerMetadata, client_timeout: datetime.timedelta, **kwargs, ) -> AsyncGenerator[AsyncJobComposedResult, None]: @@ -248,7 +246,7 @@ async def submit_and_wait( rabbitmq_rpc_client, rpc_namespace=rpc_namespace, method_name=method_name, - job_id_data=job_id_data, + owner_metadata=owner_metadata, **kwargs, ) except (TimeoutError, CancelledError) as error: @@ -258,7 +256,7 @@ async def submit_and_wait( rabbitmq_rpc_client, rpc_namespace=rpc_namespace, job_id=async_job_rpc_get.job_id, - job_id_data=job_id_data, + owner_metadata=owner_metadata, ) except Exception as exc: raise exc from error @@ -269,7 +267,7 @@ async def submit_and_wait( rpc_namespace=rpc_namespace, method_name=method_name, job_id=async_job_rpc_get.job_id, - job_id_data=job_id_data, + owner_metadata=owner_metadata, client_timeout=client_timeout, ): yield wait_and_ diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/clusters_keeper/clusters.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/clusters_keeper/clusters.py index ada0c66d26d9..ca409aa7e651 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/clusters_keeper/clusters.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/clusters_keeper/clusters.py @@ -6,6 +6,7 @@ from models_library.rabbitmq_basic_types import RPCMethodName from models_library.users import UserID from models_library.wallets import WalletID +from pydantic import TypeAdapter from ....async_utils import run_sequentially_in_context from ..._client_rpc import RabbitMQRPCClient @@ -13,6 +14,10 @@ _TTL_CACHE_ON_CLUSTERS_S: Final[int] = 5 +_GET_OR_CREATE_CLUSTER_METHOD_NAME: Final[RPCMethodName] = TypeAdapter( + RPCMethodName +).validate_python("get_or_create_cluster") + @run_sequentially_in_context(target_args=["user_id", "wallet_id"]) @cached( @@ -32,7 +37,7 @@ async def get_or_create_cluster( # the 2nd decorator ensure that many calls in a short time will return quickly the same value on_demand_cluster: OnDemandCluster = await client.request( CLUSTERS_KEEPER_RPC_NAMESPACE, - RPCMethodName("get_or_create_cluster"), + _GET_OR_CREATE_CLUSTER_METHOD_NAME, timeout_s=RPC_REMOTE_METHOD_TIMEOUT_S, user_id=user_id, wallet_id=wallet_id, diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/director_v2/computations.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/director_v2/computations.py index a24ed19aba90..b1b90b999a26 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/director_v2/computations.py +++ 
b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/director_v2/computations.py @@ -6,9 +6,12 @@ DIRECTOR_V2_RPC_NAMESPACE, ) from models_library.api_schemas_directorv2.comp_runs import ( + ComputationCollectionRunRpcGetPage, + ComputationCollectionRunTaskRpcGetPage, ComputationRunRpcGetPage, ComputationTaskRpcGetPage, ) +from models_library.computations import CollectionRunID from models_library.products import ProductName from models_library.projects import ProjectID from models_library.rabbitmq_basic_types import RPCMethodName @@ -114,3 +117,62 @@ async def list_computations_latest_iteration_tasks_page( ) assert isinstance(result, ComputationTaskRpcGetPage) # nosec return result + + +@log_decorator(_logger, level=logging.DEBUG) +async def list_computation_collection_runs_page( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + product_name: ProductName, + user_id: UserID, + project_ids: list[ProjectID] | None, + filter_only_running: bool = False, + # pagination + offset: int = 0, + limit: int = 20, +) -> ComputationCollectionRunRpcGetPage: + result = await rabbitmq_rpc_client.request( + DIRECTOR_V2_RPC_NAMESPACE, + _RPC_METHOD_NAME_ADAPTER.validate_python( + "list_computation_collection_runs_page" + ), + product_name=product_name, + user_id=user_id, + project_ids=project_ids, + filter_only_running=filter_only_running, + offset=offset, + limit=limit, + timeout_s=_DEFAULT_TIMEOUT_S, + ) + assert isinstance(result, ComputationCollectionRunRpcGetPage) # nosec + return result + + +@log_decorator(_logger, level=logging.DEBUG) +async def list_computation_collection_run_tasks_page( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + product_name: ProductName, + user_id: UserID, + collection_run_id: CollectionRunID, + # pagination + offset: int = 0, + limit: int = 20, + # ordering + order_by: OrderBy | None = None, +) -> ComputationCollectionRunTaskRpcGetPage: + result = await rabbitmq_rpc_client.request( + DIRECTOR_V2_RPC_NAMESPACE, + _RPC_METHOD_NAME_ADAPTER.validate_python( + "list_computation_collection_run_tasks_page" + ), + product_name=product_name, + user_id=user_id, + collection_run_id=collection_run_id, + offset=offset, + limit=limit, + order_by=order_by, + timeout_s=_DEFAULT_TIMEOUT_S, + ) + assert isinstance(result, ComputationCollectionRunTaskRpcGetPage) # nosec + return result diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/director_v2/computations_tasks.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/director_v2/computations_tasks.py new file mode 100644 index 000000000000..5d12960444cd --- /dev/null +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/director_v2/computations_tasks.py @@ -0,0 +1,41 @@ +# pylint: disable=too-many-arguments +import logging +from typing import Final + +from models_library.api_schemas_directorv2 import ( + DIRECTOR_V2_RPC_NAMESPACE, +) +from models_library.api_schemas_directorv2.computations import TaskLogFileIdGet +from models_library.projects import ProjectID +from models_library.rabbitmq_basic_types import RPCMethodName +from pydantic import TypeAdapter + +from ....logging_utils import log_decorator +from ... 
import RabbitMQRPCClient + +_logger = logging.getLogger(__name__) + + +_RPC_METHOD_NAME_ADAPTER: TypeAdapter[RPCMethodName] = TypeAdapter(RPCMethodName) + +_GET_COMPUTATION_TASK_LOG_FILE_IDS: Final[RPCMethodName] = ( + _RPC_METHOD_NAME_ADAPTER.validate_python("get_computation_task_log_file_ids") +) + + +@log_decorator(_logger, level=logging.DEBUG) +async def get_computation_task_log_file_ids( + rabbitmq_rpc_client: RabbitMQRPCClient, *, project_id: ProjectID +) -> list[TaskLogFileIdGet]: + """ + Raises: + ComputationalTaskMissingError + """ + result = await rabbitmq_rpc_client.request( + DIRECTOR_V2_RPC_NAMESPACE, + _GET_COMPUTATION_TASK_LOG_FILE_IDS, + project_id=project_id, + ) + assert isinstance(result, list) # nosec + assert all(isinstance(item, TaskLogFileIdGet) for item in result) # nosec + return result diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/director_v2/errors.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/director_v2/errors.py new file mode 100644 index 000000000000..7d58603d1522 --- /dev/null +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/director_v2/errors.py @@ -0,0 +1,9 @@ +from ..._errors import RPCInterfaceError + + +class BaseRpcError(RPCInterfaceError): # pylint: disable=too-many-ancestors + pass + + +class ComputationalTaskMissingError(BaseRpcError): # pylint: disable=too-many-ancestors + msg_template = "Computational run not found for project {project_id}" diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py index fb3276ae670c..edf4a480c1fc 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_scheduler/services.py @@ -18,8 +18,9 @@ from models_library.services_types import ServicePortKey from models_library.users import UserID from pydantic import NonNegativeInt, TypeAdapter -from servicelib.logging_utils import log_decorator -from servicelib.rabbitmq import RabbitMQRPCClient + +from ....logging_utils import log_decorator +from ....rabbitmq import RabbitMQRPCClient _logger = logging.getLogger(__name__) diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_sidecar/container_extensions.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_sidecar/container_extensions.py new file mode 100644 index 000000000000..ec7201a2091a --- /dev/null +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_sidecar/container_extensions.py @@ -0,0 +1,84 @@ +import logging + +from models_library.projects_nodes_io import NodeID +from models_library.rabbitmq_basic_types import RPCMethodName +from models_library.services import ServiceOutput +from pydantic import TypeAdapter + +from ....logging_utils import log_decorator +from ... 
import RabbitMQRPCClient +from ._utils import get_rpc_namespace + +_logger = logging.getLogger(__name__) + + +@log_decorator(_logger, level=logging.DEBUG) +async def toggle_ports_io( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + node_id: NodeID, + enable_outputs: bool, + enable_inputs: bool +) -> None: + rpc_namespace = get_rpc_namespace(node_id) + result = await rabbitmq_rpc_client.request( + rpc_namespace, + TypeAdapter(RPCMethodName).validate_python("toggle_ports_io"), + enable_outputs=enable_outputs, + enable_inputs=enable_inputs, + ) + assert result is None # nosec + + +@log_decorator(_logger, level=logging.DEBUG) +async def create_output_dirs( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + node_id: NodeID, + outputs_labels: dict[str, ServiceOutput] +) -> None: + rpc_namespace = get_rpc_namespace(node_id) + result = await rabbitmq_rpc_client.request( + rpc_namespace, + TypeAdapter(RPCMethodName).validate_python("create_output_dirs"), + outputs_labels=outputs_labels, + ) + assert result is None # nosec + + +@log_decorator(_logger, level=logging.DEBUG) +async def attach_container_to_network( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + node_id: NodeID, + container_id: str, + network_id: str, + network_aliases: list[str] +) -> None: + rpc_namespace = get_rpc_namespace(node_id) + result = await rabbitmq_rpc_client.request( + rpc_namespace, + TypeAdapter(RPCMethodName).validate_python("attach_container_to_network"), + container_id=container_id, + network_id=network_id, + network_aliases=network_aliases, + ) + assert result is None # nosec + + +@log_decorator(_logger, level=logging.DEBUG) +async def detach_container_from_network( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + node_id: NodeID, + container_id: str, + network_id: str +) -> None: + rpc_namespace = get_rpc_namespace(node_id) + result = await rabbitmq_rpc_client.request( + rpc_namespace, + TypeAdapter(RPCMethodName).validate_python("detach_container_from_network"), + container_id=container_id, + network_id=network_id, + ) + assert result is None # nosec diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_sidecar/containers.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_sidecar/containers.py new file mode 100644 index 000000000000..2aace161a1cc --- /dev/null +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_sidecar/containers.py @@ -0,0 +1,87 @@ +import logging +from typing import Any + +from models_library.api_schemas_directorv2.dynamic_services import ContainersComposeSpec +from models_library.api_schemas_dynamic_sidecar.containers import ActivityInfoOrNone +from models_library.projects_nodes_io import NodeID +from models_library.rabbitmq_basic_types import RPCMethodName +from pydantic import TypeAdapter + +from ....logging_utils import log_decorator +from ... 
import RabbitMQRPCClient +from ._utils import get_rpc_namespace + +_logger = logging.getLogger(__name__) + + +@log_decorator(_logger, level=logging.DEBUG) +async def create_compose_spec( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + node_id: NodeID, + containers_compose_spec: ContainersComposeSpec, +) -> None: + rpc_namespace = get_rpc_namespace(node_id) + result = await rabbitmq_rpc_client.request( + rpc_namespace, + TypeAdapter(RPCMethodName).validate_python("create_compose_spec"), + containers_compose_spec=containers_compose_spec, + ) + assert result is None # nosec + + +@log_decorator(_logger, level=logging.DEBUG) +async def containers_docker_inspect( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + node_id: NodeID, + only_status: bool, +) -> dict[str, Any]: + rpc_namespace = get_rpc_namespace(node_id) + result = await rabbitmq_rpc_client.request( + rpc_namespace, + TypeAdapter(RPCMethodName).validate_python("containers_docker_inspect"), + only_status=only_status, + ) + assert isinstance(result, dict) # nosec + return result + + +@log_decorator(_logger, level=logging.DEBUG) +async def get_containers_activity( + rabbitmq_rpc_client: RabbitMQRPCClient, *, node_id: NodeID +) -> ActivityInfoOrNone: + rpc_namespace = get_rpc_namespace(node_id) + result = await rabbitmq_rpc_client.request( + rpc_namespace, + TypeAdapter(RPCMethodName).validate_python("get_containers_activity"), + ) + return TypeAdapter(ActivityInfoOrNone).validate_python(result) if result else None + + +@log_decorator(_logger, level=logging.DEBUG) +async def get_containers_name( + rabbitmq_rpc_client: RabbitMQRPCClient, *, node_id: NodeID, filters: str +) -> str: + rpc_namespace = get_rpc_namespace(node_id) + result = await rabbitmq_rpc_client.request( + rpc_namespace, + TypeAdapter(RPCMethodName).validate_python("get_containers_name"), + filters=filters, + ) + assert isinstance(result, str) + return result + + +@log_decorator(_logger, level=logging.DEBUG) +async def inspect_container( + rabbitmq_rpc_client: RabbitMQRPCClient, *, node_id: NodeID, container_id: str +) -> dict[str, Any]: + rpc_namespace = get_rpc_namespace(node_id) + result = await rabbitmq_rpc_client.request( + rpc_namespace, + TypeAdapter(RPCMethodName).validate_python("inspect_container"), + container_id=container_id, + ) + assert isinstance(result, dict) + return result diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_sidecar/containers_long_running_tasks.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_sidecar/containers_long_running_tasks.py new file mode 100644 index 000000000000..b8136ebc49b1 --- /dev/null +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_sidecar/containers_long_running_tasks.py @@ -0,0 +1,172 @@ +import logging + +from models_library.api_schemas_directorv2.dynamic_services import ContainersCreate +from models_library.projects_nodes_io import NodeID +from models_library.rabbitmq_basic_types import RPCMethodName +from pydantic import TypeAdapter +from servicelib.long_running_tasks.models import LRTNamespace, TaskId + +from ....logging_utils import log_decorator +from ... 
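# --- illustrative usage sketch (assumed caller context, not from the changeset) ---
# Rough example of driving the new dynamic-sidecar `containers` RPC helpers defined
# above. `rpc_client` (a connected RabbitMQRPCClient) and `node_id` are placeholders.
from models_library.projects_nodes_io import NodeID
from servicelib.rabbitmq.rpc_interfaces.dynamic_sidecar.containers import (
    containers_docker_inspect,
    get_containers_activity,
)


async def _report_sidecar_containers(rpc_client, node_id: NodeID) -> None:
    # lightweight status-only inspection of the service's containers
    statuses = await containers_docker_inspect(
        rpc_client, node_id=node_id, only_status=True
    )
    # activity info is None when the sidecar reports no activity data
    activity = await get_containers_activity(rpc_client, node_id=node_id)
    print(statuses, activity)
# -----------------------------------------------------------------------------------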
import RabbitMQRPCClient +from ._utils import get_rpc_namespace + +_logger = logging.getLogger(__name__) + + +@log_decorator(_logger, level=logging.DEBUG) +async def pull_user_services_images( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + node_id: NodeID, + lrt_namespace: LRTNamespace, +) -> TaskId: + rpc_namespace = get_rpc_namespace(node_id) + result = await rabbitmq_rpc_client.request( + rpc_namespace, + TypeAdapter(RPCMethodName).validate_python("pull_user_services_images"), + lrt_namespace=lrt_namespace, + ) + assert isinstance(result, TaskId) # nosec + return result + + +@log_decorator(_logger, level=logging.DEBUG) +async def create_user_services( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + node_id: NodeID, + lrt_namespace: LRTNamespace, + containers_create: ContainersCreate, +) -> TaskId: + rpc_namespace = get_rpc_namespace(node_id) + result = await rabbitmq_rpc_client.request( + rpc_namespace, + TypeAdapter(RPCMethodName).validate_python("create_user_services"), + lrt_namespace=lrt_namespace, + containers_create=containers_create, + ) + assert isinstance(result, TaskId) # nosec + return result + + +@log_decorator(_logger, level=logging.DEBUG) +async def remove_user_services( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + node_id: NodeID, + lrt_namespace: LRTNamespace, +) -> TaskId: + rpc_namespace = get_rpc_namespace(node_id) + result = await rabbitmq_rpc_client.request( + rpc_namespace, + TypeAdapter(RPCMethodName).validate_python("remove_user_services"), + lrt_namespace=lrt_namespace, + ) + assert isinstance(result, TaskId) # nosec + return result + + +@log_decorator(_logger, level=logging.DEBUG) +async def restore_user_services_state_paths( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + node_id: NodeID, + lrt_namespace: LRTNamespace, +) -> TaskId: + rpc_namespace = get_rpc_namespace(node_id) + result = await rabbitmq_rpc_client.request( + rpc_namespace, + TypeAdapter(RPCMethodName).validate_python("restore_user_services_state_paths"), + lrt_namespace=lrt_namespace, + ) + assert isinstance(result, TaskId) # nosec + return result + + +@log_decorator(_logger, level=logging.DEBUG) +async def save_user_services_state_paths( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + node_id: NodeID, + lrt_namespace: LRTNamespace, +) -> TaskId: + rpc_namespace = get_rpc_namespace(node_id) + result = await rabbitmq_rpc_client.request( + rpc_namespace, + TypeAdapter(RPCMethodName).validate_python("save_user_services_state_paths"), + lrt_namespace=lrt_namespace, + ) + assert isinstance(result, TaskId) # nosec + return result + + +@log_decorator(_logger, level=logging.DEBUG) +async def pull_user_services_input_ports( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + node_id: NodeID, + lrt_namespace: LRTNamespace, + port_keys: list[str] | None, +) -> TaskId: + rpc_namespace = get_rpc_namespace(node_id) + result = await rabbitmq_rpc_client.request( + rpc_namespace, + TypeAdapter(RPCMethodName).validate_python("pull_user_services_input_ports"), + lrt_namespace=lrt_namespace, + port_keys=port_keys, + ) + assert isinstance(result, TaskId) # nosec + return result + + +@log_decorator(_logger, level=logging.DEBUG) +async def pull_user_services_output_ports( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + node_id: NodeID, + lrt_namespace: LRTNamespace, + port_keys: list[str] | None, +) -> TaskId: + rpc_namespace = get_rpc_namespace(node_id) + result = await rabbitmq_rpc_client.request( + rpc_namespace, + TypeAdapter(RPCMethodName).validate_python("pull_user_services_output_ports"), + 
lrt_namespace=lrt_namespace, + port_keys=port_keys, + ) + assert isinstance(result, TaskId) # nosec + return result + + +@log_decorator(_logger, level=logging.DEBUG) +async def push_user_services_output_ports( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + node_id: NodeID, + lrt_namespace: LRTNamespace, +) -> TaskId: + rpc_namespace = get_rpc_namespace(node_id) + result = await rabbitmq_rpc_client.request( + rpc_namespace, + TypeAdapter(RPCMethodName).validate_python("push_user_services_output_ports"), + lrt_namespace=lrt_namespace, + ) + assert isinstance(result, TaskId) # nosec + return result + + +@log_decorator(_logger, level=logging.DEBUG) +async def restart_user_services( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + node_id: NodeID, + lrt_namespace: LRTNamespace, +) -> TaskId: + rpc_namespace = get_rpc_namespace(node_id) + result = await rabbitmq_rpc_client.request( + rpc_namespace, + TypeAdapter(RPCMethodName).validate_python("restart_user_services"), + lrt_namespace=lrt_namespace, + ) + assert isinstance(result, TaskId) # nosec + return result diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_sidecar/volumes.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_sidecar/volumes.py index 00fb9e78d72e..f714ed6ba3f9 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_sidecar/volumes.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/dynamic_sidecar/volumes.py @@ -13,7 +13,7 @@ @log_decorator(_logger, level=logging.DEBUG) -async def save_volume_state( +async def update_volume_status( rabbitmq_rpc_client: RabbitMQRPCClient, *, node_id: NodeID, @@ -23,7 +23,7 @@ async def save_volume_state( rpc_namespace = get_rpc_namespace(node_id) result = await rabbitmq_rpc_client.request( rpc_namespace, - TypeAdapter(RPCMethodName).validate_python("save_volume_state"), + TypeAdapter(RPCMethodName).validate_python("update_volume_status"), status=status, category=category, ) diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/paths.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/paths.py index c1049bfc1bbb..c03be37d3937 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/paths.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/paths.py @@ -2,14 +2,14 @@ from models_library.api_schemas_rpc_async_jobs.async_jobs import ( AsyncJobGet, - AsyncJobNameData, ) from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE -from models_library.products import ProductName from models_library.projects_nodes_io import LocationID from models_library.rabbitmq_basic_types import RPCMethodName from models_library.users import UserID +from pydantic import TypeAdapter +from ....celery.models import OwnerMetadata from ..._client_rpc import RabbitMQRPCClient from ..async_jobs.async_jobs import submit @@ -17,38 +17,38 @@ async def compute_path_size( client: RabbitMQRPCClient, *, - user_id: UserID, - product_name: ProductName, location_id: LocationID, path: Path, -) -> tuple[AsyncJobGet, AsyncJobNameData]: - job_id_data = AsyncJobNameData(user_id=user_id, product_name=product_name) + owner_metadata: OwnerMetadata, + user_id: UserID +) -> tuple[AsyncJobGet, OwnerMetadata]: async_job_rpc_get = await submit( rabbitmq_rpc_client=client, rpc_namespace=STORAGE_RPC_NAMESPACE, - method_name=RPCMethodName("compute_path_size"), - job_id_data=job_id_data, + 
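# --- illustrative usage sketch (assumed caller context, not from the changeset) ---
# Hypothetical caller of the long-running-task RPC helpers above: each helper only
# schedules work in the sidecar and returns a TaskId to be tracked through the
# long-running-tasks framework. `rpc_client`, `node_id` and `lrt_namespace` are
# assumed to come from the caller.
from models_library.projects_nodes_io import NodeID
from servicelib.long_running_tasks.models import LRTNamespace, TaskId
from servicelib.rabbitmq.rpc_interfaces.dynamic_sidecar.containers_long_running_tasks import (
    restart_user_services,
)


async def _schedule_restart(
    rpc_client, node_id: NodeID, lrt_namespace: LRTNamespace
) -> TaskId:
    # returns immediately; completion is observed via the returned TaskId
    return await restart_user_services(
        rpc_client, node_id=node_id, lrt_namespace=lrt_namespace
    )
# -----------------------------------------------------------------------------------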
method_name=TypeAdapter(RPCMethodName).validate_python("compute_path_size"), + owner_metadata=owner_metadata, location_id=location_id, path=path, + user_id=user_id, ) - return async_job_rpc_get, job_id_data + return async_job_rpc_get, owner_metadata async def delete_paths( client: RabbitMQRPCClient, *, - user_id: UserID, - product_name: ProductName, location_id: LocationID, paths: set[Path], -) -> tuple[AsyncJobGet, AsyncJobNameData]: - job_id_data = AsyncJobNameData(user_id=user_id, product_name=product_name) + owner_metadata: OwnerMetadata, + user_id: UserID +) -> tuple[AsyncJobGet, OwnerMetadata]: async_job_rpc_get = await submit( rabbitmq_rpc_client=client, rpc_namespace=STORAGE_RPC_NAMESPACE, - method_name=RPCMethodName("delete_paths"), - job_id_data=job_id_data, + method_name=TypeAdapter(RPCMethodName).validate_python("delete_paths"), + owner_metadata=owner_metadata, location_id=location_id, paths=paths, + user_id=user_id, ) - return async_job_rpc_get, job_id_data + return async_job_rpc_get, owner_metadata diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/simcore_s3.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/simcore_s3.py index df78448a5752..31ca1d11440c 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/simcore_s3.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/storage/simcore_s3.py @@ -1,14 +1,15 @@ +from typing import Literal + from models_library.api_schemas_rpc_async_jobs.async_jobs import ( AsyncJobGet, - AsyncJobNameData, ) from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE from models_library.api_schemas_storage.storage_schemas import FoldersBody from models_library.api_schemas_webserver.storage import PathToExport -from models_library.products import ProductName from models_library.rabbitmq_basic_types import RPCMethodName from models_library.users import UserID from pydantic import TypeAdapter +from servicelib.celery.models import OwnerMetadata from ... 
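# --- illustrative usage sketch (assumed caller context, not from the changeset) ---
# Example of submitting a path-size computation through the reworked storage RPC
# interface above: the caller now passes an OwnerMetadata object instead of building
# an AsyncJobNameData. How `owner_metadata` is constructed is service-specific and
# assumed here; the async job is then tracked via the returned AsyncJobGet.
from pathlib import Path

from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobGet
from models_library.projects_nodes_io import LocationID
from models_library.users import UserID
from servicelib.celery.models import OwnerMetadata
from servicelib.rabbitmq.rpc_interfaces.storage.paths import compute_path_size


async def _submit_path_size_job(
    client,
    *,
    owner_metadata: OwnerMetadata,
    user_id: UserID,
    location_id: LocationID,
    path: Path,
) -> tuple[AsyncJobGet, OwnerMetadata]:
    return await compute_path_size(
        client,
        location_id=location_id,
        path=path,
        owner_metadata=owner_metadata,
        user_id=user_id,
    )
# -----------------------------------------------------------------------------------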
import RabbitMQRPCClient from ..async_jobs.async_jobs import submit @@ -17,34 +18,38 @@ async def copy_folders_from_project( client: RabbitMQRPCClient, *, - user_id: UserID, - product_name: ProductName, body: FoldersBody, -) -> tuple[AsyncJobGet, AsyncJobNameData]: - job_id_data = AsyncJobNameData(user_id=user_id, product_name=product_name) + owner_metadata: OwnerMetadata, + user_id: UserID +) -> tuple[AsyncJobGet, OwnerMetadata]: async_job_rpc_get = await submit( rabbitmq_rpc_client=client, rpc_namespace=STORAGE_RPC_NAMESPACE, - method_name=RPCMethodName("copy_folders_from_project"), - job_id_data=job_id_data, + method_name=TypeAdapter(RPCMethodName).validate_python( + "copy_folders_from_project" + ), + owner_metadata=owner_metadata, body=body, + user_id=user_id, ) - return async_job_rpc_get, job_id_data + return async_job_rpc_get, owner_metadata async def start_export_data( rabbitmq_rpc_client: RabbitMQRPCClient, *, - user_id: UserID, - product_name: ProductName, paths_to_export: list[PathToExport], -) -> tuple[AsyncJobGet, AsyncJobNameData]: - job_id_data = AsyncJobNameData(user_id=user_id, product_name=product_name) + export_as: Literal["path", "download_link"], + owner_metadata: OwnerMetadata, + user_id: UserID +) -> tuple[AsyncJobGet, OwnerMetadata]: async_job_rpc_get = await submit( rabbitmq_rpc_client, rpc_namespace=STORAGE_RPC_NAMESPACE, method_name=TypeAdapter(RPCMethodName).validate_python("start_export_data"), - job_id_data=job_id_data, + owner_metadata=owner_metadata, paths_to_export=paths_to_export, + export_as=export_as, + user_id=user_id, ) - return async_job_rpc_get, job_id_data + return async_job_rpc_get, owner_metadata diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/auth/api_keys.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/auth/api_keys.py index 0358a0e3b6ad..c84dbf84993c 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/auth/api_keys.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/auth/api_keys.py @@ -1,13 +1,13 @@ import logging -from models_library.api_schemas_webserver import WEBSERVER_RPC_NAMESPACE from models_library.basic_types import IDStr -from models_library.rabbitmq_basic_types import RPCMethodName +from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace from models_library.rpc.webserver.auth.api_keys import ApiKeyCreate, ApiKeyGet from models_library.users import UserID from pydantic import TypeAdapter -from servicelib.logging_utils import log_decorator -from servicelib.rabbitmq import RabbitMQRPCClient + +from .....logging_utils import log_decorator +from .....rabbitmq import RabbitMQRPCClient _logger = logging.getLogger(__name__) @@ -15,13 +15,14 @@ @log_decorator(_logger, level=logging.DEBUG) async def create_api_key( rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_namespace: RPCNamespace, *, user_id: UserID, product_name: str, api_key: ApiKeyCreate, ) -> ApiKeyGet: result: ApiKeyGet = await rabbitmq_rpc_client.request( - WEBSERVER_RPC_NAMESPACE, + rpc_namespace, TypeAdapter(RPCMethodName).validate_python("create_api_key"), user_id=user_id, product_name=product_name, @@ -35,13 +36,14 @@ async def create_api_key( @log_decorator(_logger, level=logging.DEBUG) async def get_api_key( rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_namespace: RPCNamespace, *, user_id: UserID, product_name: str, api_key_id: IDStr, ) -> ApiKeyGet: result: ApiKeyGet = await rabbitmq_rpc_client.request( - 
WEBSERVER_RPC_NAMESPACE, + rpc_namespace, TypeAdapter(RPCMethodName).validate_python("get_api_key"), user_id=user_id, product_name=product_name, @@ -54,13 +56,14 @@ async def get_api_key( @log_decorator(_logger, level=logging.DEBUG) async def delete_api_key_by_key( rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_namespace: RPCNamespace, *, user_id: UserID, product_name: str, api_key: str, ) -> None: result = await rabbitmq_rpc_client.request( - WEBSERVER_RPC_NAMESPACE, + rpc_namespace, TypeAdapter(RPCMethodName).validate_python("delete_api_key_by_key"), user_id=user_id, product_name=product_name, diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/errors.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/errors.py index e0c3fc2419a2..a5f494191aec 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/errors.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/errors.py @@ -1,7 +1,11 @@ from ..._errors import RPCInterfaceError -class ProjectNotFoundRpcError(RPCInterfaceError): ... +class ProjectNotFoundRpcError( # pylint: disable=too-many-ancestors + RPCInterfaceError +): ... -class ProjectForbiddenRpcError(RPCInterfaceError): ... +class ProjectForbiddenRpcError( # pylint: disable=too-many-ancestors + RPCInterfaceError +): ... diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/functions/functions_rpc_interface.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/functions/functions_rpc_interface.py index 0ab7e17756aa..4f017023a727 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/functions/functions_rpc_interface.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/functions/functions_rpc_interface.py @@ -1,4 +1,5 @@ import logging +from typing import Literal from models_library.api_schemas_webserver import WEBSERVER_RPC_NAMESPACE from models_library.api_schemas_webserver.functions import ( @@ -16,8 +17,19 @@ RegisteredFunctionJob, RegisteredFunctionJobCollection, ) +from models_library.functions import ( + FunctionClass, + FunctionGroupAccessRights, + FunctionJobStatus, + FunctionOutputs, + FunctionUserAccessRights, + FunctionUserApiAccessRights, + RegisteredFunctionJobPatch, + RegisteredFunctionJobWithStatus, +) from models_library.products import ProductName -from models_library.rabbitmq_basic_types import RPCMethodName +from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace +from models_library.rest_ordering import OrderBy from models_library.rest_pagination import PageMetaInfoLimitOffset from models_library.users import UserID from pydantic import TypeAdapter @@ -30,34 +42,34 @@ @log_decorator(_logger, level=logging.DEBUG) async def register_function( - rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, + rpc_namespace: RPCNamespace, *, user_id: UserID, product_name: ProductName, function: Function, ) -> RegisteredFunction: - result = await rabbitmq_rpc_client.request( - WEBSERVER_RPC_NAMESPACE, + result = await rpc_client.request( + rpc_namespace, TypeAdapter(RPCMethodName).validate_python("register_function"), function=function, user_id=user_id, product_name=product_name, ) - return TypeAdapter(RegisteredFunction).validate_python( - result - ) # Validates the result as a RegisteredFunction + return TypeAdapter(RegisteredFunction).validate_python(result) @log_decorator(_logger, 
level=logging.DEBUG) async def get_function( - rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, + rpc_namespace: RPCNamespace, *, user_id: UserID, product_name: ProductName, function_id: FunctionID, ) -> RegisteredFunction: - result = await rabbitmq_rpc_client.request( - WEBSERVER_RPC_NAMESPACE, + result = await rpc_client.request( + rpc_namespace, TypeAdapter(RPCMethodName).validate_python("get_function"), function_id=function_id, user_id=user_id, @@ -68,14 +80,15 @@ async def get_function( @log_decorator(_logger, level=logging.DEBUG) async def get_function_input_schema( - rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, + rpc_namespace: RPCNamespace, *, function_id: FunctionID, user_id: UserID, product_name: ProductName, ) -> FunctionInputSchema: - result = await rabbitmq_rpc_client.request( - WEBSERVER_RPC_NAMESPACE, + result = await rpc_client.request( + rpc_namespace, TypeAdapter(RPCMethodName).validate_python("get_function_input_schema"), function_id=function_id, user_id=user_id, @@ -86,14 +99,15 @@ async def get_function_input_schema( @log_decorator(_logger, level=logging.DEBUG) async def get_function_output_schema( - rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, + rpc_namespace: RPCNamespace, *, function_id: FunctionID, user_id: UserID, product_name: ProductName, ) -> FunctionOutputSchema: - result = await rabbitmq_rpc_client.request( - WEBSERVER_RPC_NAMESPACE, + result = await rpc_client.request( + rpc_namespace, TypeAdapter(RPCMethodName).validate_python("get_function_output_schema"), function_id=function_id, user_id=user_id, @@ -104,14 +118,15 @@ async def get_function_output_schema( @log_decorator(_logger, level=logging.DEBUG) async def delete_function( - rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, + rpc_namespace: RPCNamespace, *, function_id: FunctionID, user_id: UserID, product_name: ProductName, ) -> None: - result = await rabbitmq_rpc_client.request( - WEBSERVER_RPC_NAMESPACE, + result = await rpc_client.request( + rpc_namespace, TypeAdapter(RPCMethodName).validate_python("delete_function"), function_id=function_id, user_id=user_id, @@ -123,21 +138,29 @@ async def delete_function( @log_decorator(_logger, level=logging.DEBUG) async def list_functions( - rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, *, user_id: UserID, product_name: ProductName, pagination_offset: int, pagination_limit: int, + order_by: OrderBy | None = None, + filter_by_function_class: FunctionClass | None = None, + search_by_function_title: str | None = None, + search_by_multi_columns: str | None = None, ) -> tuple[list[RegisteredFunction], PageMetaInfoLimitOffset]: result: tuple[list[RegisteredFunction], PageMetaInfoLimitOffset] = ( - await rabbitmq_rpc_client.request( + await rpc_client.request( WEBSERVER_RPC_NAMESPACE, TypeAdapter(RPCMethodName).validate_python("list_functions"), pagination_offset=pagination_offset, pagination_limit=pagination_limit, user_id=user_id, product_name=product_name, + order_by=order_by, + filter_by_function_class=filter_by_function_class, + search_by_function_title=search_by_function_title, + search_by_multi_columns=search_by_multi_columns, ) ) return TypeAdapter( @@ -147,16 +170,18 @@ async def list_functions( @log_decorator(_logger, level=logging.DEBUG) async def list_function_jobs( - rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, *, user_id: UserID, product_name: ProductName, pagination_limit: int, pagination_offset: 
int, filter_by_function_id: FunctionID | None = None, + filter_by_function_job_ids: list[FunctionJobID] | None = None, + filter_by_function_job_collection_id: FunctionJobCollectionID | None = None, ) -> tuple[list[RegisteredFunctionJob], PageMetaInfoLimitOffset]: result: tuple[list[RegisteredFunctionJob], PageMetaInfoLimitOffset] = ( - await rabbitmq_rpc_client.request( + await rpc_client.request( WEBSERVER_RPC_NAMESPACE, TypeAdapter(RPCMethodName).validate_python("list_function_jobs"), user_id=user_id, @@ -164,6 +189,8 @@ async def list_function_jobs( pagination_offset=pagination_offset, pagination_limit=pagination_limit, filter_by_function_id=filter_by_function_id, + filter_by_function_job_ids=filter_by_function_job_ids, + filter_by_function_job_collection_id=filter_by_function_job_collection_id, ) ) return TypeAdapter( @@ -171,9 +198,43 @@ async def list_function_jobs( ).validate_python(result) +@log_decorator(_logger, level=logging.DEBUG) +async def list_function_jobs_with_status( + rpc_client: RabbitMQRPCClient, + *, + user_id: UserID, + product_name: ProductName, + pagination_offset: int, + pagination_limit: int, + filter_by_function_id: FunctionID | None = None, + filter_by_function_job_ids: list[FunctionJobID] | None = None, + filter_by_function_job_collection_id: FunctionJobCollectionID | None = None, +) -> tuple[ + list[RegisteredFunctionJobWithStatus], + PageMetaInfoLimitOffset, +]: + result = await rpc_client.request( + WEBSERVER_RPC_NAMESPACE, + TypeAdapter(RPCMethodName).validate_python("list_function_jobs_with_status"), + user_id=user_id, + product_name=product_name, + pagination_offset=pagination_offset, + pagination_limit=pagination_limit, + filter_by_function_id=filter_by_function_id, + filter_by_function_job_ids=filter_by_function_job_ids, + filter_by_function_job_collection_id=filter_by_function_job_collection_id, + ) + return TypeAdapter( + tuple[ + list[RegisteredFunctionJobWithStatus], + PageMetaInfoLimitOffset, + ] + ).validate_python(result) + + @log_decorator(_logger, level=logging.DEBUG) async def list_function_job_collections( - rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, *, user_id: UserID, product_name: ProductName, @@ -181,7 +242,7 @@ async def list_function_job_collections( pagination_offset: int, filters: FunctionJobCollectionsListFilters | None = None, ) -> tuple[list[RegisteredFunctionJobCollection], PageMetaInfoLimitOffset]: - result = await rabbitmq_rpc_client.request( + result = await rpc_client.request( WEBSERVER_RPC_NAMESPACE, TypeAdapter(RPCMethodName).validate_python("list_function_job_collections"), pagination_offset=pagination_offset, @@ -197,14 +258,14 @@ async def list_function_job_collections( @log_decorator(_logger, level=logging.DEBUG) async def update_function_title( - rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, *, user_id: UserID, product_name: ProductName, function_id: FunctionID, title: str, ) -> RegisteredFunction: - result = await rabbitmq_rpc_client.request( + result = await rpc_client.request( WEBSERVER_RPC_NAMESPACE, TypeAdapter(RPCMethodName).validate_python("update_function_title"), function_id=function_id, @@ -217,14 +278,14 @@ async def update_function_title( @log_decorator(_logger, level=logging.DEBUG) async def update_function_description( - rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, *, user_id: UserID, product_name: ProductName, function_id: FunctionID, description: str, ) -> RegisteredFunction: - result = await 
rabbitmq_rpc_client.request( + result = await rpc_client.request( WEBSERVER_RPC_NAMESPACE, TypeAdapter(RPCMethodName).validate_python("update_function_description"), function_id=function_id, @@ -237,14 +298,14 @@ async def update_function_description( @log_decorator(_logger, level=logging.DEBUG) async def run_function( - rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, *, function_id: FunctionID, inputs: FunctionInputs, user_id: UserID, product_name: ProductName, ) -> RegisteredFunctionJob: - result = await rabbitmq_rpc_client.request( + result = await rpc_client.request( WEBSERVER_RPC_NAMESPACE, TypeAdapter(RPCMethodName).validate_python("run_function"), function_id=function_id, @@ -259,13 +320,13 @@ async def run_function( @log_decorator(_logger, level=logging.DEBUG) async def register_function_job( - rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, *, user_id: UserID, product_name: ProductName, function_job: FunctionJob, ) -> RegisteredFunctionJob: - result = await rabbitmq_rpc_client.request( + result = await rpc_client.request( WEBSERVER_RPC_NAMESPACE, TypeAdapter(RPCMethodName).validate_python("register_function_job"), function_job=function_job, @@ -277,15 +338,37 @@ async def register_function_job( ) # Validates the result as a RegisteredFunctionJob +@log_decorator(_logger, level=logging.DEBUG) +async def patch_registered_function_job( + rpc_client: RabbitMQRPCClient, + *, + user_id: UserID, + product_name: ProductName, + function_job_uuid: FunctionJobID, + registered_function_job_patch: RegisteredFunctionJobPatch, +) -> RegisteredFunctionJob: + result = await rpc_client.request( + WEBSERVER_RPC_NAMESPACE, + TypeAdapter(RPCMethodName).validate_python("patch_registered_function_job"), + user_id=user_id, + product_name=product_name, + function_job_uuid=function_job_uuid, + registered_function_job_patch=registered_function_job_patch, + ) + return TypeAdapter(RegisteredFunctionJob).validate_python( + result + ) # Validates the result as a RegisteredFunctionJob + + @log_decorator(_logger, level=logging.DEBUG) async def get_function_job( - rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, *, user_id: UserID, function_job_id: FunctionJobID, product_name: ProductName, ) -> RegisteredFunctionJob: - result = await rabbitmq_rpc_client.request( + result = await rpc_client.request( WEBSERVER_RPC_NAMESPACE, TypeAdapter(RPCMethodName).validate_python("get_function_job"), function_job_id=function_job_id, @@ -297,14 +380,94 @@ async def get_function_job( @log_decorator(_logger, level=logging.DEBUG) -async def delete_function_job( +async def get_function_job_status( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + user_id: UserID, + function_job_id: FunctionJobID, + product_name: ProductName, +) -> FunctionJobStatus: + result = await rabbitmq_rpc_client.request( + WEBSERVER_RPC_NAMESPACE, + TypeAdapter(RPCMethodName).validate_python("get_function_job_status"), + function_job_id=function_job_id, + user_id=user_id, + product_name=product_name, + ) + return TypeAdapter(FunctionJobStatus).validate_python(result) + + +@log_decorator(_logger, level=logging.DEBUG) +async def get_function_job_outputs( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + user_id: UserID, + function_job_id: FunctionJobID, + product_name: ProductName, +) -> FunctionOutputs: + result = await rabbitmq_rpc_client.request( + WEBSERVER_RPC_NAMESPACE, + TypeAdapter(RPCMethodName).validate_python("get_function_job_outputs"), + function_job_id=function_job_id, + 
user_id=user_id, + product_name=product_name, + ) + return TypeAdapter(FunctionOutputs).validate_python(result) + + +@log_decorator(_logger, level=logging.DEBUG) +async def update_function_job_status( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, + job_status: FunctionJobStatus, + check_write_permissions: bool = True, +) -> FunctionJobStatus: + result = await rabbitmq_rpc_client.request( + WEBSERVER_RPC_NAMESPACE, + TypeAdapter(RPCMethodName).validate_python("update_function_job_status"), + function_job_id=function_job_id, + job_status=job_status, + user_id=user_id, + product_name=product_name, + check_write_permissions=check_write_permissions, + ) + return TypeAdapter(FunctionJobStatus).validate_python(result) + + +@log_decorator(_logger, level=logging.DEBUG) +async def update_function_job_outputs( rabbitmq_rpc_client: RabbitMQRPCClient, *, user_id: UserID, product_name: ProductName, function_job_id: FunctionJobID, + outputs: FunctionOutputs, + check_write_permissions: bool = True, +) -> FunctionOutputs: + result = await rabbitmq_rpc_client.request( + WEBSERVER_RPC_NAMESPACE, + TypeAdapter(RPCMethodName).validate_python("update_function_job_outputs"), + function_job_id=function_job_id, + outputs=outputs, + user_id=user_id, + product_name=product_name, + check_write_permissions=check_write_permissions, + ) + return TypeAdapter(FunctionOutputs).validate_python(result) + + +@log_decorator(_logger, level=logging.DEBUG) +async def delete_function_job( + rpc_client: RabbitMQRPCClient, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, ) -> None: - result: None = await rabbitmq_rpc_client.request( + result: None = await rpc_client.request( WEBSERVER_RPC_NAMESPACE, TypeAdapter(RPCMethodName).validate_python("delete_function_job"), function_job_id=function_job_id, @@ -316,14 +479,14 @@ async def delete_function_job( @log_decorator(_logger, level=logging.DEBUG) async def find_cached_function_jobs( - rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, *, user_id: UserID, product_name: ProductName, function_id: FunctionID, inputs: FunctionInputs, ) -> list[RegisteredFunctionJob] | None: - result = await rabbitmq_rpc_client.request( + result = await rpc_client.request( WEBSERVER_RPC_NAMESPACE, TypeAdapter(RPCMethodName).validate_python("find_cached_function_jobs"), function_id=function_id, @@ -338,13 +501,13 @@ async def find_cached_function_jobs( @log_decorator(_logger, level=logging.DEBUG) async def register_function_job_collection( - rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, *, user_id: UserID, product_name: ProductName, function_job_collection: FunctionJobCollection, ) -> RegisteredFunctionJobCollection: - result = await rabbitmq_rpc_client.request( + result = await rpc_client.request( WEBSERVER_RPC_NAMESPACE, TypeAdapter(RPCMethodName).validate_python("register_function_job_collection"), function_job_collection=function_job_collection, @@ -356,13 +519,13 @@ async def register_function_job_collection( @log_decorator(_logger, level=logging.DEBUG) async def get_function_job_collection( - rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, *, user_id: UserID, function_job_collection_id: FunctionJobCollectionID, product_name: ProductName, ) -> RegisteredFunctionJobCollection: - result = await rabbitmq_rpc_client.request( + result = await rpc_client.request( WEBSERVER_RPC_NAMESPACE, 
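# --- illustrative usage sketch (assumed caller context, not from the changeset) ---
# Sketch of fetching a function job's status and outputs with the two new helpers
# above. `rpc_client`, `user_id`, `product_name` and `function_job_id` are assumed
# to be provided by the caller.
from models_library.functions import FunctionJobStatus, FunctionOutputs
from servicelib.rabbitmq.rpc_interfaces.webserver.functions.functions_rpc_interface import (
    get_function_job_outputs,
    get_function_job_status,
)


async def _fetch_job_state(
    rpc_client, *, user_id, product_name, function_job_id
) -> tuple[FunctionJobStatus, FunctionOutputs]:
    status = await get_function_job_status(
        rpc_client,
        user_id=user_id,
        function_job_id=function_job_id,
        product_name=product_name,
    )
    outputs = await get_function_job_outputs(
        rpc_client,
        user_id=user_id,
        function_job_id=function_job_id,
        product_name=product_name,
    )
    return status, outputs
# -----------------------------------------------------------------------------------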
TypeAdapter(RPCMethodName).validate_python("get_function_job_collection"), function_job_collection_id=function_job_collection_id, @@ -374,13 +537,13 @@ async def get_function_job_collection( @log_decorator(_logger, level=logging.DEBUG) async def delete_function_job_collection( - rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, *, user_id: UserID, product_name: ProductName, function_job_collection_id: FunctionJobCollectionID, ) -> None: - result = await rabbitmq_rpc_client.request( + result = await rpc_client.request( WEBSERVER_RPC_NAMESPACE, TypeAdapter(RPCMethodName).validate_python("delete_function_job_collection"), function_job_collection_id=function_job_collection_id, @@ -388,3 +551,73 @@ async def delete_function_job_collection( product_name=product_name, ) assert result is None # nosec + + +@log_decorator(_logger, level=logging.DEBUG) +async def get_function_user_permissions( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + user_id: UserID, + product_name: ProductName, + function_id: FunctionID, +) -> FunctionUserAccessRights: + result = await rabbitmq_rpc_client.request( + WEBSERVER_RPC_NAMESPACE, + TypeAdapter(RPCMethodName).validate_python("get_function_user_permissions"), + function_id=function_id, + user_id=user_id, + product_name=product_name, + ) + return TypeAdapter(FunctionUserAccessRights).validate_python(result) + + +@log_decorator(_logger, level=logging.DEBUG) +async def get_functions_user_api_access_rights( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + user_id: UserID, + product_name: ProductName, +) -> FunctionUserApiAccessRights: + result = await rabbitmq_rpc_client.request( + WEBSERVER_RPC_NAMESPACE, + TypeAdapter(RPCMethodName).validate_python( + "get_functions_user_api_access_rights" + ), + user_id=user_id, + product_name=product_name, + ) + return TypeAdapter(FunctionUserApiAccessRights).validate_python(result) + + +@log_decorator(_logger, level=logging.DEBUG) +async def set_group_permissions( + rabbitmq_rpc_client: RabbitMQRPCClient, + *, + user_id: UserID, + product_name: ProductName, + object_type: Literal["function", "function_job", "function_job_collection"], + object_ids: list[FunctionID | FunctionJobID | FunctionJobCollectionID], + permission_group_id: int, + read: bool | None = None, + write: bool | None = None, + execute: bool | None = None, +) -> list[ + tuple[ + FunctionID | FunctionJobID | FunctionJobCollectionID, FunctionGroupAccessRights + ] +]: + result = await rabbitmq_rpc_client.request( + WEBSERVER_RPC_NAMESPACE, + TypeAdapter(RPCMethodName).validate_python("set_group_permissions"), + user_id=user_id, + product_name=product_name, + object_type=object_type, + object_ids=object_ids, + permission_group_id=permission_group_id, + read=read, + write=write, + execute=execute, + ) + return TypeAdapter( + list[tuple[FunctionID | FunctionJobID, FunctionGroupAccessRights]] + ).validate_python(result) diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/licenses/licensed_items.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/licenses/licensed_items.py index acb367de27b0..4bf2c0fec6d8 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/licenses/licensed_items.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/licenses/licensed_items.py @@ -1,13 +1,12 @@ import logging -from models_library.api_schemas_webserver import WEBSERVER_RPC_NAMESPACE from models_library.api_schemas_webserver.licensed_items import 
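# --- illustrative usage sketch (assumed caller context, not from the changeset) ---
# Example call of the new permission helper above. `rpc_client`, `user_id`,
# `product_name` and `function_id` are assumed; callers would inspect the returned
# FunctionUserAccessRights (e.g. its read/write/execute flags, assumed to exist on
# that model) before acting on the function.
from models_library.functions import FunctionUserAccessRights
from servicelib.rabbitmq.rpc_interfaces.webserver.functions.functions_rpc_interface import (
    get_function_user_permissions,
)


async def _user_access_to_function(
    rpc_client, *, user_id, product_name, function_id
) -> FunctionUserAccessRights:
    return await get_function_user_permissions(
        rpc_client,
        user_id=user_id,
        product_name=product_name,
        function_id=function_id,
    )
# -----------------------------------------------------------------------------------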
LicensedItemRpcGetPage from models_library.api_schemas_webserver.licensed_items_checkouts import ( LicensedItemCheckoutRpcGet, ) from models_library.licenses import LicensedItemID from models_library.products import ProductName -from models_library.rabbitmq_basic_types import RPCMethodName +from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace from models_library.resource_tracker_licensed_items_checkouts import ( LicensedItemCheckoutID, ) @@ -15,22 +14,24 @@ from models_library.users import UserID from models_library.wallets import WalletID from pydantic import TypeAdapter -from servicelib.logging_utils import log_decorator -from servicelib.rabbitmq import RabbitMQRPCClient + +from .....logging_utils import log_decorator +from .....rabbitmq import RabbitMQRPCClient _logger = logging.getLogger(__name__) @log_decorator(_logger, level=logging.DEBUG) async def get_licensed_items( - rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, + rpc_namespace: RPCNamespace, *, product_name: str, offset: int = 0, limit: int = 20, ) -> LicensedItemRpcGetPage: - result: LicensedItemRpcGetPage = await rabbitmq_rpc_client.request( - WEBSERVER_RPC_NAMESPACE, + result: LicensedItemRpcGetPage = await rpc_client.request( + rpc_namespace, TypeAdapter(RPCMethodName).validate_python("get_licensed_items"), product_name=product_name, offset=offset, @@ -42,7 +43,8 @@ async def get_licensed_items( @log_decorator(_logger, level=logging.DEBUG) async def get_available_licensed_items_for_wallet( - rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, + rpc_namespace: RPCNamespace, *, product_name: ProductName, wallet_id: WalletID, @@ -50,8 +52,8 @@ async def get_available_licensed_items_for_wallet( offset: int = 0, limit: int = 20, ) -> LicensedItemRpcGetPage: - result: LicensedItemRpcGetPage = await rabbitmq_rpc_client.request( - WEBSERVER_RPC_NAMESPACE, + result: LicensedItemRpcGetPage = await rpc_client.request( + rpc_namespace, TypeAdapter(RPCMethodName).validate_python( "get_available_licensed_items_for_wallet" ), @@ -67,7 +69,8 @@ async def get_available_licensed_items_for_wallet( @log_decorator(_logger, level=logging.DEBUG) async def checkout_licensed_item_for_wallet( - rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, + rpc_namespace: RPCNamespace, *, product_name: ProductName, user_id: UserID, @@ -76,8 +79,8 @@ async def checkout_licensed_item_for_wallet( num_of_seats: int, service_run_id: ServiceRunID, ) -> LicensedItemCheckoutRpcGet: - result = await rabbitmq_rpc_client.request( - WEBSERVER_RPC_NAMESPACE, + result = await rpc_client.request( + rpc_namespace, TypeAdapter(RPCMethodName).validate_python("checkout_licensed_item_for_wallet"), licensed_item_id=licensed_item_id, product_name=product_name, @@ -92,14 +95,15 @@ async def checkout_licensed_item_for_wallet( @log_decorator(_logger, level=logging.DEBUG) async def release_licensed_item_for_wallet( - rabbitmq_rpc_client: RabbitMQRPCClient, + rpc_client: RabbitMQRPCClient, + rpc_namespace: RPCNamespace, *, product_name: ProductName, user_id: UserID, licensed_item_checkout_id: LicensedItemCheckoutID, ) -> LicensedItemCheckoutRpcGet: - result = await rabbitmq_rpc_client.request( - WEBSERVER_RPC_NAMESPACE, + result = await rpc_client.request( + rpc_namespace, TypeAdapter(RPCMethodName).validate_python("release_licensed_item_for_wallet"), product_name=product_name, user_id=user_id, diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/payments.py 
b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/payments.py new file mode 100644 index 000000000000..685dd706d3ce --- /dev/null +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/payments.py @@ -0,0 +1,32 @@ +import logging +from decimal import Decimal + +from models_library.payments import InvoiceDataGet +from models_library.products import ProductName +from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace +from models_library.users import UserID +from pydantic import TypeAdapter +from servicelib.logging_utils import log_decorator +from servicelib.rabbitmq import RabbitMQRPCClient + +_logger = logging.getLogger(__name__) + + +@log_decorator(_logger, level=logging.DEBUG) +async def get_invoice_data( + rpc_client: RabbitMQRPCClient, + rpc_namespace: RPCNamespace, + *, + user_id: UserID, + dollar_amount: Decimal, + product_name: ProductName, +) -> InvoiceDataGet: + result: InvoiceDataGet = await rpc_client.request( + rpc_namespace, + TypeAdapter(RPCMethodName).validate_python("get_invoice_data"), + user_id=user_id, + dollar_amount=dollar_amount, + product_name=product_name, + ) + assert isinstance(result, InvoiceDataGet) # nosec + return result diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/products.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/products.py new file mode 100644 index 000000000000..6c1dc9744669 --- /dev/null +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/products.py @@ -0,0 +1,41 @@ +import logging +from decimal import Decimal + +from models_library.api_schemas_webserver.products import CreditResultRpcGet +from models_library.products import ProductName +from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace +from pydantic import TypeAdapter +from servicelib.logging_utils import log_decorator +from servicelib.rabbitmq import RabbitMQRPCClient + +_logger = logging.getLogger(__name__) + + +@log_decorator(_logger, level=logging.DEBUG) +async def get_credit_amount( + rpc_client: RabbitMQRPCClient, + rpc_namespace: RPCNamespace, + *, + dollar_amount: Decimal, + product_name: ProductName, +) -> CreditResultRpcGet: + """ + Get credit amount for a specific dollar amount and product. 
+ + Args: + rpc_client: RPC client to communicate with the webserver + rpc_namespace: Namespace for the RPC call + dollar_amount: The amount in dollars to be converted to credits + product_name: The product for which to calculate the credit amount + + Returns: + Credit result information containing the credit amount + """ + result: CreditResultRpcGet = await rpc_client.request( + rpc_namespace, + TypeAdapter(RPCMethodName).validate_python("get_credit_amount"), + dollar_amount=dollar_amount, + product_name=product_name, + ) + assert isinstance(result, CreditResultRpcGet) # nosec + return result diff --git a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/projects.py b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/projects.py index 15f40d66011e..02781d1cff54 100644 --- a/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/projects.py +++ b/packages/service-library/src/servicelib/rabbitmq/rpc_interfaces/webserver/projects.py @@ -1,14 +1,14 @@ import logging from typing import cast -from models_library.api_schemas_webserver import WEBSERVER_RPC_NAMESPACE from models_library.products import ProductName from models_library.projects import ProjectID -from models_library.rabbitmq_basic_types import RPCMethodName +from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace from models_library.rest_pagination import PageOffsetInt from models_library.rpc.webserver.projects import ( ListProjectsMarkedAsJobRpcFilters, PageRpcProjectJobRpcGet, + ProjectJobRpcGet, ) from models_library.rpc_pagination import ( DEFAULT_NUMBER_OF_ITEMS_PER_PAGE, @@ -16,8 +16,9 @@ ) from models_library.users import UserID from pydantic import TypeAdapter, validate_call -from servicelib.logging_utils import log_decorator -from servicelib.rabbitmq import RabbitMQRPCClient + +from ....logging_utils import log_decorator +from ....rabbitmq import RabbitMQRPCClient _logger = logging.getLogger(__name__) @@ -26,20 +27,23 @@ @validate_call(config={"arbitrary_types_allowed": True}) async def mark_project_as_job( rpc_client: RabbitMQRPCClient, + rpc_namespace: RPCNamespace, *, product_name: ProductName, user_id: UserID, project_uuid: ProjectID, job_parent_resource_name: str, + storage_assets_deleted: bool, ) -> None: result = await rpc_client.request( - WEBSERVER_RPC_NAMESPACE, + rpc_namespace, TypeAdapter(RPCMethodName).validate_python("mark_project_as_job"), product_name=product_name, user_id=user_id, project_uuid=project_uuid, job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=storage_assets_deleted, ) assert result is None @@ -48,6 +52,7 @@ async def mark_project_as_job( @validate_call(config={"arbitrary_types_allowed": True}) async def list_projects_marked_as_jobs( rpc_client: RabbitMQRPCClient, + rpc_namespace: RPCNamespace, *, product_name: ProductName, user_id: UserID, @@ -57,7 +62,7 @@ async def list_projects_marked_as_jobs( filters: ListProjectsMarkedAsJobRpcFilters | None = None, ) -> PageRpcProjectJobRpcGet: result = await rpc_client.request( - WEBSERVER_RPC_NAMESPACE, + rpc_namespace, TypeAdapter(RPCMethodName).validate_python("list_projects_marked_as_jobs"), product_name=product_name, user_id=user_id, @@ -67,3 +72,25 @@ async def list_projects_marked_as_jobs( ) assert TypeAdapter(PageRpcProjectJobRpcGet).validate_python(result) # nosec return cast(PageRpcProjectJobRpcGet, result) + + +@log_decorator(_logger, level=logging.DEBUG) +@validate_call(config={"arbitrary_types_allowed": True}) +async def 
get_project_marked_as_job( + rpc_client: RabbitMQRPCClient, + *, + product_name: ProductName, + user_id: UserID, + project_uuid: ProjectID, + job_parent_resource_name: str, +) -> ProjectJobRpcGet: + result = await rpc_client.request( + WEBSERVER_RPC_NAMESPACE, + TypeAdapter(RPCMethodName).validate_python("get_project_marked_as_job"), + product_name=product_name, + user_id=user_id, + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + ) + assert TypeAdapter(ProjectJobRpcGet).validate_python(result) # nosec + return cast(ProjectJobRpcGet, result) diff --git a/packages/service-library/src/servicelib/redis/__init__.py b/packages/service-library/src/servicelib/redis/__init__.py index 9e63a9f6525c..08d1ff40c47d 100644 --- a/packages/service-library/src/servicelib/redis/__init__.py +++ b/packages/service-library/src/servicelib/redis/__init__.py @@ -6,28 +6,40 @@ CouldNotConnectToRedisError, LockLostError, ProjectLockError, + SemaphoreAcquisitionError, + SemaphoreNotAcquiredError, ) from ._models import RedisManagerDBConfig +from ._project_document_version import ( + PROJECT_DB_UPDATE_REDIS_LOCK_KEY, + PROJECT_DOCUMENT_VERSION_KEY, + increment_and_return_project_document_version, +) from ._project_lock import ( get_project_locked_state, is_project_locked, with_project_locked, ) +from ._semaphore_decorator import with_limited_concurrency from ._utils import handle_redis_returns_union_types __all__: tuple[str, ...] = ( + "PROJECT_DB_UPDATE_REDIS_LOCK_KEY", + "PROJECT_DOCUMENT_VERSION_KEY", "CouldNotAcquireLockError", "CouldNotConnectToRedisError", - "exclusive", - "get_project_locked_state", - "handle_redis_returns_union_types", - "is_project_locked", "LockLostError", "ProjectLockError", "RedisClientSDK", "RedisClientsManager", "RedisManagerDBConfig", + "SemaphoreAcquisitionError", + "SemaphoreNotAcquiredError", + "exclusive", + "get_project_locked_state", + "handle_redis_returns_union_types", + "increment_and_return_project_document_version", + "is_project_locked", + "with_limited_concurrency", "with_project_locked", ) - -# nopycln: file diff --git a/packages/service-library/src/servicelib/redis/_client.py b/packages/service-library/src/servicelib/redis/_client.py index c2a081541104..ee4e9a2040e0 100644 --- a/packages/service-library/src/servicelib/redis/_client.py +++ b/packages/service-library/src/servicelib/redis/_client.py @@ -8,24 +8,34 @@ import redis.asyncio as aioredis import redis.exceptions +import tenacity +from common_library.async_tools import cancel_wait_task from redis.asyncio.lock import Lock from redis.asyncio.retry import Retry from redis.backoff import ExponentialBackoff -from ..async_utils import cancel_wait_task from ..background_task import periodic from ..logging_utils import log_catch, log_context from ._constants import ( DEFAULT_DECODE_RESPONSES, DEFAULT_HEALTH_CHECK_INTERVAL, DEFAULT_LOCK_TTL, - DEFAULT_SOCKET_TIMEOUT, ) _logger = logging.getLogger(__name__) -# SEE https://github.com/ITISFoundation/osparc-simcore/pull/7077 -_HEALTHCHECK_TASK_TIMEOUT_S: Final[float] = 3.0 +_HEALTHCHECK_TIMEOUT_S: Final[float] = 3.0 + + +@tenacity.retry( + wait=tenacity.wait_fixed(2), + stop=tenacity.stop_after_delay(20), + before_sleep=tenacity.before_sleep_log(_logger, logging.INFO), + reraise=True, +) +async def wait_till_redis_is_responsive(client: aioredis.Redis) -> None: + if not await client.ping(): + raise tenacity.TryAgain @dataclass @@ -36,8 +46,9 @@ class RedisClientSDK: health_check_interval: datetime.timedelta = DEFAULT_HEALTH_CHECK_INTERVAL 
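# --- illustrative usage sketch (assumed caller context, not from the changeset) ---
# Example call of the new `get_project_marked_as_job` RPC helper above. The connected
# `rpc_client`, the ids and the parent-resource name are assumed; the helper validates
# the reply into a ProjectJobRpcGet.
from models_library.projects import ProjectID
from models_library.rpc.webserver.projects import ProjectJobRpcGet
from servicelib.rabbitmq.rpc_interfaces.webserver.projects import (
    get_project_marked_as_job,
)


async def _fetch_job_project(
    rpc_client,
    *,
    product_name: str,
    user_id: int,
    project_uuid: ProjectID,
    job_parent_resource_name: str,
) -> ProjectJobRpcGet:
    return await get_project_marked_as_job(
        rpc_client,
        product_name=product_name,
        user_id=user_id,
        project_uuid=project_uuid,
        job_parent_resource_name=job_parent_resource_name,
    )
# -----------------------------------------------------------------------------------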
_client: aioredis.Redis = field(init=False) - _health_check_task: Task | None = None - _health_check_task_started_event: asyncio.Event | None = None + _task_health_check: Task | None = None + _started_event_task_health_check: asyncio.Event | None = None + _cancelled_event_task_health_check: asyncio.Event | None = None _is_healthy: bool = False @property @@ -54,26 +65,32 @@ def __post_init__(self) -> None: redis.exceptions.ConnectionError, ], retry_on_timeout=True, - socket_timeout=DEFAULT_SOCKET_TIMEOUT.total_seconds(), + socket_timeout=None, # NOTE: setting a timeout here can lead to issues with long running commands encoding="utf-8", decode_responses=self.decode_responses, client_name=self.client_name, ) - # NOTE: connection is done here already self._is_healthy = False - self._health_check_task_started_event = asyncio.Event() + self._started_event_task_health_check = asyncio.Event() + self._cancelled_event_task_health_check = asyncio.Event() + async def setup(self) -> None: @periodic(interval=self.health_check_interval) async def _periodic_check_health() -> None: - assert self._health_check_task_started_event # nosec - self._health_check_task_started_event.set() + assert self._started_event_task_health_check # nosec + assert self._cancelled_event_task_health_check # nosec + self._started_event_task_health_check.set() self._is_healthy = await self.ping() + if self._cancelled_event_task_health_check.is_set(): + raise asyncio.CancelledError - self._health_check_task = asyncio.create_task( + self._task_health_check = asyncio.create_task( _periodic_check_health(), name=f"redis_service_health_check_{self.redis_dsn}__{uuid4()}", ) + await wait_till_redis_is_responsive(self._client) + _logger.info( "Connection to %s succeeded with %s", f"redis at {self.redis_dsn=}", @@ -84,21 +101,21 @@ async def shutdown(self) -> None: with log_context( _logger, level=logging.DEBUG, msg=f"Shutdown RedisClientSDK {self}" ): - if self._health_check_task: - assert self._health_check_task_started_event # nosec - # NOTE: wait for the health check task to have started once before we can cancel it - await self._health_check_task_started_event.wait() - await cancel_wait_task( - self._health_check_task, max_delay=_HEALTHCHECK_TASK_TIMEOUT_S - ) + if self._task_health_check: + assert self._started_event_task_health_check # nosec + await self._started_event_task_health_check.wait() + assert self._cancelled_event_task_health_check # nosec + self._cancelled_event_task_health_check.set() + await cancel_wait_task(self._task_health_check, max_delay=None) await self._client.aclose(close_connection_pool=True) async def ping(self) -> bool: with log_catch(_logger, reraise=False): # NOTE: retry_* input parameters from aioredis.from_url do not apply for the ping call - await self._client.ping() + await asyncio.wait_for(self._client.ping(), timeout=_HEALTHCHECK_TIMEOUT_S) return True + return False @property diff --git a/packages/service-library/src/servicelib/redis/_clients_manager.py b/packages/service-library/src/servicelib/redis/_clients_manager.py index 60b93360b88d..758977f8526b 100644 --- a/packages/service-library/src/servicelib/redis/_clients_manager.py +++ b/packages/service-library/src/servicelib/redis/_clients_manager.py @@ -27,6 +27,7 @@ async def setup(self) -> None: health_check_interval=config.health_check_interval, client_name=f"{self.client_name}", ) + await self._client_sdks[config.database].setup() async def shutdown(self) -> None: await asyncio.gather( diff --git 
a/packages/service-library/src/servicelib/redis/_constants.py b/packages/service-library/src/servicelib/redis/_constants.py index 6a10c6b75b0e..845e70d7fa8b 100644 --- a/packages/service-library/src/servicelib/redis/_constants.py +++ b/packages/service-library/src/servicelib/redis/_constants.py @@ -3,9 +3,16 @@ from pydantic import NonNegativeInt +DEFAULT_EXPECTED_LOCK_OVERALL_TIME: Final[datetime.timedelta] = datetime.timedelta( + seconds=30 +) DEFAULT_LOCK_TTL: Final[datetime.timedelta] = datetime.timedelta(seconds=10) -DEFAULT_SOCKET_TIMEOUT: Final[datetime.timedelta] = datetime.timedelta(seconds=30) +DEFAULT_SEMAPHORE_BLOCK_TIMEOUT: Final[datetime.timedelta] = datetime.timedelta( + seconds=30 +) +DEFAULT_SEMAPHORE_TTL: Final[datetime.timedelta] = datetime.timedelta(seconds=10) +SEMAPHORE_KEY_PREFIX: Final[str] = "semaphores:" DEFAULT_DECODE_RESPONSES: Final[bool] = True DEFAULT_HEALTH_CHECK_INTERVAL: Final[datetime.timedelta] = datetime.timedelta(seconds=5) diff --git a/packages/service-library/src/servicelib/redis/_decorators.py b/packages/service-library/src/servicelib/redis/_decorators.py index 6d686a33af59..63b1019ba656 100644 --- a/packages/service-library/src/servicelib/redis/_decorators.py +++ b/packages/service-library/src/servicelib/redis/_decorators.py @@ -1,5 +1,4 @@ import asyncio -import contextlib import functools import logging import socket @@ -9,11 +8,13 @@ import arrow import redis.exceptions +from common_library.async_tools import cancel_wait_task +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from redis.asyncio.lock import Lock from ..background_task import periodic from ._client import RedisClientSDK -from ._constants import DEFAULT_LOCK_TTL +from ._constants import DEFAULT_EXPECTED_LOCK_OVERALL_TIME, DEFAULT_LOCK_TTL from ._errors import CouldNotAcquireLockError, LockLostError from ._utils import auto_extend_lock @@ -23,9 +24,9 @@ R = TypeVar("R") _EXCLUSIVE_TASK_NAME: Final[str] = "exclusive/{module_name}.{func_name}" -_EXCLUSIVE_AUTO_EXTEND_TASK_NAME: Final[ - str -] = "exclusive/autoextend_lock_{redis_lock_key}" +_EXCLUSIVE_AUTO_EXTEND_TASK_NAME: Final[str] = ( + "exclusive/autoextend_lock_{redis_lock_key}" +) @periodic(interval=DEFAULT_LOCK_TTL / 2, raise_on_error=True) @@ -94,6 +95,7 @@ async def _wrapper(*args: P.args, **kwargs: P.kwargs) -> R: ): raise CouldNotAcquireLockError(lock=lock) + lock_acquisition_time = arrow.utcnow() try: async with asyncio.TaskGroup() as tg: started_event = asyncio.Event() @@ -116,10 +118,12 @@ async def _wrapper(*args: P.args, **kwargs: P.kwargs) -> R: module_name=coro.__module__, func_name=coro.__name__ ), ) - res = await work_task - auto_extend_lock_task.cancel() - return res + # cancel the auto-extend task (work is done) + # NOTE: if we do not explicitely await the task inside the context manager + # it sometimes hangs forever (Python issue?) 
+ await cancel_wait_task(auto_extend_lock_task, max_delay=None) + return res except BaseExceptionGroup as eg: # Separate exceptions into LockLostError and others @@ -134,10 +138,39 @@ async def _wrapper(*args: P.args, **kwargs: P.kwargs) -> R: assert len(lock_lost_errors.exceptions) == 1 # nosec raise lock_lost_errors.exceptions[0] from eg finally: - with contextlib.suppress(redis.exceptions.LockNotOwnedError): + try: # in the case where the lock would have been lost, # this would raise again and is not necessary await lock.release() + except redis.exceptions.LockNotOwnedError as exc: + _logger.exception( + **create_troubleshooting_log_kwargs( + f"Unexpected error while releasing lock '{redis_lock_key}'", + error=exc, + error_context={ + "redis_lock_key": redis_lock_key, + "lock_value": lock_value, + "client_name": client.client_name, + "hostname": socket.gethostname(), + "coroutine": coro.__name__, + }, + tip="This might happen if the lock was lost before releasing it. " + "Look for synchronous code that prevents refreshing the lock or asyncio loop overload.", + ) + ) + finally: + lock_release_time = arrow.utcnow() + locking_time = lock_release_time - lock_acquisition_time + if locking_time > DEFAULT_EXPECTED_LOCK_OVERALL_TIME: + _logger.warning( + "Lock `%s' for %s was held for %s which is longer than the expected (%s). " + "TIP: consider reducing the locking time by optimizing the code inside " + "the critical section or increasing the default locking time", + redis_lock_key, + coro.__name__, + locking_time, + DEFAULT_EXPECTED_LOCK_OVERALL_TIME, + ) return _wrapper diff --git a/packages/service-library/src/servicelib/redis/_errors.py b/packages/service-library/src/servicelib/redis/_errors.py index 7fc3c7823ae0..2d09a3730869 100644 --- a/packages/service-library/src/servicelib/redis/_errors.py +++ b/packages/service-library/src/servicelib/redis/_errors.py @@ -4,8 +4,7 @@ from common_library.errors_classes import OsparcErrorMixin -class BaseRedisError(OsparcErrorMixin, RuntimeError): - ... +class BaseRedisError(OsparcErrorMixin, RuntimeError): ... class CouldNotAcquireLockError(BaseRedisError): @@ -25,3 +24,25 @@ class LockLostError(BaseRedisError): ProjectLockError: TypeAlias = redis.exceptions.LockError # NOTE: backwards compatible + + +class SemaphoreError(BaseRedisError): + msg_template: str = ( + "Unexpected error with semaphore '{name}' by this instance `{instance_id}`" + ) + + +class SemaphoreAcquisitionError(SemaphoreError): + msg_template: str = ( + "Could not acquire semaphore '{name}' by this instance `{instance_id}`" + ) + + +class SemaphoreNotAcquiredError(SemaphoreError): + msg_template: str = ( + "Semaphore '{name}' was not acquired by this instance `{instance_id}`" + ) + + +class SemaphoreLostError(SemaphoreError): + msg_template: str = "Semaphore '{name}' was lost by this instance `{instance_id}`" diff --git a/packages/service-library/src/servicelib/redis/_project_document_version.py b/packages/service-library/src/servicelib/redis/_project_document_version.py new file mode 100644 index 000000000000..7193adb8ca79 --- /dev/null +++ b/packages/service-library/src/servicelib/redis/_project_document_version.py @@ -0,0 +1,39 @@ +"""Project document versioning utilities. + +This module provides utilities for managing project document versions using Redis. +The versioning system ensures that all users working on a project are synchronized +with the latest changes through atomic version incrementing. 
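A minimal caller-side sketch of the helper defined just below; it assumes an already initialized RedisClientSDK and a valid ProjectID:

```python
from models_library.projects import ProjectID
from servicelib.redis._client import RedisClientSDK
from servicelib.redis._project_document_version import (
    increment_and_return_project_document_version,
)


async def bump_document_version(
    client: RedisClientSDK, project_uuid: ProjectID
) -> int:
    # Redis INCR is atomic, so concurrent callers always observe strictly
    # increasing values; the first call on a fresh key returns 1
    return await increment_and_return_project_document_version(client, project_uuid)
```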
+""" + +from typing import Final + +from models_library.projects import ProjectID + +from ._client import RedisClientSDK + +# Redis key patterns +PROJECT_DOCUMENT_VERSION_KEY: Final[str] = "projects:{}:version" +PROJECT_DB_UPDATE_REDIS_LOCK_KEY: Final[str] = "project_db_update:{}" + + +async def increment_and_return_project_document_version( + redis_client: RedisClientSDK, project_uuid: ProjectID +) -> int: + """ + Atomically increments and returns the project document version using Redis. + Returns the incremented version number. + + This function ensures thread-safe version incrementing by using Redis INCR command + which is atomic. The version starts at 1 for the first call. + + Args: + redis_client: The Redis client SDK instance + project_uuid: The project UUID to get/increment version for + + Returns: + The new (incremented) version number + """ + version_key = PROJECT_DOCUMENT_VERSION_KEY.format(project_uuid) + # If key doesn't exist, it's created with value 0 and then incremented to 1 + output = await redis_client.redis.incr(version_key) + return int(output) diff --git a/packages/service-library/src/servicelib/redis/_project_lock.py b/packages/service-library/src/servicelib/redis/_project_lock.py index d618d88c58f0..3ea3cc2bcf19 100644 --- a/packages/service-library/src/servicelib/redis/_project_lock.py +++ b/packages/service-library/src/servicelib/redis/_project_lock.py @@ -6,8 +6,8 @@ from models_library.projects import ProjectID from models_library.projects_access import Owner from models_library.projects_state import ProjectLocked, ProjectStatus -from servicelib.logging_utils import log_catch +from ..logging_utils import log_catch from ._client import RedisClientSDK from ._decorators import exclusive from ._errors import CouldNotAcquireLockError, ProjectLockError diff --git a/packages/service-library/src/servicelib/redis/_semaphore.py b/packages/service-library/src/servicelib/redis/_semaphore.py new file mode 100644 index 000000000000..a5ec957275f9 --- /dev/null +++ b/packages/service-library/src/servicelib/redis/_semaphore.py @@ -0,0 +1,549 @@ +import asyncio +import contextlib +import datetime +import logging +import socket +import uuid +from collections.abc import AsyncIterator +from typing import Annotated, ClassVar + +import arrow +import redis.exceptions +from common_library.async_tools import cancel_wait_task +from common_library.basic_types import DEFAULT_FACTORY +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs +from pydantic import ( + BaseModel, + Field, + PositiveInt, + computed_field, + field_validator, +) +from redis.commands.core import AsyncScript +from tenacity import ( + retry, + retry_if_exception_type, + wait_random_exponential, +) + +from ..background_task import periodic +from ._client import RedisClientSDK +from ._constants import ( + DEFAULT_EXPECTED_LOCK_OVERALL_TIME, + DEFAULT_SEMAPHORE_TTL, + SEMAPHORE_KEY_PREFIX, +) +from ._errors import ( + SemaphoreAcquisitionError, + SemaphoreError, + SemaphoreLostError, + SemaphoreNotAcquiredError, +) +from ._semaphore_lua import ( + ACQUIRE_SEMAPHORE_SCRIPT, + REGISTER_SEMAPHORE_TOKEN_SCRIPT, + RELEASE_SEMAPHORE_SCRIPT, + RENEW_SEMAPHORE_SCRIPT, + SCRIPT_BAD_EXIT_CODE, + SCRIPT_OK_EXIT_CODE, +) +from ._utils import handle_redis_returns_union_types + +_logger = logging.getLogger(__name__) + + +class DistributedSemaphore(BaseModel): + """ + Warning: This should only be used directly via the decorator + + A distributed semaphore implementation using Redis. 
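For context, the token-pool idea behind this semaphore reduced to plain redis-py calls; this is a conceptual sketch only, not the Lua-scripted implementation added below:

```python
# Conceptual sketch of a token-pool semaphore using a Redis LIST.
import redis.asyncio as aioredis

CAPACITY = 3
TOKENS_KEY = "semaphores:demo:tokens"


async def seed_tokens(client: aioredis.Redis) -> None:
    # one-time initialization: put CAPACITY tokens into a Redis LIST
    await client.delete(TOKENS_KEY)
    await client.lpush(TOKENS_KEY, *(f"token_{i}" for i in range(CAPACITY)))


async def limited_work(client: aioredis.Redis) -> None:
    # acquire = blocking pop of one token, release = push it back, so at
    # most CAPACITY callers sit between the two calls at any given time
    _key, token = await client.brpop([TOKENS_KEY])
    try:
        ...
    finally:
        await client.lpush(TOKENS_KEY, token)
```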
+ + This semaphore allows limiting the number of concurrent operations across + multiple processes/instances using Redis as the coordination backend. + + Args: + redis_client: Redis client for coordination + key: Unique identifier for the semaphore + capacity: Maximum number of concurrent holders + ttl: Time-to-live for semaphore entries (auto-cleanup) + blocking: Whether acquire() should block until available + blocking_timeout: Maximum time to wait when blocking (None = no timeout) + + Example: + async with DistributedSemaphore( + redis_client, "my_resource", capacity=3 + ): + # Only 3 instances can execute this block concurrently + await do_limited_work() + """ + + model_config = { + "arbitrary_types_allowed": True, # For RedisClientSDK + } + + # Configuration fields with validation + redis_client: RedisClientSDK + key: Annotated[ + str, Field(min_length=1, description="Unique identifier for the semaphore") + ] + capacity: Annotated[ + PositiveInt, Field(description="Maximum number of concurrent holders") + ] + ttl: datetime.timedelta = DEFAULT_SEMAPHORE_TTL + blocking: Annotated[ + bool, Field(description="Whether acquire() should block until available") + ] = True + blocking_timeout: Annotated[ + datetime.timedelta | None, + Field(description="Maximum time to wait when blocking"), + ] = None + instance_id: Annotated[ + str, + Field( + description="Unique instance identifier", + default_factory=lambda: f"{uuid.uuid4()}", + ), + ] = DEFAULT_FACTORY + + # Class and/or Private state attributes (not part of the model) + register_semaphore: ClassVar[AsyncScript | None] = None + acquire_script: ClassVar[AsyncScript | None] = None + release_script: ClassVar[AsyncScript | None] = None + renew_script: ClassVar[AsyncScript | None] = None + + _token: str | None = None # currently held token, if any + + @classmethod + def _register_scripts(cls, redis_client: RedisClientSDK) -> None: + """Register Lua scripts with Redis if not already done. + This is done once per class, not per instance. Internally the Redis client + caches the script SHA, so this is efficient. 
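Side note on the redis-py mechanism relied on here: `register_script()` returns a script object that executes via EVALSHA and transparently falls back to EVAL if the server does not know the SHA yet, so re-registering the same source is cheap. A tiny standalone sketch, assuming a reachable Redis:

```python
import redis.asyncio as aioredis


async def demo_script_registration() -> None:
    client = aioredis.Redis()
    # registering the same source twice yields the same SHA1 server-side
    incr = client.register_script("return redis.call('INCR', KEYS[1])")
    print(await incr(keys=["demo:counter"], args=[]))
```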
Even if called multiple times, + the script is only registered once.""" + if cls.acquire_script is None: + cls.register_semaphore = redis_client.redis.register_script( + REGISTER_SEMAPHORE_TOKEN_SCRIPT + ) + cls.acquire_script = redis_client.redis.register_script( + ACQUIRE_SEMAPHORE_SCRIPT + ) + cls.release_script = redis_client.redis.register_script( + RELEASE_SEMAPHORE_SCRIPT + ) + cls.renew_script = redis_client.redis.register_script( + RENEW_SEMAPHORE_SCRIPT + ) + + def __init__(self, **data) -> None: + super().__init__(**data) + self.__class__._register_scripts(self.redis_client) # noqa: SLF001 + + @computed_field # type: ignore[prop-decorator] + @property + def semaphore_key(self) -> str: + """Redis key for the semaphore sorted set.""" + return f"{SEMAPHORE_KEY_PREFIX}{self.key}_cap{self.capacity}" + + @computed_field # type: ignore[prop-decorator] + @property + def tokens_key(self) -> str: + """Redis key for the token pool LIST.""" + return f"{self.semaphore_key}:tokens" + + @computed_field # type: ignore[prop-decorator] + @property + def holders_set(self) -> str: + """Redis key for the holders SET.""" + return f"{self.semaphore_key}:holders_set" + + @computed_field # type: ignore[prop-decorator] + @property + def holder_key(self) -> str: + """Redis key for this instance's holder entry.""" + return f"{self.semaphore_key}:holders:{self.instance_id}" + + @computed_field # type: ignore[prop-decorator] + @property + def holders_set_ttl(self) -> datetime.timedelta: + """TTL for the holders SET""" + return self.ttl * 5 + + @computed_field # type: ignore[prop-decorator] + @property + def tokens_set_ttl(self) -> datetime.timedelta: + """TTL for the tokens SET""" + return self.ttl * 5 + + @field_validator("ttl") + @classmethod + def validate_ttl(cls, v: datetime.timedelta) -> datetime.timedelta: + if v.total_seconds() < 1: + msg = "TTL must be positive" + raise ValueError(msg) + return v + + @field_validator("blocking_timeout") + @classmethod + def validate_timeout( + cls, v: datetime.timedelta | None + ) -> datetime.timedelta | None: + if v is not None and v.total_seconds() <= 0: + msg = "Timeout must be positive" + raise ValueError(msg) + return v + + async def _ensure_semaphore_initialized(self) -> None: + """Initializes the semaphore in Redis if not already done.""" + assert self.register_semaphore is not None # nosec + result = await self.register_semaphore( # pylint: disable=not-callable + keys=[self.tokens_key, self.holders_set], + args=[self.capacity, self.holders_set_ttl.total_seconds()], + client=self.redis_client.redis, + ) + assert isinstance(result, list) # nosec + exit_code, status = result + assert exit_code == SCRIPT_OK_EXIT_CODE # nosec + _logger.debug("Semaphore '%s' init status: %s", self.key, status) + + async def _blocking_acquire(self) -> str | None: + @retry( + wait=wait_random_exponential(min=0.1, max=0.5), + retry=retry_if_exception_type(redis.exceptions.TimeoutError), + ) + async def _acquire_forever_on_socket_timeout() -> list[str] | None: + # NOTE: brpop returns None on timeout + + tokens_key_token: list[str] | None = await handle_redis_returns_union_types( + self.redis_client.redis.brpop( + [self.tokens_key], + timeout=None, # NOTE: we always block forever since tenacity takes care of timing out + ) + ) + return tokens_key_token + + try: + # NOTE: redis-py library timeouts when the defined socket timeout triggers + # The BRPOP command itself could timeout but the redis-py socket timeout defeats the purpose + # so we always block forever on BRPOP, tenacity takes 
care of retrying when a socket timeout happens + # and we use asyncio.timeout to enforce the blocking_timeout if defined + async with asyncio.timeout( + self.blocking_timeout.total_seconds() if self.blocking_timeout else None + ): + tokens_key_token = await _acquire_forever_on_socket_timeout() + assert tokens_key_token is not None # nosec + assert len(tokens_key_token) == 2 # nosec # noqa: PLR2004 + assert tokens_key_token[0] == self.tokens_key # nosec + return tokens_key_token[1] + except TimeoutError as e: + raise SemaphoreAcquisitionError( + name=self.key, instance_id=self.instance_id + ) from e + + async def _non_blocking_acquire(self) -> str | None: + token: str | list[str] | None = await handle_redis_returns_union_types( + self.redis_client.redis.rpop(self.tokens_key) + ) + if token is None: + _logger.debug( + "Semaphore '%s' not acquired (no tokens available) (instance: %s)", + self.key, + self.instance_id, + ) + return None + + assert isinstance(token, str) # nosec + return token + + async def acquire(self) -> bool: + """ + Acquire the semaphore. + + Returns: + True if acquired successfully, False if not acquired and non-blocking + + Raises: + SemaphoreAcquisitionError: If acquisition fails and blocking=True + """ + await self._ensure_semaphore_initialized() + + if await self.is_acquired(): + _logger.debug( + "Semaphore '%s' already acquired by this instance (instance: %s)", + self.key, + self.instance_id, + ) + return True + + if self.blocking is False: + self._token = await self._non_blocking_acquire() + if not self._token: + return False + else: + self._token = await self._blocking_acquire() + + assert self._token is not None # nosec + # set up the semaphore holder with a TTL + assert self.acquire_script is not None # nosec + result = await self.acquire_script( # pylint: disable=not-callable + keys=[self.holders_set, self.holder_key], + args=[ + self._token, + self.instance_id, + self.ttl.total_seconds(), + self.holders_set_ttl.total_seconds(), + ], + client=self.redis_client.redis, + ) + + # Lua script returns: [exit_code, status, current_count, expired_count] + assert isinstance(result, list) # nosec + exit_code, status, token, current_count = result + + assert exit_code == SCRIPT_OK_EXIT_CODE # nosec + assert status == "acquired" # nosec + + _logger.debug( + "Acquired semaphore '%s' with token %s (instance: %s, count: %s)", + self.key, + token, + self.instance_id, + current_count, + ) + return True + + async def release(self) -> None: + """ + Release the semaphore + + Raises: + SemaphoreNotAcquiredError: If semaphore was not acquired by this instance + """ + + # Execute the release Lua script atomically + assert self.release_script is not None # nosec + release_args = [self.instance_id] + if self._token is not None: + release_args.append(self._token) + result = await self.release_script( # pylint: disable=not-callable + keys=[self.tokens_key, self.holders_set, self.holder_key], + args=release_args, + client=self.redis_client.redis, + ) + self._token = None + + assert isinstance(result, list) # nosec + exit_code, status, current_count = result + if exit_code == SCRIPT_OK_EXIT_CODE: + assert status == "released" # nosec + _logger.debug( + "Released semaphore '%s' (instance: %s, count: %s)", + self.key, + self.instance_id, + current_count, + ) + return + + # Instance was already expired or not acquired + assert exit_code == SCRIPT_BAD_EXIT_CODE # nosec + _logger.error( + "Failed to release semaphore '%s' - %s (instance: %s, count: %s)", + self.key, + status, + self.instance_id, 
+ current_count, + ) + if status == "not_held": + raise SemaphoreNotAcquiredError(name=self.key, instance_id=self.instance_id) + assert status == "expired" # nosec + raise SemaphoreLostError(name=self.key, instance_id=self.instance_id) + + async def reacquire(self) -> None: + """ + Re-acquire a semaphore + This function is intended to be called by decorators or external renewal mechanisms. + + + Raises: + SemaphoreLostError: If the semaphore was lost or expired + """ + + ttl_seconds = self.ttl.total_seconds() + + # Execute the renewal Lua script atomically + assert self.renew_script is not None # nosec + result = await self.renew_script( # pylint: disable=not-callable + keys=[self.holders_set, self.holder_key, self.tokens_key], + args=[ + self.instance_id, + ttl_seconds, + self.holders_set_ttl.total_seconds(), + self.tokens_set_ttl.total_seconds(), + ], + client=self.redis_client.redis, + ) + + assert isinstance(result, list) # nosec + exit_code, status, current_count = result + + if exit_code == SCRIPT_OK_EXIT_CODE: + assert status == "renewed" # nosec + _logger.debug( + "Renewed semaphore '%s' (instance: %s, count: %s)", + self.key, + self.instance_id, + current_count, + ) + return + assert exit_code == SCRIPT_BAD_EXIT_CODE # nosec + + _logger.warning( + "Semaphore '%s' holder key was lost (instance: %s, status: %s, count: %s)", + self.key, + self.instance_id, + status, + current_count, + ) + if status == "not_held": + raise SemaphoreNotAcquiredError(name=self.key, instance_id=self.instance_id) + assert status == "expired" # nosec + raise SemaphoreLostError(name=self.key, instance_id=self.instance_id) + + async def is_acquired(self) -> bool: + """Check if the semaphore is currently acquired by this instance.""" + return bool( + await handle_redis_returns_union_types( + self.redis_client.redis.exists(self.holder_key) + ) + == 1 + ) + + async def current_count(self) -> int: + """Get the current number of semaphore holders""" + return await handle_redis_returns_union_types( + self.redis_client.redis.scard(self.holders_set) + ) + + async def available_tokens(self) -> int: + """Get the size of the semaphore (number of available tokens)""" + await self._ensure_semaphore_initialized() + return await handle_redis_returns_union_types( + self.redis_client.redis.llen(self.tokens_key) + ) + + +@contextlib.asynccontextmanager +async def distributed_semaphore( # noqa: C901 + redis_client: RedisClientSDK, + *, + key: str, + capacity: PositiveInt, + ttl: datetime.timedelta = DEFAULT_SEMAPHORE_TTL, + blocking: bool = True, + blocking_timeout: datetime.timedelta | None = None, + expected_lock_overall_time: datetime.timedelta = DEFAULT_EXPECTED_LOCK_OVERALL_TIME, +) -> AsyncIterator[DistributedSemaphore]: + """ + Async context manager for DistributedSemaphore. 
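Complementing the blocking example in the docstring that follows, a sketch of a bounded wait: with `blocking_timeout` set, failing to get a slot raises `SemaphoreAcquisitionError` (imports taken from this diff):

```python
import datetime

from servicelib.redis._client import RedisClientSDK
from servicelib.redis._errors import SemaphoreAcquisitionError
from servicelib.redis._semaphore import distributed_semaphore


async def guarded(client: RedisClientSDK) -> None:
    try:
        async with distributed_semaphore(
            client,
            key="my_resource",
            capacity=3,
            blocking_timeout=datetime.timedelta(seconds=5),
        ):
            ...  # at most 3 concurrent holders across all replicas
    except SemaphoreAcquisitionError:
        ...  # no slot became available within 5 seconds
```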
+ + Example: + async with distributed_semaphore(redis_client, key="my_resource", capacity=3) as sem: + # Only 3 instances can execute this block concurrently + await do_limited_work() + """ + semaphore = DistributedSemaphore( + redis_client=redis_client, + key=key, + capacity=capacity, + ttl=ttl, + blocking=blocking, + blocking_timeout=blocking_timeout, + ) + + @periodic(interval=semaphore.ttl / 3, raise_on_error=True) + async def _periodic_reacquisition( + semaphore: DistributedSemaphore, + started: asyncio.Event, + cancellation_event: asyncio.Event, + ) -> None: + if cancellation_event.is_set(): + raise asyncio.CancelledError + if not started.is_set(): + started.set() + await semaphore.reacquire() + + lock_acquisition_time = None + try: + if not await semaphore.acquire(): + raise SemaphoreAcquisitionError(name=key, instance_id=semaphore.instance_id) + + lock_acquisition_time = arrow.utcnow() + + async with ( + asyncio.TaskGroup() as tg + ): # NOTE: using task group ensures proper cancellation propagation of parent task + auto_reacquisition_started = asyncio.Event() + cancellation_event = asyncio.Event() + auto_reacquisition_task = tg.create_task( + _periodic_reacquisition( + semaphore, auto_reacquisition_started, cancellation_event + ), + name=f"semaphore/auto_reacquisition_task_{semaphore.key}_{semaphore.instance_id}", + ) + await auto_reacquisition_started.wait() + try: + # NOTE: this try/finally ensures that cancellation_event is set when we exit the context + # even in case of exceptions + yield semaphore + finally: + cancellation_event.set() # NOTE: this ensures cancellation is effective + await cancel_wait_task(auto_reacquisition_task) + except BaseExceptionGroup as eg: + semaphore_errors, other_errors = eg.split(SemaphoreError) + if other_errors: + assert len(other_errors.exceptions) == 1 # nosec + raise other_errors.exceptions[0] from eg + assert semaphore_errors is not None # nosec + assert len(semaphore_errors.exceptions) == 1 # nosec + raise semaphore_errors.exceptions[0] from eg + finally: + try: + await semaphore.release() + except SemaphoreNotAcquiredError as exc: + _logger.exception( + **create_troubleshooting_log_kwargs( + f"Unexpected error while releasing semaphore '{semaphore.key}'", + error=exc, + error_context={ + "semaphore_key": semaphore.key, + "semaphore_instance_id": semaphore.instance_id, + "hostname": socket.gethostname(), + }, + tip="This indicates a logic error in the code using the semaphore", + ) + ) + except SemaphoreLostError as exc: + _logger.exception( + **create_troubleshooting_log_kwargs( + f"Unexpected error while releasing semaphore '{semaphore.key}'", + error=exc, + error_context={ + "semaphore_key": semaphore.key, + "semaphore_instance_id": semaphore.instance_id, + "hostname": socket.gethostname(), + }, + tip="This indicates that the semaphore was lost or expired before release. " + "Look for synchronous code blocking the event loop or an overloaded loop that cannot schedule the reacquisition task.", + ) + ) + if lock_acquisition_time is not None: + lock_release_time = arrow.utcnow() + locking_time = lock_release_time - lock_acquisition_time + if locking_time > expected_lock_overall_time: + _logger.warning( + "Semaphore '%s' was held for %s by %s which is longer than expected (%s). 
" + "TIP: consider reducing the locking time by optimizing the code inside " + "the critical section or increasing the default locking time", + semaphore.key, + locking_time, + semaphore.instance_id, + expected_lock_overall_time, + ) diff --git a/packages/service-library/src/servicelib/redis/_semaphore_decorator.py b/packages/service-library/src/servicelib/redis/_semaphore_decorator.py new file mode 100644 index 000000000000..72e7fd9d309e --- /dev/null +++ b/packages/service-library/src/servicelib/redis/_semaphore_decorator.py @@ -0,0 +1,184 @@ +import datetime +import functools +import logging +from collections.abc import AsyncIterator, Callable, Coroutine +from contextlib import AbstractAsyncContextManager, asynccontextmanager +from typing import Any, ParamSpec, TypeVar + +from ._client import RedisClientSDK +from ._constants import ( + DEFAULT_EXPECTED_LOCK_OVERALL_TIME, + DEFAULT_SEMAPHORE_TTL, +) +from ._semaphore import distributed_semaphore + +_logger = logging.getLogger(__name__) + + +P = ParamSpec("P") +R = TypeVar("R") + + +def with_limited_concurrency( + redis_client: RedisClientSDK | Callable[..., RedisClientSDK], + *, + key: str | Callable[..., str], + capacity: int | Callable[..., int], + ttl: datetime.timedelta = DEFAULT_SEMAPHORE_TTL, + blocking: bool = True, + blocking_timeout: datetime.timedelta | None = None, + expected_lock_overall_time: datetime.timedelta = DEFAULT_EXPECTED_LOCK_OVERALL_TIME, +) -> Callable[ + [Callable[P, Coroutine[Any, Any, R]]], Callable[P, Coroutine[Any, Any, R]] +]: + """ + Decorator to limit concurrent execution of a function using a distributed semaphore. + + This decorator ensures that only a specified number of instances of the decorated + function can run concurrently across multiple processes/instances using Redis + as the coordination backend. 
+ + Args: + redis_client: Redis client for coordination (can be callable) + key: Unique identifier for the semaphore (can be callable) + capacity: Maximum number of concurrent executions (can be callable) + ttl: Time-to-live for semaphore entries (default: 5 minutes) + blocking: Whether to block when semaphore is full (default: True) + blocking_timeout: Maximum time to wait when blocking (default: socket timeout) + expected_lock_overall_time: helper for logging warnings if lock is held longer than expected + + Example: + @with_limited_concurrency( + redis_client, + key=f"{user_id}-{wallet_id}", + capacity=20, + blocking=True, + blocking_timeout=None + ) + async def process_user_wallet(user_id: str, wallet_id: str): + # Only 20 instances of this function can run concurrently + # for the same user_id-wallet_id combination + await do_processing() + + Raises: + SemaphoreAcquisitionError: If semaphore cannot be acquired and blocking=True + """ + + def _decorator( + coro: Callable[P, Coroutine[Any, Any, R]], + ) -> Callable[P, Coroutine[Any, Any, R]]: + @functools.wraps(coro) + async def _wrapper(*args: P.args, **kwargs: P.kwargs) -> R: + semaphore_key = key(*args, **kwargs) if callable(key) else key + semaphore_capacity = ( + capacity(*args, **kwargs) if callable(capacity) else capacity + ) + client = ( + redis_client(*args, **kwargs) + if callable(redis_client) + else redis_client + ) + + assert isinstance(semaphore_key, str) # nosec + assert isinstance(semaphore_capacity, int) # nosec + assert isinstance(client, RedisClientSDK) # nosec + + async with distributed_semaphore( + redis_client=client, + key=semaphore_key, + capacity=semaphore_capacity, + ttl=ttl, + blocking=blocking, + blocking_timeout=blocking_timeout, + expected_lock_overall_time=expected_lock_overall_time, + ): + return await coro(*args, **kwargs) + + return _wrapper + + return _decorator + + +def with_limited_concurrency_cm( + redis_client: RedisClientSDK | Callable[..., RedisClientSDK], + *, + key: str | Callable[..., str], + capacity: int | Callable[..., int], + ttl: datetime.timedelta = DEFAULT_SEMAPHORE_TTL, + blocking: bool = True, + blocking_timeout: datetime.timedelta | None = None, + expected_lock_overall_time: datetime.timedelta = DEFAULT_EXPECTED_LOCK_OVERALL_TIME, +) -> Callable[ + [Callable[P, AbstractAsyncContextManager[R]]], + Callable[P, AbstractAsyncContextManager[R]], +]: + """ + Decorator to limit concurrent execution of async context managers using a distributed semaphore. + + This decorator ensures that only a specified number of instances of the decorated + async context manager can be active concurrently across multiple processes/instances + using Redis as the coordination backend. 
+ + Args: + redis_client: Redis client for coordination (can be callable) + key: Unique identifier for the semaphore (can be callable) + capacity: Maximum number of concurrent executions (can be callable) + ttl: Time-to-live for semaphore entries (default: 5 minutes) + blocking: Whether to block when semaphore is full (default: True) + blocking_timeout: Maximum time to wait when blocking (default: socket timeout) + expected_lock_overall_time: helper for logging warnings if lock is held longer than expected + + Example: + @asynccontextmanager + @with_limited_concurrency_cm( + redis_client, + key="cluster:my-cluster", + capacity=5, + blocking=True, + blocking_timeout=None + ) + async def get_cluster_client(): + async with pool.acquire() as client: + yield client + + Raises: + SemaphoreAcquisitionError: If semaphore cannot be acquired and blocking=True + """ + + def _decorator( + cm_func: Callable[P, AbstractAsyncContextManager[R]], + ) -> Callable[P, AbstractAsyncContextManager[R]]: + @functools.wraps(cm_func) + @asynccontextmanager + async def _wrapper(*args: P.args, **kwargs: P.kwargs) -> AsyncIterator[R]: + semaphore_key = key(*args, **kwargs) if callable(key) else key + semaphore_capacity = ( + capacity(*args, **kwargs) if callable(capacity) else capacity + ) + client = ( + redis_client(*args, **kwargs) + if callable(redis_client) + else redis_client + ) + + assert isinstance(semaphore_key, str) # nosec + assert isinstance(semaphore_capacity, int) # nosec + assert isinstance(client, RedisClientSDK) # nosec + + async with ( + distributed_semaphore( + redis_client=client, + key=semaphore_key, + capacity=semaphore_capacity, + ttl=ttl, + blocking=blocking, + blocking_timeout=blocking_timeout, + expected_lock_overall_time=expected_lock_overall_time, + ), + cm_func(*args, **kwargs) as value, + ): + yield value + + return _wrapper + + return _decorator diff --git a/packages/service-library/src/servicelib/redis/_semaphore_lua.py b/packages/service-library/src/servicelib/redis/_semaphore_lua.py new file mode 100644 index 000000000000..71f29fa88817 --- /dev/null +++ b/packages/service-library/src/servicelib/redis/_semaphore_lua.py @@ -0,0 +1,38 @@ +"""used to load a lua script from the package resources in memory + +Example: + >>> from servicelib.redis._semaphore_lua import ACQUIRE_SEMAPHORE_SCRIPT + # This will register the script in redis and return a Script object + # which can be used to execute the script. Even from multiple processes + # the script will be loaded only once in redis as the redis server computes + # the SHA1 of the script and uses it to identify it. + >>> from aioredis import Redis + >>> redis = Redis(...) 
+ >>> my_acquire_script = redis.register_script( + ACQUIRE_SEMAPHORE_SCRIPT + >>> my_acquire_script(keys=[...], args=[...]) +""" + +from functools import lru_cache +from importlib import resources +from typing import Final + + +@lru_cache +def _load_script(script_name: str) -> str: + with resources.as_file( + resources.files("servicelib.redis.lua") / f"{script_name}.lua" + ) as script_file: + return script_file.read_text(encoding="utf-8").strip() + + +# fair semaphore scripts (token pool based) +REGISTER_SEMAPHORE_TOKEN_SCRIPT: Final[str] = _load_script("register_semaphore_tokens") +ACQUIRE_SEMAPHORE_SCRIPT: Final[str] = _load_script("acquire_semaphore") +RELEASE_SEMAPHORE_SCRIPT: Final[str] = _load_script("release_semaphore") +CLEANUP_SEMAPHORE_SCRIPT: Final[str] = _load_script("cleanup_semaphore") +RENEW_SEMAPHORE_SCRIPT: Final[str] = _load_script("renew_semaphore") + + +SCRIPT_OK_EXIT_CODE: Final[int] = 0 +SCRIPT_BAD_EXIT_CODE: Final[int] = 255 diff --git a/packages/service-library/src/servicelib/redis/_utils.py b/packages/service-library/src/servicelib/redis/_utils.py index 52d112ca4fee..cf695afef6aa 100644 --- a/packages/service-library/src/servicelib/redis/_utils.py +++ b/packages/service-library/src/servicelib/redis/_utils.py @@ -1,6 +1,6 @@ import logging from collections.abc import Awaitable -from typing import Any +from typing import ParamSpec, TypeVar import redis.exceptions from redis.asyncio.lock import Lock @@ -28,7 +28,11 @@ async def auto_extend_lock(lock: Lock) -> None: raise LockLostError(lock=lock) from exc -async def handle_redis_returns_union_types(result: Any | Awaitable[Any]) -> Any: +P = ParamSpec("P") +R = TypeVar("R") + + +async def handle_redis_returns_union_types(result: R | Awaitable[R]) -> R: """Used to handle mypy issues with redis 5.x return types""" if isinstance(result, Awaitable): return await result diff --git a/packages/service-library/src/servicelib/redis/lua/acquire_semaphore.lua b/packages/service-library/src/servicelib/redis/lua/acquire_semaphore.lua new file mode 100644 index 000000000000..396a2ec34df4 --- /dev/null +++ b/packages/service-library/src/servicelib/redis/lua/acquire_semaphore.lua @@ -0,0 +1,33 @@ +-- Fair distributed semaphore using token pool (BRPOP-based) +-- KEYS[1]: holders_key (SET of current holder instance IDs) +-- KEYS[2]: holder_key (individual holder TTL key for this instance) + +-- ARGV[1]: token (the token received from BRPOP) +-- ARGV[2]: instance_id (the instance trying to acquire the semaphore) +-- ARGV[3]: ttl_seconds (for the holder_key) +-- ARGV[4]: holders_set_ttl_seconds (to set expiry on holders set) +-- +-- Returns: {exit_code, status, token, current_count} +-- exit_code: 0 if acquired +-- status: 'acquired' + +local holders_key = KEYS[1] +local holder_key = KEYS[2] + +local token = ARGV[1] +local instance_id = ARGV[2] +local ttl_seconds = tonumber(ARGV[3]) +local holders_set_ttl_seconds = tonumber(ARGV[4]) + + + +-- Step 1: Register as holder +redis.call('SADD', holders_key, instance_id) +redis.call('SETEX', holder_key, ttl_seconds, token) + +-- Step 2: Set expiry on holders set to prevent infinite growth +redis.call('EXPIRE', holders_key, holders_set_ttl_seconds) + +local current_count = redis.call('SCARD', holders_key) + +return {0, 'acquired', token, current_count} diff --git a/packages/service-library/src/servicelib/redis/lua/cleanup_semaphore.lua b/packages/service-library/src/servicelib/redis/lua/cleanup_semaphore.lua new file mode 100644 index 000000000000..34a3b87dd28d --- /dev/null +++ 
b/packages/service-library/src/servicelib/redis/lua/cleanup_semaphore.lua @@ -0,0 +1,56 @@ +-- Cleanup orphaned tokens from crashed clients +-- KEYS[1]: tokens_key (LIST of available tokens) +-- KEYS[2]: holders_key (SET of current holders) +-- KEYS[3]: holder_prefix (prefix for holder keys, e.g. "semaphores:holders:key:") +-- ARGV[1]: capacity (total semaphore capacity) +-- +-- Returns: {recovered_tokens, missing_tokens, excess_tokens} +-- This script should be run periodically to recover tokens from crashed clients + +local tokens_key = KEYS[1] +local holders_key = KEYS[2] +local holder_prefix = KEYS[3] + +local capacity = tonumber(ARGV[1]) + +-- Step 1: Get all current holders +local current_holders = redis.call('SMEMBERS', holders_key) +local recovered_tokens = 0 +local cleaned_holders = {} + +-- Step 2: Check each holder to see if their TTL key still exists +for i = 1, #current_holders do + local holder_id = current_holders[i] + local holder_key = holder_prefix .. holder_id + local exists = redis.call('EXISTS', holder_key) + + if exists == 0 then + -- Holder key doesn't exist but holder is in SET + -- This indicates a crashed client - clean up and recover token + redis.call('SREM', holders_key, holder_id) + redis.call('LPUSH', tokens_key, 'token_recovered_' .. holder_id) + recovered_tokens = recovered_tokens + 1 + table.insert(cleaned_holders, holder_id) + end +end + +-- Step 3: Ensure we have the correct total number of tokens +local remaining_holders = redis.call('SCARD', holders_key) +local available_tokens_count = redis.call('LLEN', tokens_key) +local total_tokens = remaining_holders + available_tokens_count + +-- If we're missing tokens (due to crashes or Redis issues), add them back +local missing_tokens = capacity - total_tokens +for i = 1, missing_tokens do + redis.call('LPUSH', tokens_key, 'token_missing_' .. i) + recovered_tokens = recovered_tokens + 1 +end + +-- If we somehow have too many tokens (shouldn't happen), remove extras +local excess_tokens = total_tokens - capacity +for i = 1, excess_tokens do + redis.call('RPOP', tokens_key) +end + + +return {recovered_tokens, missing_tokens, excess_tokens} diff --git a/packages/service-library/src/servicelib/redis/lua/register_semaphore_tokens.lua b/packages/service-library/src/servicelib/redis/lua/register_semaphore_tokens.lua new file mode 100644 index 000000000000..36b2c769ef25 --- /dev/null +++ b/packages/service-library/src/servicelib/redis/lua/register_semaphore_tokens.lua @@ -0,0 +1,38 @@ +-- Simple token initialization and management for Python BRPOP +-- KEYS[1]: tokens_key (LIST of available tokens) +-- KEYS[2]: holders_key (SET of current holder instance IDs) + +-- ARGV[1]: capacity (max concurrent holders) +-- ARGV[2]: ttl_seconds +-- +-- Returns: {exit_code} +-- exit_code: 0 if registered successfully + +local tokens_key = KEYS[1] +local holders_key = KEYS[2] + +local capacity = tonumber(ARGV[1]) +local ttl_seconds = tonumber(ARGV[2]) + +-- Use a persistent marker to track if semaphore was ever initialized +local init_marker_key = tokens_key .. ':initialized' + +-- Check if we've ever initialized this semaphore +local was_initialized = redis.call('EXISTS', init_marker_key) + +if was_initialized == 0 then + -- First time initialization - set the permanent marker + redis.call('SET', init_marker_key, '1') + redis.call('EXPIRE', init_marker_key, ttl_seconds) + + -- Initialize with capacity number of tokens + for i = 1, capacity do + redis.call('LPUSH', tokens_key, 'token_' .. 
i) + end + -- Set expiry on tokens list + redis.call('EXPIRE', tokens_key, ttl_seconds) + return {0, 'initialized'} +end + + +return {0, 'already_initialized'} diff --git a/packages/service-library/src/servicelib/redis/lua/release_semaphore.lua b/packages/service-library/src/servicelib/redis/lua/release_semaphore.lua new file mode 100644 index 000000000000..088662c83842 --- /dev/null +++ b/packages/service-library/src/servicelib/redis/lua/release_semaphore.lua @@ -0,0 +1,51 @@ +-- Release fair semaphore and return token to pool +-- KEYS[1]: tokens_key (LIST of available tokens) +-- KEYS[2]: holders_key (SET of current holders) +-- KEYS[3]: holder_key (individual holder TTL key for this instance) + +-- ARGV[1]: instance_id +-- ARGV[2]: passed_token (the token held by this instance or nil if unknown) +-- +-- Returns: {exit_code, status, current_count} +-- exit_code: 0 if released, 255 if failed +-- status: 'released', 'not_held', or 'expired' + +local tokens_key = KEYS[1] +local holders_key = KEYS[2] +local holder_key = KEYS[3] + +local instance_id = ARGV[1] +local passed_token = ARGV[2] + +-- Step 1: Check if this instance is currently a holder +local is_holder = redis.call('SISMEMBER', holders_key, instance_id) +if is_holder == 0 then + -- Not in holders set - check if holder key exists + return {255, 'not_held', redis.call('SCARD', holders_key)} +end + +-- Step 2: Get the token from holder key +local token = redis.call('GET', holder_key) +if not token then + -- the token expired but we are still in the holders set + -- this indicates a lost semaphore (e.g. due to TTL expiry) + -- remove from holders set and return error + redis.call('SREM', holders_key, instance_id) + -- if the token was passed return it to the pool + if passed_token then + redis.call('LPUSH', tokens_key, passed_token) + end + -- Note: we do NOT push a recovered token since we don't know its state + return {255, 'expired', redis.call('SCARD', holders_key)} +end + +-- Step 3: Release the semaphore +redis.call('SREM', holders_key, instance_id) +redis.call('DEL', holder_key) + +-- Step 4: Return token to available pool +-- This automatically unblocks any waiting BRPOP calls +redis.call('LPUSH', tokens_key, token) + + +return {0, 'released', redis.call('SCARD', holders_key)} diff --git a/packages/service-library/src/servicelib/redis/lua/renew_semaphore.lua b/packages/service-library/src/servicelib/redis/lua/renew_semaphore.lua new file mode 100644 index 000000000000..35b290b29d92 --- /dev/null +++ b/packages/service-library/src/servicelib/redis/lua/renew_semaphore.lua @@ -0,0 +1,47 @@ +-- Renew semaphore holder TTL (simplified for token pool design) +-- KEYS[1]: holders_key (SET of current holders) +-- KEYS[2]: holder_key (individual holder TTL key for this instance) +-- KEYS[3]: tokens_key (LIST of available tokens) +-- ARGV[1]: instance_id +-- ARGV[2]: ttl_seconds +-- ARGV[3]: holders_ttl_seconds (to renew holders set) +-- ARGV[4]: tokens_ttl_seconds (to renew tokens list) +-- +-- Returns: {exit_code, status, current_count} +-- exit_code: 0 if renewed, 255 if failed +-- status: 'renewed', 'not_held', or 'expired' + +local holders_key = KEYS[1] +local holder_key = KEYS[2] +local tokens_key = KEYS[3] + +local instance_id = ARGV[1] +local ttl_seconds = tonumber(ARGV[2]) +local holders_ttl_seconds = tonumber(ARGV[3]) +local tokens_ttl_seconds = tonumber(ARGV[4]) + +-- Step 1: Check if this instance is currently a holder +local is_holder = redis.call('SISMEMBER', holders_key, instance_id) +if is_holder == 0 then + -- Not in 
holders set + return {255, 'not_held', redis.call('SCARD', holders_key)} +end + +-- Step 2: Check if holder key exists (to detect if it expired) +local exists = redis.call('EXISTS', holder_key) +if exists == 0 then + -- Holder key expired + return {255, 'expired', redis.call('SCARD', holders_key)} +end + +-- Step 3: Renew the holder key TTL +local token = redis.call('GET', holder_key) +redis.call('SETEX', holder_key, ttl_seconds, token) + +-- Step 4: Renew the holders set and tokens list TTLs to prevent infinite growth +redis.call('EXPIRE', holders_key, holders_ttl_seconds) +redis.call('EXPIRE', tokens_key, tokens_ttl_seconds) +local init_marker_tokens_key = tokens_key .. ':initialized' +redis.call('EXPIRE', init_marker_tokens_key, tokens_ttl_seconds) + +return {0, 'renewed', redis.call('SCARD', holders_key)} diff --git a/packages/service-library/src/servicelib/rest_constants.py b/packages/service-library/src/servicelib/rest_constants.py index d763657b6c97..4791b189df7f 100644 --- a/packages/service-library/src/servicelib/rest_constants.py +++ b/packages/service-library/src/servicelib/rest_constants.py @@ -23,3 +23,4 @@ class PydanticExportParametersDict(TypedDict): # Headers keys X_PRODUCT_NAME_HEADER: Final[str] = "X-Simcore-Products-Name" +X_CLIENT_SESSION_ID_HEADER: Final[str] = "X-Client-Session-Id" diff --git a/packages/service-library/src/servicelib/socketio_utils.py b/packages/service-library/src/servicelib/socketio_utils.py index efc634367157..b6cce908d49a 100644 --- a/packages/service-library/src/servicelib/socketio_utils.py +++ b/packages/service-library/src/servicelib/socketio_utils.py @@ -1,4 +1,4 @@ -""" Common utilities for python-socketio library +"""Common utilities for python-socketio library NOTE: we intentionally avoided importing socketio here to avoid adding an extra dependency at @@ -9,7 +9,6 @@ async def cleanup_socketio_async_pubsub_manager(server_manager): - # NOTE: this is ugly. It seems though that python-socketio does not # cleanup its background tasks properly. 
# https://github.com/miguelgrinberg/python-socketio/discussions/1092 @@ -35,6 +34,7 @@ async def cleanup_socketio_async_pubsub_manager(server_manager): for coro_name in [ "AsyncServer._service_task", "AsyncSocket.schedule_ping", + "AsyncSocket._send_ping", "AsyncPubSubManager._thread", ] ): diff --git a/packages/service-library/src/servicelib/tracing.py b/packages/service-library/src/servicelib/tracing.py index e1b3b348a723..a95f386495e6 100644 --- a/packages/service-library/src/servicelib/tracing.py +++ b/packages/service-library/src/servicelib/tracing.py @@ -1,6 +1,11 @@ +from collections.abc import Callable, Coroutine from contextlib import contextmanager -from typing import TypeAlias +from contextvars import Token +from functools import wraps +from typing import Any, Final, TypeAlias +import pyinstrument +import pyinstrument.renderers from opentelemetry import context as otcontext from opentelemetry import trace from opentelemetry.instrumentation.logging import LoggingInstrumentor @@ -8,6 +13,10 @@ TracingContext: TypeAlias = otcontext.Context | None +_TRACER_NAME: Final[str] = "servicelib.tracing" +_PROFILE_ATTRIBUTE_NAME: Final[str] = "pyinstrument.profile" +_OSPARC_TRACE_ID_HEADER: Final[str] = "x-osparc-trace-id" + def _is_tracing() -> bool: return trace.get_current_span().is_recording() @@ -21,7 +30,7 @@ def get_context() -> TracingContext: @contextmanager def use_tracing_context(context: TracingContext): - token: object | None = None + token: Token[otcontext.Context] | None = None if context is not None: token = otcontext.attach(context) try: @@ -34,3 +43,53 @@ def use_tracing_context(context: TracingContext): def setup_log_tracing(tracing_settings: TracingSettings): _ = tracing_settings LoggingInstrumentor().instrument(set_logging_format=False) + + +def get_trace_id_header() -> dict[str, str] | None: + """Generates a dictionary containing the trace ID header if tracing is active.""" + span = trace.get_current_span() + if span.is_recording(): + trace_id = span.get_span_context().trace_id + trace_id_hex = format( + trace_id, "032x" + ) # Convert trace_id to 32-character hex string + return {_OSPARC_TRACE_ID_HEADER: trace_id_hex} + return None + + +def with_profiled_span( + func: Callable[..., Coroutine[Any, Any, Any]], +) -> Callable[..., Coroutine[Any, Any, Any]]: + """Decorator that wraps an async function in an OpenTelemetry span with pyinstrument profiling.""" + + @wraps(func) + async def wrapper(*args: Any, **kwargs: Any) -> Any: + if not _is_tracing(): + return await func(*args, **kwargs) + + tracer = trace.get_tracer(_TRACER_NAME) + span_name = f"{func.__module__}.{func.__qualname__}" + + with tracer.start_as_current_span(span_name) as span: + profiler = pyinstrument.Profiler(async_mode="enabled") + profiler.start() + + try: + return await func(*args, **kwargs) + + except Exception as e: + span.record_exception(e) + span.set_status(trace.Status(trace.StatusCode.ERROR, f"{e}")) + raise + + finally: + profiler.stop() + renderer = pyinstrument.renderers.ConsoleRenderer( + unicode=True, color=False, show_all=True + ) + span.set_attribute( + _PROFILE_ATTRIBUTE_NAME, + profiler.output(renderer=renderer), + ) + + return wrapper diff --git a/packages/service-library/src/servicelib/utils.py b/packages/service-library/src/servicelib/utils.py index e6de282068cc..e6ae4b147c51 100644 --- a/packages/service-library/src/servicelib/utils.py +++ b/packages/service-library/src/servicelib/utils.py @@ -1,4 +1,4 @@ -""" General utils +"""General utils IMPORTANT: lowest level module I order to 
avoid cyclic dependences, please @@ -245,7 +245,7 @@ async def limited_as_completed( future.set_name(f"{tasks_group_prefix}-{future.get_name()}") pending_futures.add(future) - except (StopIteration, StopAsyncIteration): # noqa: PERF203 + except (StopIteration, StopAsyncIteration): completed_all_awaitables = True if not pending_futures: return @@ -294,8 +294,7 @@ async def limited_gather( log: logging.Logger = _DEFAULT_LOGGER, limit: int = _DEFAULT_LIMITED_CONCURRENCY, tasks_group_prefix: str | None = None, -) -> list[T]: - ... +) -> list[T]: ... @overload @@ -305,8 +304,7 @@ async def limited_gather( log: logging.Logger = _DEFAULT_LOGGER, limit: int = _DEFAULT_LIMITED_CONCURRENCY, tasks_group_prefix: str | None = None, -) -> list[T | BaseException]: - ... +) -> list[T | BaseException]: ... async def limited_gather( diff --git a/packages/service-library/src/servicelib/utils_meta.py b/packages/service-library/src/servicelib/utils_meta.py index 6ee48fd4d56d..1109e3089c59 100644 --- a/packages/service-library/src/servicelib/utils_meta.py +++ b/packages/service-library/src/servicelib/utils_meta.py @@ -1,13 +1,16 @@ -""" Utilities to implement _meta.py - -""" +"""Utilities to implement _meta.py""" +import re from importlib.metadata import distribution from models_library.basic_types import VersionStr from packaging.version import Version from pydantic import TypeAdapter +_APP_NAME_PATTERN = re.compile( + r"^[a-z0-9]+(-[a-z0-9]+)*$" +) # matches lowercase string with words and non-negative integers separated by dashes (no whitespace) + class PackageInfo: """Thin wrapper around pgk_resources.Distribution to access package distribution metadata @@ -29,11 +32,32 @@ def __init__(self, package_name: str): package_name: as defined in 'setup.name' """ self._distribution = distribution(package_name) + # property checks + if re.match(_APP_NAME_PATTERN, self.app_name) is None: + msg = ( + f"Invalid package name {self.app_name}. " + "It must be all lowercase and words separated by dashes ('-')." + ) + raise ValueError(msg) @property def project_name(self) -> str: return self._distribution.metadata["Name"] + @property + def app_name(self) -> str: + """ + Returns the application name as a lowercase string with words separated by dashes ('-'). + """ + return self._distribution.metadata["Name"] + + @property + def prometheus_friendly_app_name(self) -> str: + """ + Returns a version of the app name which is compatible with Prometheus metrics naming conventions (no dashes). 
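A quick illustration of the naming rules introduced above, with hypothetical values: app names must be lowercase, dash-separated words, and the Prometheus-friendly variant simply swaps dashes for underscores:

```python
import re

_APP_NAME_PATTERN = re.compile(r"^[a-z0-9]+(-[a-z0-9]+)*$")

assert _APP_NAME_PATTERN.match("simcore-service-webserver")
assert _APP_NAME_PATTERN.match("Simcore_Service") is None  # wrong case/underscore
assert "simcore-service-webserver".replace("-", "_") == "simcore_service_webserver"
```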
+ """ + return self.app_name.replace("-", "_") + @property def version(self) -> Version: return Version(self._distribution.version) diff --git a/packages/service-library/tests/aiohttp/conftest.py b/packages/service-library/tests/aiohttp/conftest.py index 1891ee17d157..f2055a80b0c3 100644 --- a/packages/service-library/tests/aiohttp/conftest.py +++ b/packages/service-library/tests/aiohttp/conftest.py @@ -1,2 +1,20 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument + + +from collections.abc import Iterator + +import pytest +from opentelemetry.sdk.trace.export import SimpleSpanProcessor +from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter +from pytest_mock import MockerFixture + + +@pytest.fixture +def mock_otel_collector(mocker: MockerFixture) -> Iterator[InMemorySpanExporter]: + memory_exporter = InMemorySpanExporter() + span_processor = SimpleSpanProcessor(memory_exporter) + mocker.patch( + "servicelib.aiohttp.tracing._create_span_processor", return_value=span_processor + ) + yield memory_exporter diff --git a/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py b/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py index 8fe29473cfcb..3bf527ab2c82 100644 --- a/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py +++ b/packages/service-library/tests/aiohttp/long_running_tasks/conftest.py @@ -12,17 +12,26 @@ from pydantic import BaseModel, TypeAdapter from pytest_simcore.helpers.assert_checks import assert_status from servicelib.aiohttp import long_running_tasks, status -from servicelib.aiohttp.long_running_tasks.server import TaskId from servicelib.aiohttp.requests_validation import parse_request_query_parameters_as -from servicelib.long_running_tasks._task import TaskContext +from servicelib.long_running_tasks.models import ( + TaskGet, + TaskId, + TaskProgress, + TaskStatus, +) +from servicelib.long_running_tasks.task import TaskContext, TaskRegistry from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_delay from tenacity.wait import wait_fixed +class _TestingError(Exception): + pass + + async def _string_list_task( - task_progress: long_running_tasks.server.TaskProgress, + progress: TaskProgress, num_strings: int, sleep_time: float, fail: bool, @@ -31,10 +40,10 @@ async def _string_list_task( for index in range(num_strings): generated_strings.append(f"{index}") await asyncio.sleep(sleep_time) - task_progress.update(message="generated item", percent=index / num_strings) + await progress.update(message="generated item", percent=index / num_strings) if fail: msg = "We were asked to fail!!" 
- raise RuntimeError(msg) + raise _TestingError(msg) # NOTE: this code is used just for the sake of not returning the default 200 return web.json_response( @@ -42,6 +51,9 @@ async def _string_list_task( ) +TaskRegistry.register(_string_list_task, allowed_errors=(_TestingError,)) + + @pytest.fixture def task_context(faker: Faker) -> TaskContext: return {"user_id": faker.pyint(), "product": faker.pystr()} @@ -68,7 +80,7 @@ async def generate_list_strings(request: web.Request) -> web.Response: query_params = parse_request_query_parameters_as(_LongTaskQueryParams, request) return await long_running_tasks.server.start_long_running_task( request, - _string_list_task, + _string_list_task.__name__, num_strings=query_params.num_strings, sleep_time=query_params.sleep_time, fail=query_params.fail, @@ -93,7 +105,7 @@ async def _caller(client: TestClient, **query_kwargs) -> TaskId: data, error = await assert_status(resp, status.HTTP_202_ACCEPTED) assert data assert not error - task_get = TypeAdapter(long_running_tasks.server.TaskGet).validate_python(data) + task_get = TypeAdapter(TaskGet).validate_python(data) return task_get.task_id return _caller @@ -123,7 +135,7 @@ async def _waiter( data, error = await assert_status(result, status.HTTP_200_OK) assert data assert not error - task_status = long_running_tasks.server.TaskStatus.model_validate(data) + task_status = TaskStatus.model_validate(data) assert task_status assert task_status.done diff --git a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py index 71d5501b2ed2..49604fd3a15e 100644 --- a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py +++ b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks.py @@ -20,22 +20,38 @@ from pydantic import TypeAdapter from pytest_simcore.helpers.assert_checks import assert_status from servicelib.aiohttp import long_running_tasks, status -from servicelib.aiohttp.long_running_tasks.server import TaskGet, TaskId from servicelib.aiohttp.rest_middlewares import append_rest_middlewares -from servicelib.long_running_tasks._task import TaskContext +from servicelib.long_running_tasks.models import TaskGet, TaskId, TaskStatus +from servicelib.long_running_tasks.task import TaskContext +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_delay from tenacity.wait import wait_fixed +pytest_simcore_core_services_selection = [ + "rabbit", +] + @pytest.fixture -def app(server_routes: web.RouteTableDef) -> web.Application: +def app( + server_routes: web.RouteTableDef, + use_in_memory_redis: RedisSettings, + rabbit_service: RabbitSettings, +) -> web.Application: app = web.Application() app.add_routes(server_routes) # this adds enveloping and error middlewares append_rest_middlewares(app, api_version="") - long_running_tasks.server.setup(app, router_prefix="/futures") + long_running_tasks.server.setup( + app, + redis_settings=use_in_memory_redis, + rabbit_settings=rabbit_service, + lrt_namespace="test", + router_prefix="/futures", + ) return app @@ -70,7 +86,7 @@ async def test_workflow( data, error = await assert_status(result, status.HTTP_200_OK) assert data assert not error - task_status = long_running_tasks.server.TaskStatus.model_validate(data) + task_status = 
TaskStatus.model_validate(data) assert task_status progress_updates.append( (task_status.task_progress.message, task_status.task_progress.percent) @@ -97,7 +113,7 @@ async def test_workflow( # now get the result result_url = client.app.router["get_task_result"].url_for(task_id=task_id) result = await client.get(f"{result_url}") - task_result, error = await assert_status(result, status.HTTP_201_CREATED) + task_result, error = await assert_status(result, status.HTTP_200_OK) assert task_result assert not error assert task_result == [f"{x}" for x in range(10)] @@ -111,7 +127,7 @@ async def test_workflow( [ ("GET", "get_task_status"), ("GET", "get_task_result"), - ("DELETE", "cancel_and_delete_task"), + ("DELETE", "remove_task"), ], ) async def test_get_task_wrong_task_id_raises_not_found( @@ -127,6 +143,7 @@ async def test_failing_task_returns_error( client: TestClient, start_long_running_task: Callable[[TestClient, Any], Awaitable[TaskId]], wait_for_task: Callable[[TestClient, TaskId, TaskContext], Awaitable[None]], + caplog: pytest.LogCaptureFixture, ): assert client.app task_id = await start_long_running_task(client, fail=f"{True}") @@ -138,10 +155,17 @@ async def test_failing_task_returns_error( data, error = await assert_status(result, status.HTTP_500_INTERNAL_SERVER_ERROR) assert not data assert error - assert "errors" in error - assert len(error["errors"]) == 1 - assert error["errors"][0]["code"] == "RuntimeError" - assert error["errors"][0]["message"] == "We were asked to fail!!" + + # The error should contain a supportId field for tracking + assert "supportId" in error + assert isinstance(error["supportId"], str) + assert len(error["supportId"]) > 0 + + # The actual error details should be logged, not returned in response + log_messages = caplog.text + assert "OEC" in log_messages + assert "_TestingError" in log_messages + assert "We were asked to fail!!" 
in log_messages async def test_get_results_before_tasks_finishes_returns_404( @@ -164,7 +188,7 @@ async def test_cancel_task( task_id = await start_long_running_task(client) # cancel the task - delete_url = client.app.router["cancel_and_delete_task"].url_for(task_id=task_id) + delete_url = client.app.router["remove_task"].url_for(task_id=task_id) result = await client.delete(f"{delete_url}") data, error = await assert_status(result, status.HTTP_204_NO_CONTENT) assert not data @@ -216,7 +240,9 @@ async def test_list_tasks( # the task name is properly formatted assert all( - task.task_name == "POST /long_running_task:start?num_strings=10&sleep_time=0.2" + task.task_name.startswith( + "POST /long_running_task:start?num_strings=10&sleep_time=" + ) for task in list_of_tasks ) # now wait for them to finish diff --git a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_client.py b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_client.py index b211cc3d1cad..9e8c9204acef 100644 --- a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_client.py +++ b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_client.py @@ -15,16 +15,32 @@ long_running_task_request, ) from servicelib.aiohttp.rest_middlewares import append_rest_middlewares +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings from yarl import URL +pytest_simcore_core_services_selection = [ + "rabbit", +] + @pytest.fixture -def app(server_routes: web.RouteTableDef) -> web.Application: +def app( + server_routes: web.RouteTableDef, + use_in_memory_redis: RedisSettings, + rabbit_service: RabbitSettings, +) -> web.Application: app = web.Application() app.add_routes(server_routes) # this adds enveloping and error middlewares append_rest_middlewares(app, api_version="") - long_running_tasks.server.setup(app, router_prefix="/futures") + long_running_tasks.server.setup( + app, + redis_settings=use_in_memory_redis, + rabbit_settings=rabbit_service, + lrt_namespace="test", + router_prefix="/futures", + ) return app @@ -50,7 +66,7 @@ async def test_long_running_task_request_raises_400( client: TestClient, long_running_task_url: URL ): # missing parameters raises - with pytest.raises(ClientResponseError): + with pytest.raises(ClientResponseError): # noqa: PT012 async for _ in long_running_task_request( client.session, long_running_task_url, None ): @@ -87,7 +103,7 @@ async def test_long_running_task_request_timeout( ): assert client.app task: LRTask | None = None - with pytest.raises(asyncio.TimeoutError): + with pytest.raises(asyncio.TimeoutError): # noqa: PT012 async for task in long_running_task_request( client.session, long_running_task_url.with_query(num_strings=10, sleep_time=1), diff --git a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py index 0b37c9416692..cef4a845ab8d 100644 --- a/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py +++ b/packages/service-library/tests/aiohttp/long_running_tasks/test_long_running_tasks_with_task_context.py @@ -22,12 +22,17 @@ from servicelib.aiohttp.long_running_tasks._server import ( RQT_LONG_RUNNING_TASKS_CONTEXT_KEY, ) -from servicelib.aiohttp.long_running_tasks.server import TaskGet, TaskId from 
servicelib.aiohttp.requests_validation import parse_request_query_parameters_as from servicelib.aiohttp.rest_middlewares import append_rest_middlewares from servicelib.aiohttp.typing_extension import Handler -from servicelib.long_running_tasks._task import TaskContext - +from servicelib.long_running_tasks.models import TaskGet, TaskId +from servicelib.long_running_tasks.task import TaskContext +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings + +pytest_simcore_core_services_selection = [ + "rabbit", +] # WITH TASK CONTEXT # NOTE: as the long running task framework may be used in any number of services # in some cases there might be specific so-called task contexts. @@ -61,7 +66,10 @@ async def _test_task_context_decorator( @pytest.fixture def app_with_task_context( - server_routes: web.RouteTableDef, task_context_decorator + server_routes: web.RouteTableDef, + task_context_decorator, + use_in_memory_redis: RedisSettings, + rabbit_service: RabbitSettings, ) -> web.Application: app = web.Application() app.add_routes(server_routes) @@ -69,6 +77,9 @@ def app_with_task_context( append_rest_middlewares(app, api_version="") long_running_tasks.server.setup( app, + redis_settings=use_in_memory_redis, + rabbit_settings=rabbit_service, + lrt_namespace="test", router_prefix="/futures_with_task_context", task_request_context_decorator=task_context_decorator, ) @@ -151,7 +162,7 @@ async def test_get_task_result( await assert_status(resp, status.HTTP_404_NOT_FOUND) # calling with context should find the task resp = await client_with_task_context.get(f"{result_url.with_query(task_context)}") - await assert_status(resp, status.HTTP_201_CREATED) + await assert_status(resp, status.HTTP_200_OK) async def test_cancel_task( @@ -161,7 +172,7 @@ async def test_cancel_task( ): assert client_with_task_context.app task_id = await start_long_running_task(client_with_task_context) - cancel_url = client_with_task_context.app.router["cancel_and_delete_task"].url_for( + cancel_url = client_with_task_context.app.router["remove_task"].url_for( task_id=task_id ) # calling cancel without task context should find nothing diff --git a/packages/service-library/tests/aiohttp/test_application_setup.py b/packages/service-library/tests/aiohttp/test_application_setup.py index 94af1c07a335..47b83da7576f 100644 --- a/packages/service-library/tests/aiohttp/test_application_setup.py +++ b/packages/service-library/tests/aiohttp/test_application_setup.py @@ -1,52 +1,30 @@ -# pylint:disable=unused-variable -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable -from unittest.mock import Mock + +import logging import pytest from aiohttp import web +from pytest_mock import MockerFixture, MockType from servicelib.aiohttp.application_keys import APP_CONFIG_KEY from servicelib.aiohttp.application_setup import ( DependencyError, ModuleCategory, SkipModuleSetupError, app_module_setup, + ensure_single_setup, is_setup_completed, ) -log = Mock() - - -@app_module_setup("package.bar", ModuleCategory.ADDON, logger=log) -def setup_bar(app: web.Application, arg1, *, raise_skip: bool = False): - return True - - -@app_module_setup("package.foo", ModuleCategory.ADDON, logger=log) -def setup_foo(app: web.Application, arg1, kargs=33, *, raise_skip: bool = False): - if raise_skip: - raise 
SkipModuleSetupError(reason="explicit skip") - return True - - -@app_module_setup( - "package.zee", ModuleCategory.ADDON, config_enabled="main.zee_enabled", logger=log -) -def setup_zee(app: web.Application, arg1, kargs=55): - return True - -@app_module_setup( - "package.needs_foo", - ModuleCategory.SYSTEM, - depends=[ - "package.foo", - ], - logger=log, -) -def setup_needs_foo(app: web.Application, arg1, kargs=55): - return True +@pytest.fixture +def mock_logger(mocker: MockerFixture) -> MockType: + logger_mock: MockType = mocker.create_autospec(logging.Logger, instance=True) + return logger_mock @pytest.fixture @@ -59,55 +37,150 @@ def app_config() -> dict: @pytest.fixture -def app(app_config): +def app_settings_key() -> web.AppKey: + return web.AppKey("test_app_settings", object) + + +@pytest.fixture +def app(app_config: dict) -> web.Application: _app = web.Application() _app[APP_CONFIG_KEY] = app_config return _app -def test_setup_config_enabled(app_config, app): +def test_setup_config_enabled( + app_config: dict, app: web.Application, app_settings_key: web.AppKey +): + + @app_module_setup( + "package.zee", + ModuleCategory.ADDON, + app_settings_key=app_settings_key, + # legacy support for config_enabled + config_enabled="main.zee_enabled", + ) + def setup_zee(app: web.Application, arg) -> bool: + assert arg + return True + assert setup_zee(app, 1) assert setup_zee.metadata()["config_enabled"] == "main.zee_enabled" app_config["main"]["zee_enabled"] = False + assert not setup_zee(app, 2) -def test_setup_dependencies(app_config, app): +def test_setup_dependencies(app: web.Application, app_settings_key: web.AppKey): + + @app_module_setup( + "package.foo", ModuleCategory.ADDON, app_settings_key=app_settings_key + ) + def setup_foo(app: web.Application) -> bool: + return True + @app_module_setup( + "package.needs_foo", + ModuleCategory.SYSTEM, + app_settings_key=app_settings_key, + depends=[ + # This module needs foo to be setup first + "package.foo", + ], + ) + def setup_needs_foo(app: web.Application) -> bool: + return True + + # setup_foo is not called yet with pytest.raises(DependencyError): - setup_needs_foo(app, 1) + setup_needs_foo(app) - assert setup_foo(app, 1) - assert setup_needs_foo(app, 2) + # ok + assert setup_foo(app) + assert setup_needs_foo(app) + # meta assert setup_needs_foo.metadata()["dependencies"] == [ setup_foo.metadata()["module_name"], ] -def test_marked_setup(app_config, app): - assert setup_foo(app, 1) +def test_marked_setup( + app_config: dict, app: web.Application, app_settings_key: web.AppKey +): + @app_module_setup( + "package.foo", ModuleCategory.ADDON, app_settings_key=app_settings_key + ) + def setup_foo(app: web.Application) -> bool: + return True + assert setup_foo(app) assert setup_foo.metadata()["module_name"] == "package.foo" assert is_setup_completed(setup_foo.metadata()["module_name"], app) app_config["foo"]["enabled"] = False - assert not setup_foo(app, 2) + assert not setup_foo(app) + + +def test_skip_setup( + app: web.Application, mock_logger: MockType, app_settings_key: web.AppKey +): + @app_module_setup( + "package.foo", + ModuleCategory.ADDON, + app_settings_key=app_settings_key, + logger=mock_logger, + ) + def setup_foo(app: web.Application, *, raise_skip: bool = False) -> bool: + if raise_skip: + raise SkipModuleSetupError(reason="explicit skip") + return True + + assert not setup_foo(app, raise_skip=True) + assert setup_foo(app) + + assert mock_logger.info.called + args = [call.args[-1] for call in mock_logger.info.mock_calls] + assert 
any("explicit skip" in arg for arg in args) + + +def setup_basic(app: web.Application) -> bool: + return True + + +def setup_that_raises(app: web.Application) -> bool: + error_msg = "Setup failed" + raise ValueError(error_msg) + + +def test_ensure_single_setup_runs_once(app: web.Application, mock_logger: MockType): + decorated = ensure_single_setup("test.module", logger=mock_logger)(setup_basic) + + # First call succeeds + assert decorated(app) + assert is_setup_completed("test.module", app) + + # Second call skips + assert not decorated(app) + +def test_ensure_single_setup_error_handling( + app: web.Application, mock_logger: MockType +): + decorated = ensure_single_setup("test.error", logger=mock_logger)(setup_that_raises) -def test_skip_setup(app_config, app): - try: - log.reset_mock() + with pytest.raises(ValueError, match="Setup failed"): + decorated(app) + assert not is_setup_completed("test.error", app) - assert not setup_foo(app, 1, raise_skip=True) - # FIXME: mock logger - # assert log.warning.called - # warn_msg = log.warning.call_args()[0] - # assert "package.foo" in warn_msg - # assert "explicit skip" in warn_msg +def test_ensure_single_setup_multiple_modules( + app: web.Application, mock_logger: MockType +): + decorated1 = ensure_single_setup("module1", logger=mock_logger)(setup_basic) + decorated2 = ensure_single_setup("module2", logger=mock_logger)(setup_basic) - assert setup_foo(app, 1) - finally: - log.reset_mock() + assert decorated1(app) + assert decorated2(app) + assert is_setup_completed("module1", app) + assert is_setup_completed("module2", app) diff --git a/packages/service-library/tests/aiohttp/test_client_session.py b/packages/service-library/tests/aiohttp/test_client_session.py index 74b91655c31a..792c254ee73b 100644 --- a/packages/service-library/tests/aiohttp/test_client_session.py +++ b/packages/service-library/tests/aiohttp/test_client_session.py @@ -6,7 +6,7 @@ from collections.abc import Callable, Iterator from typing import Any -import pytest +import pytest_asyncio from aiohttp import web from aiohttp.client import ClientSession from aiohttp.test_utils import TestServer @@ -18,8 +18,8 @@ ) -@pytest.fixture -def server(event_loop, aiohttp_server: Callable) -> Iterator[TestServer]: +@pytest_asyncio.fixture(loop_scope="function", scope="function") +async def server(aiohttp_server: Callable) -> Iterator[TestServer]: async def echo(request): got = await request.json() return web.json_response(data=got) @@ -31,7 +31,7 @@ async def echo(request): assert not app.get(APP_CLIENT_SESSION_KEY) - test_server = event_loop.run_until_complete(aiohttp_server(app)) + test_server = await aiohttp_server(app) assert isinstance(app[APP_CLIENT_SESSION_KEY], ClientSession) assert not app[APP_CLIENT_SESSION_KEY].closed diff --git a/packages/service-library/tests/aiohttp/test_monitor_slow_callbacks.py b/packages/service-library/tests/aiohttp/test_monitor_slow_callbacks.py index 6c428eb485d4..cb30d1f73385 100644 --- a/packages/service-library/tests/aiohttp/test_monitor_slow_callbacks.py +++ b/packages/service-library/tests/aiohttp/test_monitor_slow_callbacks.py @@ -8,33 +8,35 @@ from collections.abc import Iterable import pytest +import pytest_asyncio from servicelib.aiohttp import monitor_slow_callbacks -from servicelib.aiohttp.aiopg_utils import DatabaseError from tenacity import retry from tenacity.stop import stop_after_attempt from tenacity.wait import wait_fixed -async def slow_task(delay): - time.sleep(delay) # noqa: ASYNC101 +async def _slow_function(delay): + 
time.sleep(delay) # noqa: ASYNC251 @retry(wait=wait_fixed(1), stop=stop_after_attempt(2)) -async def fails_to_reach_pg_db(): - raise DatabaseError +async def _raising_function(): + msg = "This function is expected to raise an error" + raise RuntimeError(msg) -@pytest.fixture -def incidents_manager(event_loop) -> dict: +@pytest_asyncio.fixture(loop_scope="function", scope="function") +async def incidents_manager() -> dict: incidents = [] monitor_slow_callbacks.enable(slow_duration_secs=0.2, incidents=incidents) - asyncio.ensure_future(slow_task(0.3), loop=event_loop) # noqa: RUF006 - asyncio.ensure_future(slow_task(0.3), loop=event_loop) # noqa: RUF006 - asyncio.ensure_future(slow_task(0.4), loop=event_loop) # noqa: RUF006 + event_loop = asyncio.get_running_loop() + asyncio.ensure_future(_slow_function(0.3), loop=event_loop) # noqa: RUF006 + asyncio.ensure_future(_slow_function(0.3), loop=event_loop) # noqa: RUF006 + asyncio.ensure_future(_slow_function(0.4), loop=event_loop) # noqa: RUF006 incidents_pg = None # aiopg_utils.monitor_pg_responsiveness.enable() - asyncio.ensure_future(fails_to_reach_pg_db(), loop=event_loop) # noqa: RUF006 + asyncio.ensure_future(_raising_function(), loop=event_loop) # noqa: RUF006 return {"slow_callback": incidents, "posgres_responsive": incidents_pg} @@ -46,6 +48,10 @@ def disable_monitoring() -> Iterable[None]: asyncio.events.Handle._run = original_handler # noqa: SLF001 +@pytest.mark.skip( + reason="log_slow_callbacks is not supported out-of-the-box with uvloop." + " SEE https://github.com/ITISFoundation/osparc-simcore/issues/8047" +) async def test_slow_task_incident(disable_monitoring: None, incidents_manager: dict): await asyncio.sleep(2) assert len(incidents_manager["slow_callback"]) == 3 diff --git a/packages/service-library/tests/aiohttp/test_requests_validation.py b/packages/service-library/tests/aiohttp/test_requests_validation.py index 97c2b317b6ac..a901cc4d8745 100644 --- a/packages/service-library/tests/aiohttp/test_requests_validation.py +++ b/packages/service-library/tests/aiohttp/test_requests_validation.py @@ -7,11 +7,13 @@ from uuid import UUID import pytest +import pytest_asyncio from aiohttp import web from aiohttp.test_utils import TestClient, make_mocked_request from common_library.json_serialization import json_dumps from faker import Faker from models_library.rest_base import RequestParameters, StrictRequestParameters +from models_library.rest_error import EnvelopedError from models_library.rest_ordering import ( OrderBy, OrderDirection, @@ -98,8 +100,8 @@ def create_fake(cls, faker: Faker): return cls(x=faker.pyint(), y=faker.pybool(), z=Sub.create_fake(faker)) -@pytest.fixture -def client(event_loop, aiohttp_client: Callable, faker: Faker) -> TestClient: +@pytest_asyncio.fixture(loop_scope="function", scope="function") +async def client(aiohttp_client: Callable, faker: Faker) -> TestClient: """ Some app that: @@ -115,18 +117,10 @@ async def _handler(request: web.Request) -> web.Response: {**dict(request.app), **dict(request)} ) - path_params = parse_request_path_parameters_as( - MyRequestPathParams, request, use_enveloped_error_v1=False - ) - query_params = parse_request_query_parameters_as( - MyRequestQueryParams, request, use_enveloped_error_v1=False - ) - headers_params = parse_request_headers_as( - MyRequestHeadersParams, request, use_enveloped_error_v1=False - ) - body = await parse_request_body_as( - MyBody, request, use_enveloped_error_v1=False - ) + path_params = parse_request_path_parameters_as(MyRequestPathParams, request) 
+ query_params = parse_request_query_parameters_as(MyRequestQueryParams, request) + headers_params = parse_request_headers_as(MyRequestHeadersParams, request) + body = await parse_request_body_as(MyBody, request) # --------------------------- return web.json_response( @@ -162,7 +156,7 @@ async def _middleware(request: web.Request, handler): # adds handler app.add_routes([web.get("/projects/{project_uuid}", _handler)]) - return event_loop.run_until_complete(aiohttp_client(app)) + return await aiohttp_client(app) @pytest.fixture @@ -229,19 +223,12 @@ async def test_parse_request_with_invalid_path_params( assert r.status == status.HTTP_422_UNPROCESSABLE_ENTITY, f"{await r.text()}" response_body = await r.json() - assert response_body["error"].pop("resource") - assert response_body == { - "error": { - "msg": "Invalid parameter/s 'project_uuid' in request path", - "details": [ - { - "loc": "project_uuid", - "msg": "Input should be a valid UUID, invalid character: expected an optional prefix of `urn:uuid:` followed by [0-9a-fA-F-], found `i` at 1", - "type": "uuid_parsing", - } - ], - } - } + + error_model = EnvelopedError.model_validate(response_body).error + assert error_model.message == "Invalid parameter/s 'project_uuid' in request path" + assert error_model.status == status.HTTP_422_UNPROCESSABLE_ENTITY + assert error_model.errors[0].field == "project_uuid" + assert error_model.errors[0].code == "uuid_parsing" async def test_parse_request_with_invalid_query_params( @@ -260,19 +247,11 @@ async def test_parse_request_with_invalid_query_params( assert r.status == status.HTTP_422_UNPROCESSABLE_ENTITY, f"{await r.text()}" response_body = await r.json() - assert response_body["error"].pop("resource") - assert response_body == { - "error": { - "msg": "Invalid parameter/s 'label' in request query", - "details": [ - { - "loc": "label", - "msg": "Field required", - "type": "missing", - } - ], - } - } + error_model = EnvelopedError.model_validate(response_body).error + assert error_model.message == "Invalid parameter/s 'label' in request query" + assert error_model.status == status.HTTP_422_UNPROCESSABLE_ENTITY + assert error_model.errors[0].field == "label" + assert error_model.errors[0].code == "missing" async def test_parse_request_with_invalid_body( @@ -292,25 +271,11 @@ async def test_parse_request_with_invalid_body( response_body = await r.json() - assert response_body["error"].pop("resource") - - assert response_body == { - "error": { - "msg": "Invalid field/s 'x, z' in request body", - "details": [ - { - "loc": "x", - "msg": "Field required", - "type": "missing", - }, - { - "loc": "z", - "msg": "Field required", - "type": "missing", - }, - ], - } - } + error_model = EnvelopedError.model_validate(response_body).error + assert error_model.message == "Invalid field/s 'x, z' in request body" + assert error_model.status == status.HTTP_422_UNPROCESSABLE_ENTITY + assert error_model.errors[0].field == "x" + assert error_model.errors[0].code == "missing" async def test_parse_request_with_invalid_json_body( @@ -348,19 +313,15 @@ async def test_parse_request_with_invalid_headers_params( assert r.status == status.HTTP_422_UNPROCESSABLE_ENTITY, f"{await r.text()}" response_body = await r.json() - assert response_body["error"].pop("resource") - assert response_body == { - "error": { - "msg": "Invalid parameter/s 'X-Simcore-User-Agent' in request headers", - "details": [ - { - "loc": "X-Simcore-User-Agent", - "msg": "Field required", - "type": "missing", - } - ], - } - } + + error_model = 
EnvelopedError.model_validate(response_body).error + assert ( + error_model.message + == "Invalid parameter/s 'X-Simcore-User-Agent' in request headers" + ) + assert error_model.status == status.HTTP_422_UNPROCESSABLE_ENTITY + assert error_model.errors[0].field == "X-Simcore-User-Agent" + assert error_model.errors[0].code == "missing" def test_parse_request_query_parameters_as_with_order_by_query_models(): diff --git a/packages/service-library/tests/aiohttp/test_rest_middlewares.py b/packages/service-library/tests/aiohttp/test_rest_middlewares.py index de5e80b85ae5..26884dbc11cd 100644 --- a/packages/service-library/tests/aiohttp/test_rest_middlewares.py +++ b/packages/service-library/tests/aiohttp/test_rest_middlewares.py @@ -14,6 +14,7 @@ from aiohttp import web from aiohttp.test_utils import TestClient from common_library.json_serialization import json_dumps +from pytest_mock import MockerFixture from servicelib.aiohttp import status from servicelib.aiohttp.rest_middlewares import ( envelope_middleware_factory, @@ -127,6 +128,10 @@ async def raise_success_with_text(_request: web.Request): # NOTE: explicitly NOT enveloped! raise web.HTTPOk(reason="I'm ok", text=json.dumps({"ok": True})) + @staticmethod + async def raise_success_with_raw_text(_request: web.Request): + raise web.HTTPOk(text="I'm ok") # NOT ALLOWED! + @pytest.fixture async def client( @@ -157,6 +162,10 @@ async def client( ("/v1/raise_success", Handlers.raise_success), ("/v1/raise_success_with_reason", Handlers.raise_success_with_reason), ("/v1/raise_success_with_text", Handlers.raise_success_with_text), + ( + "/v1/raise_success_with_raw_text", + Handlers.raise_success_with_raw_text, + ), ] ] ) @@ -239,7 +248,7 @@ async def test_raised_unhandled_exception( # # ERROR servicelib.aiohttp.rest_middlewares:rest_middlewares.py:75 We apologize ... [OEC:128594540599840]. 
# { - # "exception_details": "Unexpected error", + # "exception_string": "Unexpected error", # "error_code": "OEC:128594540599840", # "context": { # "request.remote": "127.0.0.1", @@ -261,7 +270,7 @@ async def test_raised_unhandled_exception( assert response.method in caplog.text assert response.url.path in caplog.text - assert "exception_details" in caplog.text + assert "exception_string" in caplog.text assert "request.remote" in caplog.text assert "context" in caplog.text assert SomeUnexpectedError.__name__ in caplog.text @@ -269,3 +278,117 @@ async def test_raised_unhandled_exception( # log OEC assert "OEC:" in caplog.text + + +async def test_not_implemented_error_is_501(client: TestClient): + """Test that NotImplementedError is correctly mapped to HTTP 501 NOT IMPLEMENTED.""" + response = await client.get( + "/v1/raise_exception", params={"exc": NotImplementedError.__name__} + ) + assert response.status == status.HTTP_501_NOT_IMPLEMENTED + + # Check that the response is properly enveloped + payload = await response.json() + assert is_enveloped(payload) + + # Verify error details + data, error = unwrap_envelope(payload) + assert not data + assert error + assert error.get("status") == status.HTTP_501_NOT_IMPLEMENTED + + +async def test_timeout_error_is_504(client: TestClient): + """Test that TimeoutError is correctly mapped to HTTP 504 GATEWAY TIMEOUT.""" + response = await client.get( + "/v1/raise_exception", params={"exc": asyncio.TimeoutError.__name__} + ) + assert response.status == status.HTTP_504_GATEWAY_TIMEOUT + + # Check that the response is properly enveloped + payload = await response.json() + assert is_enveloped(payload) + + # Verify error details + data, error = unwrap_envelope(payload) + assert not data + assert error + assert error.get("status") == status.HTTP_504_GATEWAY_TIMEOUT + + +async def test_exception_in_non_api_route(client: TestClient): + """Test how exceptions are handled in routes not under the API path.""" + response = await client.get("/free/raise_exception") + + # This should be a raw exception, not processed by our middleware + assert response.status == status.HTTP_500_INTERNAL_SERVER_ERROR + + # Should not be enveloped since it's outside the API path + text = await response.text() + try: + # If it happens to be JSON, check it's not enveloped + payload = json.loads(text) + assert not is_enveloped(payload) + except json.JSONDecodeError: + # If it's not JSON, that's expected too + pass + + +async def test_http_ok_with_text_is_enveloped(client: TestClient): + """Test that HTTPOk with text is properly enveloped.""" + response = await client.get("/v1/raise_success_with_text") + assert response.status == status.HTTP_200_OK + assert response.reason == "I'm ok" + + # Should be enveloped + payload = await response.json() + assert is_enveloped(payload) + + # Check the content was preserved + data, error = unwrap_envelope(payload) + assert not error + assert data + assert data.get("ok") is True + + +async def test_http_ok_with_raw_text_is_not_allowed(client: TestClient): + response = await client.get("/v1/raise_success_with_raw_text") + assert response.status == status.HTTP_500_INTERNAL_SERVER_ERROR + + +async def test_exception_in_handler_returns_500( + client: TestClient, mocker: MockerFixture +): + """Test that exceptions in the handler functions are caught and return 500.""" + + # Mock _handle_aiohttp_web_http_successful to raise an exception + def mocked_handler(*args, **kwargs): + msg = "Simulated error in handler" + raise ValueError(msg) + + mocker.patch( + 
"servicelib.aiohttp.rest_middlewares._handle_aiohttp_web_http_successful", + side_effect=mocked_handler, + ) + + # Trigger a successful HTTP response that will be processed by our mocked handler + response = await client.get( + "/v1/raise_exception", params={"exc": web.HTTPOk.__name__} + ) + + # Should return 500 since our handler raised an exception + assert response.status == status.HTTP_500_INTERNAL_SERVER_ERROR + + # Check that the response is properly enveloped + payload = await response.json() + assert is_enveloped(payload) + + # Verify error details + data, error = unwrap_envelope(payload) + assert not data + assert error + assert error.get("status") == status.HTTP_500_INTERNAL_SERVER_ERROR + + # Make sure there are no detailed error logs in production mode + assert not error.get("errors") + assert not error.get("logs") diff --git a/packages/service-library/tests/aiohttp/test_rest_responses.py b/packages/service-library/tests/aiohttp/test_rest_responses.py index 8c80f86b2cdf..25cb9e3025d1 100644 --- a/packages/service-library/tests/aiohttp/test_rest_responses.py +++ b/packages/service-library/tests/aiohttp/test_rest_responses.py @@ -62,7 +62,6 @@ def test_collected_http_errors_map(status_code: int, http_error_cls: type[HTTPEr assert issubclass(http_error_cls, HTTPError) -@pytest.mark.parametrize("skip_details", [True, False]) @pytest.mark.parametrize("error_code", [None, create_error_code(Exception("fake"))]) @pytest.mark.parametrize( "http_error_cls", @@ -88,7 +87,7 @@ def test_collected_http_errors_map(status_code: int, http_error_cls: type[HTTPEr ], ) def tests_exception_to_response( - skip_details: bool, error_code: ErrorCodeStr | None, http_error_cls: type[HTTPError] + error_code: ErrorCodeStr | None, http_error_cls: type[HTTPError] ): expected_status_reason = "SHORT REASON" expected_error_message = "Something whent wrong !" 
@@ -99,8 +98,6 @@ def tests_exception_to_response( error_message=expected_error_message, status_reason=expected_status_reason, http_error_cls=http_error_cls, - skip_internal_error_details=skip_details - and (http_error_cls == web.HTTPInternalServerError), error_code=error_code, ) diff --git a/packages/service-library/tests/aiohttp/test_tracing.py b/packages/service-library/tests/aiohttp/test_tracing.py index 2621751f344a..c9fb30d7de85 100644 --- a/packages/service-library/tests/aiohttp/test_tracing.py +++ b/packages/service-library/tests/aiohttp/test_tracing.py @@ -3,15 +3,19 @@ # pylint: disable=unused-variable import importlib -from collections.abc import Callable, Iterator +from collections.abc import Callable +from functools import partial from typing import Any import pip import pytest from aiohttp import web from aiohttp.test_utils import TestClient +from opentelemetry import trace +from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter from pydantic import ValidationError from servicelib.aiohttp.tracing import get_tracing_lifespan +from servicelib.tracing import _OSPARC_TRACE_ID_HEADER from settings_library.tracing import TracingSettings @@ -51,16 +55,16 @@ def set_and_clean_settings_env_vars( indirect=True, ) async def test_valid_tracing_settings( + mock_otel_collector: InMemorySpanExporter, aiohttp_client: Callable, set_and_clean_settings_env_vars: Callable, tracing_settings_in, - uninstrument_opentelemetry: Iterator[None], ) -> TestClient: app = web.Application() service_name = "simcore_service_webserver" tracing_settings = TracingSettings() async for _ in get_tracing_lifespan( - app, service_name=service_name, tracing_settings=tracing_settings + app=app, service_name=service_name, tracing_settings=tracing_settings )(app): pass @@ -75,10 +79,10 @@ async def test_valid_tracing_settings( indirect=True, ) async def test_invalid_tracing_settings( + mock_otel_collector: InMemorySpanExporter, aiohttp_client: Callable, set_and_clean_settings_env_vars: Callable, tracing_settings_in, - uninstrument_opentelemetry: Iterator[None], ) -> TestClient: with pytest.raises(ValidationError): TracingSettings() @@ -124,11 +128,11 @@ def manage_package(request): indirect=True, ) async def test_tracing_setup_package_detection( + mock_otel_collector: InMemorySpanExporter, aiohttp_client: Callable, set_and_clean_settings_env_vars: Callable[[], None], tracing_settings_in: Callable[[], dict[str, Any]], manage_package, - uninstrument_opentelemetry: Iterator[None], ): package_name = manage_package importlib.import_module(package_name) @@ -137,14 +141,63 @@ async def test_tracing_setup_package_detection( service_name = "simcore_service_webserver" tracing_settings = TracingSettings() async for _ in get_tracing_lifespan( - app, + app=app, service_name=service_name, tracing_settings=tracing_settings, )(app): # idempotency async for _ in get_tracing_lifespan( - app, + app=app, service_name=service_name, tracing_settings=tracing_settings, )(app): pass + + +@pytest.mark.parametrize( + "tracing_settings_in", + [ + ("http://opentelemetry-collector", 4318), + ], + indirect=True, +) +@pytest.mark.parametrize( + "server_response", [web.Response(text="Hello, world!"), web.HTTPNotFound()] +) +async def test_trace_id_in_response_header( + mock_otel_collector: InMemorySpanExporter, + aiohttp_client: Callable, + set_and_clean_settings_env_vars: Callable, + tracing_settings_in, + server_response: web.Response | web.HTTPException, +) -> None: + app = web.Application() + service_name = 
"simcore_service_webserver" + tracing_settings = TracingSettings() + + async def handler(handler_data: dict, request: web.Request) -> web.Response: + current_span = trace.get_current_span() + handler_data[_OSPARC_TRACE_ID_HEADER] = format( + current_span.get_span_context().trace_id, "032x" + ) + if isinstance(server_response, web.HTTPException): + raise server_response + return server_response + + handler_data = dict() + app.router.add_get("/", partial(handler, handler_data)) + + async for _ in get_tracing_lifespan( + app=app, + service_name=service_name, + tracing_settings=tracing_settings, + add_response_trace_id_header=True, + )(app): + client = await aiohttp_client(app) + response = await client.get("/") + assert _OSPARC_TRACE_ID_HEADER in response.headers + trace_id = response.headers[_OSPARC_TRACE_ID_HEADER] + assert len(trace_id) == 32 # Ensure trace ID is a 32-character hex string + assert ( + trace_id == handler_data[_OSPARC_TRACE_ID_HEADER] + ) # Ensure trace IDs match diff --git a/packages/service-library/tests/aiohttp/with_postgres/test_aiopg_utils.py b/packages/service-library/tests/aiohttp/with_postgres/test_aiopg_utils.py index 1a2d453b4e6b..6a141f50a29b 100644 --- a/packages/service-library/tests/aiohttp/with_postgres/test_aiopg_utils.py +++ b/packages/service-library/tests/aiohttp/with_postgres/test_aiopg_utils.py @@ -78,7 +78,7 @@ async def test_create_pg_engine(postgres_service_with_fake_data: DataSourceName) dsn = postgres_service_with_fake_data # using raw call and dsn.asdict to fill create_engine arguments! - engine1 = await aiopg.sa.create_engine(minsize=1, maxsize=5, **asdict(dsn)) + engine1 = await aiopg.sa.create_engine(minsize=2, maxsize=5, **asdict(dsn)) # just creating engine engine2 = await create_pg_engine(dsn) @@ -114,7 +114,7 @@ async def test_engine_when_idle_for_some_time(): database="db", application_name="test-app", ) - engine = await create_pg_engine(dsn, minsize=1, maxsize=1) + engine = await create_pg_engine(dsn, minsize=2, maxsize=2) init_pg_tables(dsn, metadata) assert not engine.closed # does not mean anything!!! 
# pylint: disable=no-value-for-parameter diff --git a/packages/service-library/tests/conftest.py b/packages/service-library/tests/conftest.py index 979a3731071d..f06739423b20 100644 --- a/packages/service-library/tests/conftest.py +++ b/packages/service-library/tests/conftest.py @@ -1,10 +1,12 @@ +# pylint: disable=contextmanager-generator-missing-cleanup # pylint: disable=redefined-outer-name # pylint: disable=unused-argument # pylint: disable=unused-import +import asyncio import sys from collections.abc import AsyncIterable, AsyncIterator, Callable -from contextlib import AbstractAsyncContextManager, asynccontextmanager +from contextlib import AbstractAsyncContextManager, asynccontextmanager, suppress from copy import deepcopy from pathlib import Path from typing import Any @@ -12,17 +14,19 @@ import pytest import servicelib from faker import Faker -from pytest_mock import MockerFixture from servicelib.redis import RedisClientSDK, RedisClientsManager, RedisManagerDBConfig from settings_library.redis import RedisDatabase, RedisSettings pytest_plugins = [ + "pytest_simcore.asyncio_event_loops", "pytest_simcore.docker_compose", "pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", "pytest_simcore.docker", "pytest_simcore.environment_configs", "pytest_simcore.file_extra", + "pytest_simcore.logging", + "pytest_simcore.long_running_tasks", "pytest_simcore.pytest_global_environs", "pytest_simcore.rabbit_service", "pytest_simcore.redis_service", @@ -67,12 +71,10 @@ def fake_data_dict(faker: Faker) -> dict[str, Any]: return data -@pytest.fixture -async def get_redis_client_sdk( - mock_redis_socket_timeout: None, - mocker: MockerFixture, - redis_service: RedisSettings, -) -> AsyncIterable[ +@asynccontextmanager +async def _get_redis_client_sdk( + redis_settings: RedisSettings, +) -> AsyncIterator[ Callable[[RedisDatabase], AbstractAsyncContextManager[RedisClientSDK]] ]: @asynccontextmanager @@ -80,17 +82,19 @@ async def _( database: RedisDatabase, decode_response: bool = True, # noqa: FBT002 ) -> AsyncIterator[RedisClientSDK]: - redis_resources_dns = redis_service.build_redis_dsn(database) + redis_resources_dns = redis_settings.build_redis_dsn(database) client = RedisClientSDK( redis_resources_dns, decode_responses=decode_response, client_name="pytest" ) + await client.setup() assert client assert client.redis_dsn == redis_resources_dns assert client.client_name == "pytest" yield client - await client.shutdown() + with suppress(TimeoutError): + await asyncio.wait_for(client.shutdown(), timeout=5.0) async def _cleanup_redis_data(clients_manager: RedisClientsManager) -> None: for db in RedisDatabase: @@ -98,7 +102,7 @@ async def _cleanup_redis_data(clients_manager: RedisClientsManager) -> None: async with RedisClientsManager( {RedisManagerDBConfig(database=db) for db in RedisDatabase}, - redis_service, + redis_settings, client_name="pytest", ) as clients_manager: await _cleanup_redis_data(clients_manager) @@ -106,58 +110,21 @@ async def _cleanup_redis_data(clients_manager: RedisClientsManager) -> None: await _cleanup_redis_data(clients_manager) -@pytest.fixture() -def uninstrument_opentelemetry(): - yield - try: - from opentelemetry.instrumentation.redis import RedisInstrumentor - - RedisInstrumentor().uninstrument() - except ImportError: - pass - try: - from opentelemetry.instrumentation.botocore import BotocoreInstrumentor - - BotocoreInstrumentor().uninstrument() - except ImportError: - pass - try: - from opentelemetry.instrumentation.requests import RequestsInstrumentor - - 
RequestsInstrumentor().uninstrument() - except ImportError: - pass - try: - from opentelemetry.instrumentation.aiopg import AiopgInstrumentor - - AiopgInstrumentor().uninstrument() - except ImportError: - pass - try: - from opentelemetry.instrumentation.asyncpg import AsyncPGInstrumentor - - AsyncPGInstrumentor().uninstrument() - except ImportError: - pass - try: - from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor - - FastAPIInstrumentor().uninstrument() - except ImportError: - pass - try: - from opentelemetry.instrumentation.aiohttp_client import ( - AioHttpClientInstrumentor, - ) +@pytest.fixture +async def get_redis_client_sdk( + use_in_memory_redis: RedisSettings, +) -> AsyncIterable[ + Callable[[RedisDatabase], AbstractAsyncContextManager[RedisClientSDK]] +]: + async with _get_redis_client_sdk(use_in_memory_redis) as client: + yield client - AioHttpClientInstrumentor().uninstrument() - except ImportError: - pass - try: - from opentelemetry.instrumentation.aiohttp_server import ( - AioHttpServerInstrumentor, - ) - AioHttpServerInstrumentor().uninstrument() - except ImportError: - pass +@pytest.fixture +async def get_in_process_redis_client_sdk( + redis_service: RedisSettings, +) -> AsyncIterable[ + Callable[[RedisDatabase], AbstractAsyncContextManager[RedisClientSDK]] +]: + async with _get_redis_client_sdk(redis_service) as client: + yield client diff --git a/packages/service-library/tests/deferred_tasks/conftest.py b/packages/service-library/tests/deferred_tasks/conftest.py index 00881e614715..e5d8849e7e17 100644 --- a/packages/service-library/tests/deferred_tasks/conftest.py +++ b/packages/service-library/tests/deferred_tasks/conftest.py @@ -8,11 +8,11 @@ @pytest.fixture async def redis_client_sdk_deferred_tasks( - get_redis_client_sdk: Callable[ + get_in_process_redis_client_sdk: Callable[ [RedisDatabase, bool], AbstractAsyncContextManager[RedisClientSDK] - ] + ], ) -> AsyncIterator[RedisClientSDK]: - async with get_redis_client_sdk( + async with get_in_process_redis_client_sdk( RedisDatabase.DEFERRED_TASKS, decode_response=False ) as client: yield client diff --git a/packages/service-library/tests/deferred_tasks/example_app.py b/packages/service-library/tests/deferred_tasks/example_app.py index 9adb654e8964..991aa2efe8e2 100644 --- a/packages/service-library/tests/deferred_tasks/example_app.py +++ b/packages/service-library/tests/deferred_tasks/example_app.py @@ -95,6 +95,7 @@ def __init__( ) async def setup(self) -> None: + await self._redis_client.setup() await self._manager.setup() diff --git a/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py b/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py index cc19133b6b29..d25c7824752c 100644 --- a/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py +++ b/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py @@ -34,7 +34,6 @@ pytest_simcore_core_services_selection = [ "rabbit", - "redis", ] @@ -43,20 +42,23 @@ class MockKeys(StrAutoEnum): GET_TIMEOUT = auto() START_DEFERRED = auto() ON_DEFERRED_CREATED = auto() - RUN_DEFERRED = auto() + RUN_DEFERRED_BEFORE_HANDLER = auto() + RUN_DEFERRED_AFTER_HANDLER = auto() ON_DEFERRED_RESULT = auto() ON_FINISHED_WITH_ERROR = auto() + ON_CANCELLED = auto() @pytest.fixture async def redis_client_sdk( - redis_service: RedisSettings, + use_in_memory_redis: RedisSettings, ) -> AsyncIterable[RedisClientSDK]: sdk = RedisClientSDK( - 
redis_service.build_redis_dsn(RedisDatabase.DEFERRED_TASKS), + use_in_memory_redis.build_redis_dsn(RedisDatabase.DEFERRED_TASKS), decode_responses=False, client_name="pytest", ) + await sdk.setup() yield sdk await sdk.shutdown() @@ -122,14 +124,19 @@ async def on_created( @classmethod async def run(cls, context: DeferredContext) -> Any: + mocks[MockKeys.RUN_DEFERRED_BEFORE_HANDLER](context) result = await run(context) - mocks[MockKeys.RUN_DEFERRED](context) + mocks[MockKeys.RUN_DEFERRED_AFTER_HANDLER](context) return result @classmethod async def on_result(cls, result: Any, context: DeferredContext) -> None: mocks[MockKeys.ON_DEFERRED_RESULT](result, context) + @classmethod + async def on_cancelled(cls, context: DeferredContext) -> None: + mocks[MockKeys.ON_CANCELLED](context) + @classmethod async def on_finished_with_error( cls, error: TaskResultError, context: DeferredContext @@ -229,8 +236,8 @@ async def _run_ok(_: DeferredContext) -> Any: await _assert_mock_call(mocks, key=MockKeys.ON_DEFERRED_CREATED, count=1) assert TaskUID(mocks[MockKeys.ON_DEFERRED_CREATED].call_args_list[0].args[0]) - await _assert_mock_call(mocks, key=MockKeys.RUN_DEFERRED, count=1) - mocks[MockKeys.RUN_DEFERRED].assert_called_once_with(context) + await _assert_mock_call(mocks, key=MockKeys.RUN_DEFERRED_AFTER_HANDLER, count=1) + mocks[MockKeys.RUN_DEFERRED_AFTER_HANDLER].assert_called_once_with(context) await _assert_mock_call(mocks, key=MockKeys.ON_DEFERRED_RESULT, count=1) mocks[MockKeys.ON_DEFERRED_RESULT].assert_called_once_with(run_return, context) @@ -282,7 +289,7 @@ async def _run_raises(_: DeferredContext) -> None: count=retry_count, ) - await _assert_mock_call(mocks, key=MockKeys.RUN_DEFERRED, count=0) + await _assert_mock_call(mocks, key=MockKeys.RUN_DEFERRED_AFTER_HANDLER, count=0) await _assert_mock_call(mocks, key=MockKeys.ON_DEFERRED_RESULT, count=0) await _assert_log_message( @@ -319,8 +326,10 @@ async def _run_to_cancel(_: DeferredContext) -> None: await _assert_mock_call(mocks, key=MockKeys.ON_DEFERRED_CREATED, count=1) task_uid = TaskUID(mocks[MockKeys.ON_DEFERRED_CREATED].call_args_list[0].args[0]) + await _assert_mock_call(mocks, key=MockKeys.RUN_DEFERRED_BEFORE_HANDLER, count=1) await mocked_deferred_handler.cancel(task_uid) + await _assert_mock_call(mocks, key=MockKeys.ON_CANCELLED, count=1) await _assert_mock_call(mocks, key=MockKeys.ON_FINISHED_WITH_ERROR, count=0) assert ( @@ -330,7 +339,7 @@ async def _run_to_cancel(_: DeferredContext) -> None: == 0 ) - await _assert_mock_call(mocks, key=MockKeys.RUN_DEFERRED, count=0) + await _assert_mock_call(mocks, key=MockKeys.RUN_DEFERRED_AFTER_HANDLER, count=0) await _assert_mock_call(mocks, key=MockKeys.ON_DEFERRED_RESULT, count=0) await _assert_log_message( @@ -450,7 +459,7 @@ async def _run_that_times_out(_: DeferredContext) -> None: for entry in mocks[MockKeys.ON_FINISHED_WITH_ERROR].call_args_list: assert "builtins.TimeoutError" in entry.args[0].error - await _assert_mock_call(mocks, key=MockKeys.RUN_DEFERRED, count=0) + await _assert_mock_call(mocks, key=MockKeys.RUN_DEFERRED_AFTER_HANDLER, count=0) await _assert_mock_call(mocks, key=MockKeys.ON_DEFERRED_RESULT, count=0) diff --git a/packages/service-library/tests/deferred_tasks/test__redis_task_tracker.py b/packages/service-library/tests/deferred_tasks/test__redis_task_tracker.py index 366759e22d3b..515ed901e98d 100644 --- a/packages/service-library/tests/deferred_tasks/test__redis_task_tracker.py +++ b/packages/service-library/tests/deferred_tasks/test__redis_task_tracker.py @@ -22,7 +22,8 
@@ def task_schedule() -> TaskScheduleModel: return TypeAdapter(TaskScheduleModel).validate_python( { "timeout": timedelta(seconds=1), - "execution_attempts": 1, + "total_attempts": 1, + "execution_attempts": 2, "class_unique_reference": "mock", "start_context": {}, "state": TaskState.SCHEDULED, diff --git a/packages/service-library/tests/deferred_tasks/test__utils.py b/packages/service-library/tests/deferred_tasks/test__utils.py index db3f32554b32..8ac30f83a720 100644 --- a/packages/service-library/tests/deferred_tasks/test__utils.py +++ b/packages/service-library/tests/deferred_tasks/test__utils.py @@ -57,7 +57,7 @@ async def _() -> AsyncIterator[RabbitBroker]: @pytest.fixture def rabbit_exchange() -> RabbitExchange: - return RabbitExchange("test_exchange") + return RabbitExchange("test_exchange", durable=True, auto_delete=True) async def _assert_call_count( @@ -256,7 +256,12 @@ async def test_fan_out_exchange_message_delivery( handler_1_call_count = Mock() handler_2_call_count = Mock() - fan_out_exchange = RabbitExchange("test_fan_out_exchange", type=ExchangeType.FANOUT) + fan_out_exchange = RabbitExchange( + "test_fan_out_exchange", + type=ExchangeType.FANOUT, + durable=True, + auto_delete=True, + ) @rabbit_broker.subscriber(queue="handler_1", exchange=fan_out_exchange, retry=True) async def handler_1(sleep_duration: float) -> None: diff --git a/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py b/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py index 7d11d2571539..0bb6254542e9 100644 --- a/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py +++ b/packages/service-library/tests/deferred_tasks/test_deferred_tasks.py @@ -3,7 +3,6 @@ import asyncio import contextlib -import datetime import itertools import json import random @@ -19,7 +18,6 @@ from common_library.json_serialization import json_dumps from common_library.serialization import model_dump_with_secrets from pydantic import NonNegativeFloat, NonNegativeInt -from pytest_mock import MockerFixture from servicelib.rabbitmq import RabbitMQClient from servicelib.redis import RedisClientSDK from servicelib.sequences_utils import partition_gen @@ -333,8 +331,7 @@ async def rabbit_client( class ClientWithPingProtocol(Protocol): - async def ping(self) -> bool: - ... + async def ping(self) -> bool: ... 
class ServiceManager: @@ -386,19 +383,10 @@ async def pause_redis(self) -> AsyncIterator[None]: yield -@pytest.fixture -def mock_default_socket_timeout(mocker: MockerFixture) -> None: - mocker.patch( - "servicelib.redis._client.DEFAULT_SOCKET_TIMEOUT", - datetime.timedelta(seconds=0.25), - ) - - @pytest.mark.parametrize("max_workers", [10]) @pytest.mark.parametrize("deferred_tasks_to_start", [100]) @pytest.mark.parametrize("service", ["rabbit", "redis"]) async def test_workflow_with_third_party_services_outages( - mock_default_socket_timeout: None, paused_container: Callable[[str], AbstractAsyncContextManager[None]], redis_client_sdk_deferred_tasks: RedisClientSDK, rabbit_client: RabbitMQClient, diff --git a/packages/service-library/tests/fastapi/conftest.py b/packages/service-library/tests/fastapi/conftest.py index f8811ca04f5a..9074321fa9b3 100644 --- a/packages/service-library/tests/fastapi/conftest.py +++ b/packages/service-library/tests/fastapi/conftest.py @@ -11,7 +11,10 @@ from fastapi import APIRouter, FastAPI from fastapi.params import Query from httpx import ASGITransport, AsyncClient +from opentelemetry.sdk.trace.export import SimpleSpanProcessor +from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter from pydantic.types import PositiveFloat +from pytest_mock import MockerFixture @pytest.fixture @@ -55,3 +58,13 @@ def go() -> int: return cast(int, s.getsockname()[1]) return go + + +@pytest.fixture +def mock_otel_collector(mocker: MockerFixture) -> InMemorySpanExporter: + memory_exporter = InMemorySpanExporter() + span_processor = SimpleSpanProcessor(memory_exporter) + mocker.patch( + "servicelib.fastapi.tracing._create_span_processor", return_value=span_processor + ) + return memory_exporter diff --git a/packages/service-library/tests/fastapi/long_running_tasks/conftest.py b/packages/service-library/tests/fastapi/long_running_tasks/conftest.py index d43a7e445c10..f10a27c322ac 100644 --- a/packages/service-library/tests/fastapi/long_running_tasks/conftest.py +++ b/packages/service-library/tests/fastapi/long_running_tasks/conftest.py @@ -9,13 +9,24 @@ from fastapi import FastAPI from httpx import ASGITransport, AsyncClient from servicelib.fastapi import long_running_tasks +from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings @pytest.fixture -async def bg_task_app(router_prefix: str) -> FastAPI: +async def bg_task_app( + router_prefix: str, redis_service: RedisSettings, rabbit_service: RabbitSettings +) -> FastAPI: app = FastAPI() - long_running_tasks.server.setup(app, router_prefix=router_prefix) + long_running_tasks.server.setup( + app, + redis_settings=redis_service, + rabbit_settings=rabbit_service, + lrt_namespace="test", + router_prefix=router_prefix, + ) return app @@ -27,3 +38,14 @@ async def async_client(bg_task_app: FastAPI) -> AsyncIterable[AsyncClient]: headers={"Content-Type": "application/json"}, ) as client: yield client + + +@pytest.fixture +async def rabbitmq_rpc_client( + rabbit_service: RabbitSettings, +) -> AsyncIterable[RabbitMQRPCClient]: + client = await RabbitMQRPCClient.create( + client_name="test-lrt-rpc-client", settings=rabbit_service + ) + yield client + await client.close() diff --git a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_client.py b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_client.py index 02d392126cbf..42f76a58f724 100644 --- 
a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_client.py +++ b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_client.py @@ -8,7 +8,7 @@ @pytest.mark.parametrize( "error_class, error_args", [ - (HTTPError, dict(message="")), + (HTTPError, {"message": ""}), ], ) async def test_retry_on_errors( diff --git a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py index 84146c6b0dc6..1b72713dbd5c 100644 --- a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py +++ b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks.py @@ -12,27 +12,48 @@ import asyncio import json from collections.abc import AsyncIterator, Awaitable, Callable -from typing import Final +from typing import Annotated, Final import pytest from asgi_lifespan import LifespanManager from fastapi import APIRouter, Depends, FastAPI, status from httpx import AsyncClient from pydantic import TypeAdapter -from servicelib.fastapi import long_running_tasks -from servicelib.long_running_tasks._models import TaskGet, TaskId -from servicelib.long_running_tasks._task import TaskContext +from servicelib.fastapi.long_running_tasks._manager import FastAPILongRunningManager +from servicelib.fastapi.long_running_tasks.client import setup as setup_client +from servicelib.fastapi.long_running_tasks.server import ( + get_long_running_manager, +) +from servicelib.fastapi.long_running_tasks.server import setup as setup_server +from servicelib.long_running_tasks import lrt_api +from servicelib.long_running_tasks.models import ( + TaskGet, + TaskId, + TaskProgress, + TaskStatus, +) +from servicelib.long_running_tasks.task import TaskContext, TaskRegistry +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_delay from tenacity.wait import wait_fixed from yarl import URL +pytest_simcore_core_services_selection = [ + "rabbit", +] + ITEM_PUBLISH_SLEEP: Final[float] = 0.1 +class _TestingError(Exception): + pass + + async def _string_list_task( - task_progress: long_running_tasks.server.TaskProgress, + progress: TaskProgress, num_strings: int, sleep_time: float, fail: bool, @@ -41,13 +62,17 @@ async def _string_list_task( for index in range(num_strings): generated_strings.append(f"{index}") await asyncio.sleep(sleep_time) - task_progress.update(message="generated item", percent=index / num_strings) + await progress.update(message="generated item", percent=index / num_strings) if fail: - raise RuntimeError("We were asked to fail!!") + msg = "We were asked to fail!!" 
+ raise _TestingError(msg) return generated_strings +TaskRegistry.register(_string_list_task, allowed_errors=(_TestingError,)) + + @pytest.fixture def server_routes() -> APIRouter: routes = APIRouter() @@ -58,31 +83,41 @@ def server_routes() -> APIRouter: async def create_string_list_task( num_strings: int, sleep_time: float, + long_running_manager: Annotated[ + FastAPILongRunningManager, Depends(get_long_running_manager) + ], + *, fail: bool = False, - task_manager: long_running_tasks.server.TasksManager = Depends( - long_running_tasks.server.get_tasks_manager - ), - ) -> long_running_tasks.server.TaskId: - task_id = long_running_tasks.server.start_task( - task_manager, - _string_list_task, + ) -> TaskId: + return await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + _string_list_task.__name__, num_strings=num_strings, sleep_time=sleep_time, fail=fail, ) - return task_id return routes @pytest.fixture -async def app(server_routes: APIRouter) -> AsyncIterator[FastAPI]: +async def app( + server_routes: APIRouter, + use_in_memory_redis: RedisSettings, + rabbit_service: RabbitSettings, +) -> AsyncIterator[FastAPI]: # overrides fastapi/conftest.py:app app = FastAPI(title="test app") app.include_router(server_routes) - long_running_tasks.server.setup(app) - long_running_tasks.client.setup(app) - async with LifespanManager(app): + setup_server( + app, + redis_settings=use_in_memory_redis, + rabbit_settings=rabbit_service, + lrt_namespace="test", + ) + setup_client(app) + async with LifespanManager(app, startup_timeout=30, shutdown_timeout=30): yield app @@ -94,10 +129,7 @@ async def _caller(app: FastAPI, client: AsyncClient, **query_kwargs) -> TaskId: ) resp = await client.post(f"{url}") assert resp.status_code == status.HTTP_202_ACCEPTED - task_id = TypeAdapter(long_running_tasks.server.TaskId).validate_python( - resp.json() - ) - return task_id + return TypeAdapter(TaskId).validate_python(resp.json()) return _caller @@ -124,9 +156,7 @@ async def _waiter( with attempt: result = await client.get(f"{status_url}") assert result.status_code == status.HTTP_200_OK - task_status = long_running_tasks.server.TaskStatus.model_validate( - result.json() - ) + task_status = TaskStatus.model_validate(result.json()) assert task_status assert task_status.done @@ -151,9 +181,7 @@ async def test_workflow( with attempt: result = await client.get(f"{status_url}") assert result.status_code == status.HTTP_200_OK - task_status = long_running_tasks.server.TaskStatus.model_validate( - result.json() - ) + task_status = TaskStatus.model_validate(result.json()) assert task_status progress_updates.append( (task_status.task_progress.message, task_status.task_progress.percent) @@ -194,7 +222,7 @@ async def test_workflow( [ ("GET", "get_task_status"), ("GET", "get_task_result"), - ("DELETE", "cancel_and_delete_task"), + ("DELETE", "remove_task"), ], ) async def test_get_task_wrong_task_id_raises_not_found( @@ -218,7 +246,8 @@ async def test_failing_task_returns_error( await wait_for_task(app, client, task_id, {}) # get the result result_url = app.url_path_for("get_task_result", task_id=task_id) - with pytest.raises(RuntimeError) as exec_info: + + with pytest.raises(_TestingError) as exec_info: await client.get(f"{result_url}") assert f"{exec_info.value}" == "We were asked to fail!!" 
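
Reviewer note: the hunks above move the FastAPI long-running-tasks tests from the removed start_task(tasks_manager, ...) call to a register-then-start-by-name flow over the RPC client. A condensed sketch of that flow, with names copied from the diff above; the helper _start exists only for illustration and the task body is elided:

from servicelib.fastapi.long_running_tasks._manager import FastAPILongRunningManager
from servicelib.long_running_tasks import lrt_api
from servicelib.long_running_tasks.models import TaskId, TaskProgress
from servicelib.long_running_tasks.task import TaskRegistry


class _TestingError(Exception):
    ...


async def _string_list_task(
    progress: TaskProgress, num_strings: int, sleep_time: float, fail: bool
) -> list[str]:
    ...  # body elided, see the hunk above


# tasks are registered up-front so they can later be started by name
TaskRegistry.register(_string_list_task, allowed_errors=(_TestingError,))


async def _start(long_running_manager: FastAPILongRunningManager) -> TaskId:
    # in the route handler above, long_running_manager comes from
    # Depends(get_long_running_manager)
    return await lrt_api.start_task(
        long_running_manager.rpc_client,
        long_running_manager.lrt_namespace,
        _string_list_task.__name__,
        num_strings=10,
        sleep_time=0.2,
        fail=False,
    )
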
@@ -243,7 +272,7 @@ async def test_cancel_task( task_id = await start_long_running_task(app, client) # cancel the task - delete_url = app.url_path_for("cancel_and_delete_task", task_id=task_id) + delete_url = app.url_path_for("remove_task", task_id=task_id) result = await client.delete(f"{delete_url}") assert result.status_code == status.HTTP_204_NO_CONTENT diff --git a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks_context_manager.py b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks_context_manager.py index b0db697a6ad9..aeab8876dd9c 100644 --- a/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks_context_manager.py +++ b/packages/service-library/tests/fastapi/long_running_tasks/test_long_running_tasks_context_manager.py @@ -3,7 +3,7 @@ import asyncio from collections.abc import AsyncIterable -from typing import Final +from typing import Annotated, Final import pytest from asgi_lifespan import LifespanManager @@ -11,26 +11,33 @@ from httpx import AsyncClient from pydantic import AnyHttpUrl, PositiveFloat, TypeAdapter from servicelib.fastapi.long_running_tasks._context_manager import _ProgressManager +from servicelib.fastapi.long_running_tasks._manager import FastAPILongRunningManager from servicelib.fastapi.long_running_tasks.client import ( - Client, - ProgressMessage, - ProgressPercent, + HttpClient, periodic_task_result, ) from servicelib.fastapi.long_running_tasks.client import setup as setup_client -from servicelib.fastapi.long_running_tasks.server import ( - TaskId, - TaskProgress, - TasksManager, - get_tasks_manager, -) +from servicelib.fastapi.long_running_tasks.server import get_long_running_manager from servicelib.fastapi.long_running_tasks.server import setup as setup_server -from servicelib.fastapi.long_running_tasks.server import ( - start_task, -) -from servicelib.long_running_tasks._errors import ( +from servicelib.long_running_tasks import lrt_api +from servicelib.long_running_tasks.errors import ( + GenericClientError, TaskClientTimeoutError, + TaskExceptionError, ) +from servicelib.long_running_tasks.models import ( + ProgressMessage, + ProgressPercent, + TaskId, + TaskProgress, +) +from servicelib.long_running_tasks.task import TaskRegistry +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings + +pytest_simcore_core_services_selection = [ + "rabbit", +] TASK_SLEEP_INTERVAL: Final[PositiveFloat] = 0.1 @@ -38,21 +45,33 @@ async def _assert_task_removed( - async_client: AsyncClient, task_id: TaskId, router_prefix: str + http_client: HttpClient, task_id: TaskId, router_prefix: str ) -> None: - result = await async_client.get(f"{router_prefix}/tasks/{task_id}") - assert result.status_code == status.HTTP_404_NOT_FOUND + with pytest.raises(GenericClientError, match=f"No task with {task_id} found"): + await http_client.get_task_status(task_id) -async def a_test_task(task_progress: TaskProgress) -> int: +async def a_test_task(progress: TaskProgress) -> int: + _ = progress await asyncio.sleep(TASK_SLEEP_INTERVAL) return 42 -async def a_failing_test_task(task_progress: TaskProgress) -> None: +TaskRegistry.register(a_test_task) + + +class _TestingError(Exception): + pass + + +async def a_failing_test_task(progress: TaskProgress) -> None: + _ = progress await asyncio.sleep(TASK_SLEEP_INTERVAL) msg = "I am failing as requested" - raise RuntimeError(msg) + raise _TestingError(msg) + + +TaskRegistry.register(a_failing_test_task) 
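
The client side of the same migration: the tests in this file now go through HttpClient and the periodic_task_result context manager instead of polling raw status URLs. A condensed sketch using the names from this file's fixtures and tests; the helper _wait_for_result exists only for illustration:

from fastapi import FastAPI
from httpx import AsyncClient
from pydantic import AnyHttpUrl, TypeAdapter
from servicelib.fastapi.long_running_tasks.client import HttpClient, periodic_task_result
from servicelib.long_running_tasks.models import TaskId


async def _wait_for_result(
    bg_task_app: FastAPI, async_client: AsyncClient, task_id: TaskId
) -> None:
    # base_url value copied from the fixture below
    url = TypeAdapter(AnyHttpUrl).validate_python("http://backgroud.testserver.io/")
    http_client = HttpClient(app=bg_task_app, async_client=async_client, base_url=f"{url}")

    # polls the task status until it is done, then yields the task's result;
    # per the tests below, TaskClientTimeoutError is raised when task_timeout is
    # exceeded and TaskExceptionError when the task itself failed, and the task
    # is removed afterwards
    async with periodic_task_result(
        http_client,
        task_id,
        task_timeout=10,
        status_poll_interval=0.1,
    ) as result:
        assert result == 42
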
@pytest.fixture @@ -61,33 +80,52 @@ def user_routes() -> APIRouter: @router.get("/api/success", status_code=status.HTTP_200_OK) async def create_task_user_defined_route( - tasks_manager: TasksManager = Depends(get_tasks_manager), + long_running_manager: Annotated[ + FastAPILongRunningManager, Depends(get_long_running_manager) + ], ) -> TaskId: - task_id = start_task(tasks_manager, task=a_test_task) - return task_id + return await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + a_test_task.__name__, + ) @router.get("/api/failing", status_code=status.HTTP_200_OK) async def create_task_which_fails( - task_manager: TasksManager = Depends(get_tasks_manager), + long_running_manager: Annotated[ + FastAPILongRunningManager, Depends(get_long_running_manager) + ], ) -> TaskId: - task_id = start_task(task_manager, task=a_failing_test_task) - return task_id + return await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + a_failing_test_task.__name__, + ) return router @pytest.fixture async def bg_task_app( - user_routes: APIRouter, router_prefix: str + user_routes: APIRouter, + router_prefix: str, + use_in_memory_redis: RedisSettings, + rabbit_service: RabbitSettings, ) -> AsyncIterable[FastAPI]: app = FastAPI() app.include_router(user_routes) - setup_server(app, router_prefix=router_prefix) + setup_server( + app, + router_prefix=router_prefix, + redis_settings=use_in_memory_redis, + rabbit_settings=rabbit_service, + lrt_namespace="test", + ) setup_client(app, router_prefix=router_prefix) - async with LifespanManager(app): + async with LifespanManager(app, startup_timeout=30, shutdown_timeout=30): yield app @@ -96,39 +134,48 @@ def mock_task_id() -> TaskId: return TypeAdapter(TaskId).validate_python("fake_task_id") +@pytest.fixture() +def http_client(bg_task_app: FastAPI, async_client: AsyncClient) -> HttpClient: + url = TypeAdapter(AnyHttpUrl).validate_python("http://backgroud.testserver.io/") + return HttpClient(app=bg_task_app, async_client=async_client, base_url=f"{url}") + + +async def _create_and_get_taskid(async_client: AsyncClient, *, endpoint: str) -> TaskId: + result = await async_client.get(f"/api/{endpoint}") + assert result.status_code == status.HTTP_200_OK, result.text + task_id: TaskId = result.json() + return task_id + + async def test_task_result( - bg_task_app: FastAPI, async_client: AsyncClient, router_prefix: str + async_client: AsyncClient, + http_client: HttpClient, + router_prefix: str, ) -> None: - result = await async_client.get("/api/success") - assert result.status_code == status.HTTP_200_OK, result.text - task_id = result.json() + task_id = await _create_and_get_taskid(async_client, endpoint="success") - url = TypeAdapter(AnyHttpUrl).validate_python("http://backgroud.testserver.io/") - client = Client(app=bg_task_app, async_client=async_client, base_url=url) async with periodic_task_result( - client, + http_client, task_id, task_timeout=10, status_poll_interval=TASK_SLEEP_INTERVAL / 3, ) as result: assert result == 42 - await _assert_task_removed(async_client, task_id, router_prefix) + await _assert_task_removed(http_client, task_id, router_prefix) async def test_task_result_times_out( - bg_task_app: FastAPI, async_client: AsyncClient, router_prefix: str + async_client: AsyncClient, + http_client: HttpClient, + router_prefix: str, ) -> None: - result = await async_client.get("/api/success") - assert result.status_code == status.HTTP_200_OK, result.text - task_id = result.json() + 
task_id = await _create_and_get_taskid(async_client, endpoint="success") - url = TypeAdapter(AnyHttpUrl).validate_python("http://backgroud.testserver.io/") - client = Client(app=bg_task_app, async_client=async_client, base_url=url) timeout = TASK_SLEEP_INTERVAL / 10 with pytest.raises(TaskClientTimeoutError) as exec_info: async with periodic_task_result( - client, + http_client, task_id, task_timeout=timeout, status_poll_interval=TASK_SLEEP_INTERVAL / 3, @@ -139,27 +186,26 @@ async def test_task_result_times_out( == f"Timed out after {timeout} seconds while awaiting '{task_id}' to complete" ) - await _assert_task_removed(async_client, task_id, router_prefix) + await _assert_task_removed(http_client, task_id, router_prefix) async def test_task_result_task_result_is_an_error( - bg_task_app: FastAPI, async_client: AsyncClient, router_prefix: str + bg_task_app: FastAPI, + async_client: AsyncClient, + http_client: HttpClient, + router_prefix: str, ) -> None: - result = await async_client.get("/api/failing") - assert result.status_code == status.HTTP_200_OK, result.text - task_id = result.json() + task_id = await _create_and_get_taskid(async_client, endpoint="failing") - url = TypeAdapter(AnyHttpUrl).validate_python("http://backgroud.testserver.io/") - client = Client(app=bg_task_app, async_client=async_client, base_url=url) - with pytest.raises(RuntimeError, match="I am failing as requested"): + with pytest.raises(TaskExceptionError, match="I am failing as requested"): async with periodic_task_result( - client, + http_client, task_id, task_timeout=10, status_poll_interval=TASK_SLEEP_INTERVAL / 3, ): pass - await _assert_task_removed(async_client, task_id, router_prefix) + await _assert_task_removed(http_client, task_id, router_prefix) @pytest.mark.parametrize("repeat", [1, 2, 10]) diff --git a/packages/service-library/tests/fastapi/test_cancellation_middleware.py b/packages/service-library/tests/fastapi/test_cancellation_middleware.py index add93851f540..1eac0cc1ae63 100644 --- a/packages/service-library/tests/fastapi/test_cancellation_middleware.py +++ b/packages/service-library/tests/fastapi/test_cancellation_middleware.py @@ -2,6 +2,7 @@ import asyncio import logging +import threading from collections.abc import Iterator from threading import Thread from unittest.mock import AsyncMock @@ -9,6 +10,7 @@ import httpx import pytest import uvicorn +import uvloop from fastapi import APIRouter, BackgroundTasks, FastAPI from pytest_simcore.helpers.logging_tools import log_context from servicelib.fastapi.cancellation_middleware import RequestCancellationMiddleware @@ -18,8 +20,11 @@ @pytest.fixture -def server_done_event() -> asyncio.Event: - return asyncio.Event() +def server_done_event() -> threading.Event: + # This allows communicating an event between the thread where the server is running + # and the test thread. It is used to signal that the server has completed its task. + # WARNING: do not use asyncio.Event here as it is not thread-safe!
+ return threading.Event() @pytest.fixture @@ -29,7 +34,7 @@ def server_cancelled_mock() -> AsyncMock: @pytest.fixture def fastapi_router( - server_done_event: asyncio.Event, server_cancelled_mock: AsyncMock + server_done_event: threading.Event, server_cancelled_mock: AsyncMock ) -> APIRouter: router = APIRouter() @@ -77,22 +82,29 @@ async def sleep_with_background_task( def fastapi_app(fastapi_router: APIRouter) -> FastAPI: app = FastAPI() app.include_router(fastapi_router) - app.add_middleware(RequestCancellationMiddleware) + + app.add_middleware(RequestCancellationMiddleware) # Middleware under test return app @pytest.fixture def uvicorn_server(fastapi_app: FastAPI) -> Iterator[URL]: - random_port = unused_port() + + server_host = "127.0.0.1" + server_port = unused_port() + server_url = f"http://{server_host}:{server_port}" + with log_context( logging.INFO, - msg=f"with uvicorn server on 127.0.0.1:{random_port}", + msg=f"with uvicorn server on {server_url}", ) as ctx: + config = uvicorn.Config( fastapi_app, - host="127.0.0.1", - port=random_port, + host=server_host, + port=server_port, log_level="error", + loop="uvloop", ) server = uvicorn.Server(config) @@ -102,20 +114,16 @@ def uvicorn_server(fastapi_app: FastAPI) -> Iterator[URL]: @retry(wait=wait_fixed(0.1), stop=stop_after_delay(10), reraise=True) def wait_for_server_ready() -> None: - with httpx.Client() as client: - response = client.get(f"http://127.0.1:{random_port}/") - assert ( - response.is_success - ), f"Server did not start successfully: {response.status_code} {response.text}" + response = httpx.get(f"{server_url}/") + assert ( + response.is_success + ), f"Server did not start successfully: {response.status_code} {response.text}" wait_for_server_ready() - ctx.logger.info( - "server ready at: %s", - f"http://127.0.0.1:{random_port}", - ) + ctx.logger.info("server ready at: %s", server_url) - yield URL(f"http://127.0.0.1:{random_port}") + yield URL(server_url) server.should_exit = True thread.join(timeout=10) @@ -123,43 +131,50 @@ def wait_for_server_ready() -> None: async def test_server_cancels_when_client_disconnects( uvicorn_server: URL, - server_done_event: asyncio.Event, + server_done_event: threading.Event, server_cancelled_mock: AsyncMock, ): + # Implementation of RequestCancellationMiddleware is under test here + assert isinstance(asyncio.get_running_loop(), uvloop.Loop) + async with httpx.AsyncClient(base_url=f"{uvicorn_server}") as client: - # check standard call still complete as expected + # 1. check a standard call still completes as expected with log_context(logging.INFO, msg="client calling endpoint"): response = await client.get("/sleep", params={"sleep_time": 0.1}) + assert response.status_code == 200 assert response.json() == {"message": "Slept for 0.1 seconds"} - async with asyncio.timeout(10): - await server_done_event.wait() + + server_done_event.wait(10) server_done_event.clear() - # check slow call get cancelled + # 2. check a slow call gets cancelled with log_context( logging.INFO, msg="client calling endpoint for cancellation" ) as ctx: with pytest.raises(httpx.ReadTimeout): - response = await client.get( - "/sleep", params={"sleep_time": 10}, timeout=0.1 + await client.get( + "/sleep", + params={"sleep_time": 10}, + timeout=0.1, # <--- this will force the client to disconnect from the server! ) ctx.logger.info("client disconnected from server") - async with asyncio.timeout(5): - await server_done_event.wait() + # request should have been cancelled after the ReadTimeout!
+ server_done_event.wait(5) server_cancelled_mock.assert_called_once() server_cancelled_mock.reset_mock() server_done_event.clear() + # 3. check background tasks get cancelled as well, sadly # NOTE: shows that FastAPI BackgroundTasks get cancelled too! - # check background tasks get cancelled as well sadly with log_context(logging.INFO, msg="client calling endpoint for cancellation"): response = await client.get( "/sleep-with-background-task", params={"sleep_time": 2}, ) assert response.status_code == 200 - async with asyncio.timeout(5): - await server_done_event.wait() + + # the background task should have been cancelled as well + server_done_event.wait(5) server_cancelled_mock.assert_called_once() diff --git a/packages/service-library/tests/fastapi/test_postgres_lifespan.py b/packages/service-library/tests/fastapi/test_postgres_lifespan.py index 0c656c371876..07cc3077df1d 100644 --- a/packages/service-library/tests/fastapi/test_postgres_lifespan.py +++ b/packages/service-library/tests/fastapi/test_postgres_lifespan.py @@ -83,7 +83,6 @@ async def test_lifespan_postgres_database_in_an_app( mock_create_async_engine_and_database_ready: MockType, app_lifespan: LifespanManager, ): - app = FastAPI(lifespan=app_lifespan) async with ASGILifespanManager( @@ -93,7 +92,7 @@ async def test_lifespan_postgres_database_in_an_app( ) as asgi_manager: # Verify that the async engine was created mock_create_async_engine_and_database_ready.assert_called_once_with( - app.state.settings.CATALOG_POSTGRES + app.state.settings.CATALOG_POSTGRES, app.title ) # Verify that the async engine is in the lifespan manager state diff --git a/packages/service-library/tests/fastapi/test_tracing.py b/packages/service-library/tests/fastapi/test_tracing.py index 8e58dfd75dd9..148f4e43776d 100644 --- a/packages/service-library/tests/fastapi/test_tracing.py +++ b/packages/service-library/tests/fastapi/test_tracing.py @@ -4,15 +4,27 @@ import importlib import random import string -from collections.abc import Callable, Iterator +from collections.abc import Callable +from functools import partial from typing import Any import pip import pytest from fastapi import FastAPI +from fastapi.exceptions import HTTPException +from fastapi.responses import PlainTextResponse +from fastapi.testclient import TestClient +from opentelemetry import trace +from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter from pydantic import ValidationError from servicelib.fastapi.tracing import ( get_tracing_instrumentation_lifespan, + initialize_fastapi_app_tracing, +) +from servicelib.tracing import ( + _OSPARC_TRACE_ID_HEADER, + _PROFILE_ATTRIBUTE_NAME, + with_profiled_span, ) from settings_library.tracing import TracingSettings @@ -60,9 +72,9 @@ def set_and_clean_settings_env_vars( ) async def test_valid_tracing_settings( mocked_app: FastAPI, + mock_otel_collector: InMemorySpanExporter, set_and_clean_settings_env_vars: Callable[[], None], tracing_settings_in: Callable[[], dict[str, Any]], - uninstrument_opentelemetry: Iterator[None], ): tracing_settings = TracingSettings() async for _ in get_tracing_instrumentation_lifespan( @@ -94,9 +106,9 @@ async def test_valid_tracing_settings( ) async def test_invalid_tracing_settings( mocked_app: FastAPI, + mock_otel_collector: InMemorySpanExporter, set_and_clean_settings_env_vars: Callable[[], None], tracing_settings_in: Callable[[], dict[str, Any]], - uninstrument_opentelemetry: Iterator[None], ): app = mocked_app with pytest.raises((BaseException, ValidationError, TypeError)): #
noqa: PT012 @@ -149,9 +161,9 @@ def manage_package(request): ) async def test_tracing_setup_package_detection( mocked_app: FastAPI, + mock_otel_collector: InMemorySpanExporter, set_and_clean_settings_env_vars: Callable[[], None], tracing_settings_in: Callable[[], dict[str, Any]], - uninstrument_opentelemetry: Iterator[None], manage_package, ): package_name = manage_package @@ -167,3 +179,108 @@ async def test_tracing_setup_package_detection( service_name="Mock-Openetlemetry-Pytest", )(app=mocked_app): pass + + +@pytest.mark.parametrize( + "tracing_settings_in", + [ + ("http://opentelemetry-collector", 4318), + ], + indirect=True, +) +@pytest.mark.parametrize( + "server_response", + [ + PlainTextResponse("ok"), + HTTPException(status_code=400, detail="error"), + ], +) +async def test_trace_id_in_response_header( + mock_otel_collector: InMemorySpanExporter, + mocked_app: FastAPI, + set_and_clean_settings_env_vars: Callable, + tracing_settings_in: Callable, + server_response: PlainTextResponse | HTTPException, +) -> None: + tracing_settings = TracingSettings() + + handler_data = dict() + + async def handler(handler_data: dict): + current_span = trace.get_current_span() + handler_data[_OSPARC_TRACE_ID_HEADER] = format( + current_span.get_span_context().trace_id, "032x" + ) + if isinstance(server_response, HTTPException): + raise server_response + return server_response + + mocked_app.get("/")(partial(handler, handler_data)) + + async for _ in get_tracing_instrumentation_lifespan( + tracing_settings=tracing_settings, + service_name="Mock-OpenTelemetry-Pytest", + )(app=mocked_app): + initialize_fastapi_app_tracing(mocked_app, add_response_trace_id_header=True) + client = TestClient(mocked_app) + response = client.get("/") + assert _OSPARC_TRACE_ID_HEADER in response.headers + trace_id = response.headers[_OSPARC_TRACE_ID_HEADER] + assert len(trace_id) == 32 # Ensure trace ID is a 32-character hex string + assert trace_id == handler_data[_OSPARC_TRACE_ID_HEADER] + + +@pytest.mark.parametrize( + "tracing_settings_in", + [ + ("http://opentelemetry-collector", 4318), + ], + indirect=True, +) +@pytest.mark.parametrize( + "server_response", + [ + PlainTextResponse("ok"), + HTTPException(status_code=400, detail="error"), + ], +) +async def test_with_profile_span( + mock_otel_collector: InMemorySpanExporter, + mocked_app: FastAPI, + set_and_clean_settings_env_vars: Callable[[], None], + tracing_settings_in: Callable, + server_response: PlainTextResponse | HTTPException, +): + tracing_settings = TracingSettings() + + handler_data = dict() + + @with_profiled_span + async def handler(handler_data: dict): + current_span = trace.get_current_span() + handler_data[_OSPARC_TRACE_ID_HEADER] = format( + current_span.get_span_context().trace_id, "032x" + ) + if isinstance(server_response, HTTPException): + raise server_response + return server_response + + mocked_app.get("/")(partial(handler, handler_data)) + + async for _ in get_tracing_instrumentation_lifespan( + tracing_settings=tracing_settings, + service_name="Mock-OpenTelemetry-Pytest", + )(app=mocked_app): + initialize_fastapi_app_tracing(mocked_app, add_response_trace_id_header=True) + client = TestClient(mocked_app) + _ = client.get("/") + trace_id = handler_data.get(_OSPARC_TRACE_ID_HEADER) + assert trace_id is not None + + spans = mock_otel_collector.get_finished_spans() + assert any( + span.context.trace_id == int(trace_id, 16) + and _PROFILE_ATTRIBUTE_NAME in span.attributes.keys() + for span in spans + if span.context is not None and span.attributes 
is not None + ) diff --git a/packages/service-library/tests/long_running_tasks/conftest.py b/packages/service-library/tests/long_running_tasks/conftest.py new file mode 100644 index 000000000000..df4bb2fd9bc2 --- /dev/null +++ b/packages/service-library/tests/long_running_tasks/conftest.py @@ -0,0 +1,85 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument + +import asyncio +import logging +from collections.abc import AsyncIterable, AsyncIterator, Awaitable, Callable +from datetime import timedelta + +import pytest +from faker import Faker +from pytest_mock import MockerFixture +from servicelib.logging_utils import log_catch +from servicelib.long_running_tasks.manager import ( + LongRunningManager, +) +from servicelib.long_running_tasks.models import LRTNamespace, TaskContext +from servicelib.long_running_tasks.task import TasksManager +from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings +from utils import TEST_CHECK_STALE_INTERVAL_S + +_logger = logging.getLogger(__name__) + + +class _TestingLongRunningManager(LongRunningManager): + @staticmethod + def get_task_context(request) -> TaskContext: + _ = request + return {} + + +@pytest.fixture +async def get_long_running_manager( + fast_long_running_tasks_cancellation: None, faker: Faker +) -> AsyncIterator[ + Callable[ + [RedisSettings, RabbitSettings, LRTNamespace | None], + Awaitable[LongRunningManager], + ] +]: + managers: list[LongRunningManager] = [] + + async def _( + redis_settings: RedisSettings, + rabbit_settings: RabbitSettings, + lrt_namespace: LRTNamespace | None, + ) -> LongRunningManager: + manager = _TestingLongRunningManager( + stale_task_check_interval=timedelta(seconds=TEST_CHECK_STALE_INTERVAL_S), + stale_task_detect_timeout=timedelta(seconds=TEST_CHECK_STALE_INTERVAL_S), + redis_settings=redis_settings, + rabbit_settings=rabbit_settings, + lrt_namespace=lrt_namespace or f"test{faker.uuid4()}", + ) + await manager.setup() + managers.append(manager) + return manager + + yield _ + + for manager in managers: + with log_catch(_logger, reraise=False): + await asyncio.wait_for(manager.teardown(), timeout=5) + + +@pytest.fixture +async def rabbitmq_rpc_client( + rabbit_service: RabbitSettings, +) -> AsyncIterable[RabbitMQRPCClient]: + client = await RabbitMQRPCClient.create( + client_name="test-lrt-rpc-client", settings=rabbit_service + ) + yield client + await client.close() + + +@pytest.fixture +def disable_stale_tasks_monitor(mocker: MockerFixture) -> None: + # no need to autoremove stale tasks in these tests + async def _to_replace(self: TasksManager) -> None: + self._started_event_task_stale_tasks_monitor.set() + + mocker.patch.object(TasksManager, "_stale_tasks_monitor", _to_replace) diff --git a/packages/service-library/tests/long_running_tasks/test_long_running_tasks__redis_store.py b/packages/service-library/tests/long_running_tasks/test_long_running_tasks__redis_store.py new file mode 100644 index 000000000000..fc08de586864 --- /dev/null +++ b/packages/service-library/tests/long_running_tasks/test_long_running_tasks__redis_store.py @@ -0,0 +1,117 @@ +# pylint:disable=redefined-outer-name + +from collections.abc import AsyncIterable, Callable +from contextlib import AbstractAsyncContextManager +from copy import deepcopy + +import pytest +from pydantic import TypeAdapter +from servicelib.long_running_tasks._redis_store import ( + 
_MARKED_FOR_REMOVAL_FIELD, + RedisStore, +) +from servicelib.long_running_tasks.models import TaskData +from servicelib.redis._client import RedisClientSDK +from settings_library.redis import RedisDatabase, RedisSettings + + +def test_ensure_task_data_field_name_and_type(): + # NOTE: ensure these do not change; if you want to change them, remember that the db is invalid + assert _MARKED_FOR_REMOVAL_FIELD == "marked_for_removal" + field = TaskData.model_fields[_MARKED_FOR_REMOVAL_FIELD] + assert field.annotation is bool + + +@pytest.fixture +def task_data() -> TaskData: + return TypeAdapter(TaskData).validate_python( + TaskData.model_json_schema()["examples"][0] + ) + + +@pytest.fixture +async def store( + use_in_memory_redis: RedisSettings, + get_redis_client_sdk: Callable[ + [RedisDatabase], AbstractAsyncContextManager[RedisClientSDK] + ], +) -> AsyncIterable[RedisStore]: + store = RedisStore(redis_settings=use_in_memory_redis, namespace="test") + + await store.setup() + yield store + await store.shutdown() + + # triggers cleanup of all redis data + async with get_redis_client_sdk(RedisDatabase.LONG_RUNNING_TASKS): + pass + + +async def test_workflow(store: RedisStore, task_data: TaskData) -> None: + # task data + assert await store.list_tasks_data() == [] + assert await store.get_task_data("missing") is None + + await store.add_task_data(task_data.task_id, task_data) + + assert await store.list_tasks_data() == [task_data] + + await store.delete_task_data(task_data.task_id) + + assert await store.list_tasks_data() == [] + + # cancelled tasks + await store.add_task_data(task_data.task_id, task_data) + + assert await store.is_marked_for_removal(task_data.task_id) is False + + await store.mark_for_removal(task_data.task_id) + + assert await store.is_marked_for_removal(task_data.task_id) is True + + +@pytest.fixture +async def redis_stores( + use_in_memory_redis: RedisSettings, + get_redis_client_sdk: Callable[ + [RedisDatabase], AbstractAsyncContextManager[RedisClientSDK] + ], +) -> AsyncIterable[list[RedisStore]]: + stores: list[RedisStore] = [ + RedisStore(redis_settings=use_in_memory_redis, namespace=f"test-{i}") + for i in range(5) + ] + for store in stores: + await store.setup() + + yield stores + + for store in stores: + await store.shutdown() + + # triggers cleanup of all redis data + async with get_redis_client_sdk(RedisDatabase.LONG_RUNNING_TASKS): + pass + + +async def test_workflow_multiple_redis_stores_with_different_namespaces( + redis_stores: list[RedisStore], task_data: TaskData +): + + for store in redis_stores: + assert await store.list_tasks_data() == [] + + for store in redis_stores: + await store.add_task_data(task_data.task_id, task_data) + await store.mark_for_removal(task_data.task_id) + + marked_as_removed_task_data = deepcopy(task_data) + marked_as_removed_task_data.marked_for_removal = True + for store in redis_stores: + assert await store.list_tasks_data() == [marked_as_removed_task_data] + + for store in redis_stores: + await store.delete_task_data(task_data.task_id) + + for store in redis_stores: + assert await store.list_tasks_data() == [] diff --git a/packages/service-library/tests/long_running_tasks/test_long_running_tasks__serialization.py b/packages/service-library/tests/long_running_tasks/test_long_running_tasks__serialization.py new file mode 100644 index 000000000000..3b7562e55503 --- /dev/null +++ b/packages/service-library/tests/long_running_tasks/test_long_running_tasks__serialization.py @@ -0,0 +1,50 @@ +from typing import Any + +import pytest +from
aiohttp.web import HTTPException, HTTPInternalServerError +from servicelib.aiohttp.long_running_tasks._server import AiohttpHTTPExceptionSerializer +from servicelib.long_running_tasks._serialization import ( + dumps, + loads, + register_custom_serialization, +) + +register_custom_serialization(HTTPException, AiohttpHTTPExceptionSerializer) + + +class PositionalArguments: + def __init__(self, arg1, arg2, *args): + self.arg1 = arg1 + self.arg2 = arg2 + self.args = args + + +class MixedArguments: + def __init__(self, arg1, arg2, kwarg1=None, kwarg2=None): + self.arg1 = arg1 + self.arg2 = arg2 + self.kwarg1 = kwarg1 + self.kwarg2 = kwarg2 + + +@pytest.mark.parametrize( + "obj", + [ + HTTPInternalServerError(reason="Uh-oh!", text="Failure!"), + PositionalArguments("arg1", "arg2", "arg3", "arg4"), + MixedArguments("arg1", "arg2", kwarg1="kwarg1", kwarg2="kwarg2"), + "a_string", + 1, + ], +) +def test_serialization(obj: Any): + str_data = dumps(obj) + + try: + reconstructed_obj = loads(str_data) + except Exception as exc: # pylint:disable=broad-exception-caught + reconstructed_obj = exc + + assert type(reconstructed_obj) is type(obj) + if hasattr(obj, "__dict__"): + assert reconstructed_obj.__dict__ == obj.__dict__ diff --git a/packages/service-library/tests/long_running_tasks/test_long_running_tasks_client_long_running_manager.py b/packages/service-library/tests/long_running_tasks/test_long_running_tasks_client_long_running_manager.py new file mode 100644 index 000000000000..27369fe08d64 --- /dev/null +++ b/packages/service-library/tests/long_running_tasks/test_long_running_tasks_client_long_running_manager.py @@ -0,0 +1,82 @@ +# pylint:disable=redefined-outer-name + +from collections.abc import AsyncIterable, Callable +from contextlib import AbstractAsyncContextManager +from copy import deepcopy + +import pytest +from pydantic import TypeAdapter +from servicelib.long_running_tasks._redis_store import RedisStore +from servicelib.long_running_tasks.long_running_client_helper import ( + LongRunningClientHelper, +) +from servicelib.long_running_tasks.models import LRTNamespace, TaskData +from servicelib.redis._client import RedisClientSDK +from settings_library.redis import RedisDatabase, RedisSettings + + +@pytest.fixture +def task_data() -> TaskData: + return TypeAdapter(TaskData).validate_python( + TaskData.model_json_schema()["examples"][0] + ) + + +@pytest.fixture +def lrt_namespace() -> LRTNamespace: + return "TEST-NAMESPACE" + + +@pytest.fixture +async def store( + use_in_memory_redis: RedisSettings, + get_redis_client_sdk: Callable[ + [RedisDatabase], AbstractAsyncContextManager[RedisClientSDK] + ], + lrt_namespace: LRTNamespace, +) -> AsyncIterable[RedisStore]: + store = RedisStore(redis_settings=use_in_memory_redis, namespace=lrt_namespace) + + await store.setup() + yield store + await store.shutdown() + + # triggers cleanup of all redis data + async with get_redis_client_sdk(RedisDatabase.LONG_RUNNING_TASKS): + pass + + +@pytest.fixture +async def long_running_client_helper( + use_in_memory_redis: RedisSettings, +) -> AsyncIterable[LongRunningClientHelper]: + helper = LongRunningClientHelper(redis_settings=use_in_memory_redis) + + await helper.setup() + yield helper + await helper.shutdown() + + +async def test_cleanup_namespace( + store: RedisStore, + task_data: TaskData, + long_running_client_helper: LongRunningClientHelper, + lrt_namespace: LRTNamespace, +) -> None: + # create entries in both sides + await store.add_task_data(task_data.task_id, task_data) + await 
store.mark_for_removal(task_data.task_id) + + # entries exist + marked_for_removal = deepcopy(task_data) + marked_for_removal.marked_for_removal = True + assert await store.list_tasks_data() == [marked_for_removal] + + # removes + await long_running_client_helper.cleanup(lrt_namespace) + + # entries were removed + assert await store.list_tasks_data() == [] + + # ensure it does not raise errors if there is nothing to remove + await long_running_client_helper.cleanup(lrt_namespace) diff --git a/packages/service-library/tests/long_running_tasks/test_long_running_tasks_lrt_api.py b/packages/service-library/tests/long_running_tasks/test_long_running_tasks_lrt_api.py new file mode 100644 index 000000000000..88e464ee5b01 --- /dev/null +++ b/packages/service-library/tests/long_running_tasks/test_long_running_tasks_lrt_api.py @@ -0,0 +1,316 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument + +import asyncio +import secrets +from collections.abc import Awaitable, Callable +from typing import Any, Final + +import pytest +from models_library.api_schemas_long_running_tasks.base import TaskProgress +from pydantic import NonNegativeInt +from pytest_simcore.helpers.long_running_tasks import assert_task_is_no_longer_present +from servicelib.long_running_tasks import lrt_api +from servicelib.long_running_tasks.manager import LongRunningManager +from servicelib.long_running_tasks.models import LRTNamespace, TaskContext +from servicelib.long_running_tasks.task import TaskId, TaskRegistry +from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings +from tenacity import ( + AsyncRetrying, + TryAgain, + retry_if_exception_type, + stop_after_delay, + wait_fixed, +) + +pytest_simcore_core_services_selection = [ + "rabbit", +] + +_RETRY_PARAMS: dict[str, Any] = { + "reraise": True, + "wait": wait_fixed(0.1), + "stop": stop_after_delay(60), + "retry": retry_if_exception_type((AssertionError, TryAgain)), +} + + +async def _task_echo_input(progress: TaskProgress, to_return: Any) -> Any: + return to_return + + +class _TestingError(Exception): + pass + + +async def _task_always_raise(progress: TaskProgress) -> None: + msg = "This task always raises an error" + raise _TestingError(msg) + + +async def _task_takes_too_long(progress: TaskProgress) -> None: + # Simulate a long-running task that is taking too much time + await asyncio.sleep(1e9) + + +TaskRegistry.register(_task_echo_input) +TaskRegistry.register(_task_always_raise, allowed_errors=(_TestingError,)) +TaskRegistry.register(_task_takes_too_long) + + +@pytest.fixture +def managers_count() -> NonNegativeInt: + return 5 + + +@pytest.fixture +async def long_running_managers( + disable_stale_tasks_monitor: None, + managers_count: NonNegativeInt, + use_in_memory_redis: RedisSettings, + rabbit_service: RabbitSettings, + get_long_running_manager: Callable[ + [RedisSettings, RabbitSettings, LRTNamespace | None], + Awaitable[LongRunningManager], + ], +) -> list[LongRunningManager]: + managers: list[LongRunningManager] = [] + for _ in range(managers_count): + long_running_manager = await get_long_running_manager( + use_in_memory_redis, rabbit_service, "some-service" + ) + managers.append(long_running_manager) + + return managers + + +def _get_long_running_manager( + long_running_managers: list[LongRunningManager], +) -> LongRunningManager: + return secrets.choice(long_running_managers) + + +async def
_assert_task_status( + rabbitmq_rpc_client: RabbitMQRPCClient, + long_running_manager: LongRunningManager, + task_id: TaskId, + *, + is_done: bool +) -> None: + result = await lrt_api.get_task_status( + rabbitmq_rpc_client, long_running_manager.lrt_namespace, TaskContext(), task_id + ) + assert result.done is is_done + + +async def _assert_task_status_on_random_manager( + rabbitmq_rpc_client: RabbitMQRPCClient, + long_running_managers: list[LongRunningManager], + task_ids: list[TaskId], + *, + is_done: bool = True +) -> None: + for task_id in task_ids: + result = await lrt_api.get_task_status( + rabbitmq_rpc_client, + _get_long_running_manager(long_running_managers).lrt_namespace, + TaskContext(), + task_id, + ) + assert result.done is is_done + + +async def _assert_task_status_done_on_all_managers( + rabbitmq_rpc_client: RabbitMQRPCClient, + long_running_managers: list[LongRunningManager], + task_id: TaskId, + *, + is_done: bool = True +) -> None: + async for attempt in AsyncRetrying(**_RETRY_PARAMS): + with attempt: + await _assert_task_status( + rabbitmq_rpc_client, + _get_long_running_manager(long_running_managers), + task_id, + is_done=is_done, + ) + + # check can do this form any task manager + for manager in long_running_managers: + await _assert_task_status( + rabbitmq_rpc_client, manager, task_id, is_done=is_done + ) + + +async def _assert_list_tasks_from_all_managers( + rabbitmq_rpc_client: RabbitMQRPCClient, + long_running_managers: list[LongRunningManager], + task_context: TaskContext, + task_count: int, +) -> None: + for manager in long_running_managers: + tasks = await lrt_api.list_tasks( + rabbitmq_rpc_client, manager.lrt_namespace, task_context + ) + assert len(tasks) == task_count + + +_TASK_CONTEXT: Final[list[TaskContext | None]] = [{"a": "context"}, None] +_IS_UNIQUE: Final[list[bool]] = [False, True] +_TASK_COUNT: Final[list[int]] = [5] + + +@pytest.mark.parametrize("task_count", _TASK_COUNT) +@pytest.mark.parametrize("task_context", _TASK_CONTEXT) +@pytest.mark.parametrize("is_unique", _IS_UNIQUE) +@pytest.mark.parametrize("to_return", [{"key": "value"}]) +async def test_workflow_with_result( + disable_stale_tasks_monitor: None, + fast_long_running_tasks_cancellation: None, + long_running_managers: list[LongRunningManager], + rabbitmq_rpc_client: RabbitMQRPCClient, + task_count: int, + is_unique: bool, + task_context: TaskContext | None, + to_return: Any, +): + saved_context = task_context or {} + task_count = 1 if is_unique else task_count + + task_ids: list[TaskId] = [] + for _ in range(task_count): + task_id = await lrt_api.start_task( + _get_long_running_manager(long_running_managers).rpc_client, + _get_long_running_manager(long_running_managers).lrt_namespace, + _task_echo_input.__name__, + unique=is_unique, + task_name=None, + task_context=task_context, + fire_and_forget=False, + to_return=to_return, + ) + task_ids.append(task_id) + + for task_id in task_ids: + await _assert_task_status_done_on_all_managers( + rabbitmq_rpc_client, long_running_managers, task_id + ) + + await _assert_list_tasks_from_all_managers( + rabbitmq_rpc_client, long_running_managers, saved_context, task_count=task_count + ) + + # avoids tasks getting garbage collected + await _assert_task_status_on_random_manager( + rabbitmq_rpc_client, long_running_managers, task_ids, is_done=True + ) + + for task_id in task_ids: + result = await lrt_api.get_task_result( + rabbitmq_rpc_client, + _get_long_running_manager(long_running_managers).lrt_namespace, + saved_context, + task_id, + ) + assert 
result == to_return + + await assert_task_is_no_longer_present( + _get_long_running_manager(long_running_managers), task_id, saved_context + ) + + +@pytest.mark.parametrize("task_count", _TASK_COUNT) +@pytest.mark.parametrize("task_context", _TASK_CONTEXT) +@pytest.mark.parametrize("is_unique", _IS_UNIQUE) +async def test_workflow_raises_error( + disable_stale_tasks_monitor: None, + fast_long_running_tasks_cancellation: None, + long_running_managers: list[LongRunningManager], + rabbitmq_rpc_client: RabbitMQRPCClient, + task_count: int, + is_unique: bool, + task_context: TaskContext | None, +): + saved_context = task_context or {} + task_count = 1 if is_unique else task_count + + task_ids: list[TaskId] = [] + for _ in range(task_count): + task_id = await lrt_api.start_task( + _get_long_running_manager(long_running_managers).rpc_client, + _get_long_running_manager(long_running_managers).lrt_namespace, + _task_always_raise.__name__, + unique=is_unique, + task_name=None, + task_context=task_context, + fire_and_forget=False, + ) + task_ids.append(task_id) + + for task_id in task_ids: + await _assert_task_status_done_on_all_managers( + rabbitmq_rpc_client, long_running_managers, task_id + ) + + await _assert_list_tasks_from_all_managers( + rabbitmq_rpc_client, long_running_managers, saved_context, task_count=task_count + ) + + # avoids tasks getting garbage collected + await _assert_task_status_on_random_manager( + rabbitmq_rpc_client, long_running_managers, task_ids, is_done=True + ) + + for task_id in task_ids: + with pytest.raises(_TestingError, match="This task always raises an error"): + await lrt_api.get_task_result( + rabbitmq_rpc_client, + _get_long_running_manager(long_running_managers).lrt_namespace, + saved_context, + task_id, + ) + + await assert_task_is_no_longer_present( + _get_long_running_manager(long_running_managers), task_id, saved_context + ) + + +@pytest.mark.parametrize("task_context", _TASK_CONTEXT) +@pytest.mark.parametrize("is_unique", _IS_UNIQUE) +async def test_remove_task( + disable_stale_tasks_monitor: None, + fast_long_running_tasks_cancellation: None, + long_running_managers: list[LongRunningManager], + rabbitmq_rpc_client: RabbitMQRPCClient, + is_unique: bool, + task_context: TaskContext | None, +): + task_id = await lrt_api.start_task( + _get_long_running_manager(long_running_managers).rpc_client, + _get_long_running_manager(long_running_managers).lrt_namespace, + _task_takes_too_long.__name__, + unique=is_unique, + task_name=None, + task_context=task_context, + fire_and_forget=False, + ) + saved_context = task_context or {} + + await _assert_task_status_done_on_all_managers( + rabbitmq_rpc_client, long_running_managers, task_id, is_done=False + ) + + await lrt_api.remove_task( + rabbitmq_rpc_client, + _get_long_running_manager(long_running_managers).lrt_namespace, + saved_context, + task_id, + ) + + await assert_task_is_no_longer_present( + _get_long_running_manager(long_running_managers), task_id, saved_context + ) diff --git a/packages/service-library/tests/long_running_tasks/test_long_running_tasks_models.py b/packages/service-library/tests/long_running_tasks/test_long_running_tasks_models.py index f21417da788e..a92765eb010c 100644 --- a/packages/service-library/tests/long_running_tasks/test_long_running_tasks_models.py +++ b/packages/service-library/tests/long_running_tasks/test_long_running_tasks_models.py @@ -1,4 +1,4 @@ -from servicelib.long_running_tasks._models import TaskProgress +from servicelib.long_running_tasks.models import TaskProgress def 
test_progress_has_no_more_than_3_digits(): diff --git a/packages/service-library/tests/long_running_tasks/test_long_running_tasks_task.py b/packages/service-library/tests/long_running_tasks/test_long_running_tasks_task.py index 6d3b9c837f26..0808878818a9 100644 --- a/packages/service-library/tests/long_running_tasks/test_long_running_tasks_task.py +++ b/packages/service-library/tests/long_running_tasks/test_long_running_tasks_task.py @@ -6,29 +6,48 @@ import asyncio import urllib.parse -from collections.abc import AsyncIterator +from collections.abc import Awaitable, Callable from datetime import datetime -from typing import Any, Final +from typing import Any import pytest from faker import Faker -from servicelib.long_running_tasks._errors import ( +from models_library.api_schemas_long_running_tasks.base import ProgressMessage +from servicelib.long_running_tasks import lrt_api +from servicelib.long_running_tasks._serialization import ( + loads, +) +from servicelib.long_running_tasks.errors import ( TaskAlreadyRunningError, - TaskCancelledError, TaskNotCompletedError, TaskNotFoundError, + TaskNotRegisteredError, + TaskRaisedUnserializableError, +) +from servicelib.long_running_tasks.manager import ( + LongRunningManager, ) -from servicelib.long_running_tasks._models import ( - ProgressPercent, +from servicelib.long_running_tasks.models import ( + LRTNamespace, + ResultField, + TaskContext, TaskProgress, TaskStatus, ) -from servicelib.long_running_tasks._task import TasksManager, start_task -from tenacity import TryAgain +from servicelib.long_running_tasks.task import TaskRegistry +from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings +from tenacity import TryAgain, retry, stop_after_attempt from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_delay from tenacity.wait import wait_fixed +from utils import TEST_CHECK_STALE_INTERVAL_S + +pytest_simcore_core_services_selection = [ + "rabbit", +] _RETRY_PARAMS: dict[str, Any] = { "reraise": True, @@ -38,329 +57,602 @@ } +class _TetingError(Exception): + pass + + async def a_background_task( - task_progress: TaskProgress, + progress: TaskProgress, raise_when_finished: bool, total_sleep: int, ) -> int: """sleeps and raises an error or returns 42""" for i in range(total_sleep): await asyncio.sleep(1) - task_progress.update(percent=ProgressPercent((i + 1) / total_sleep)) + await progress.update(percent=(i + 1) / total_sleep) if raise_when_finished: msg = "raised this error as instructed" - raise RuntimeError(msg) + raise _TetingError(msg) return 42 -async def fast_background_task(task_progress: TaskProgress) -> int: - """this task does nothing and returns a constant""" +async def fast_background_task(progress: TaskProgress) -> int: return 42 -async def failing_background_task(task_progress: TaskProgress): - """this task does nothing and returns a constant""" +async def failing_background_task(progress: TaskProgress): msg = "failing asap" - raise RuntimeError(msg) + raise _TetingError(msg) + + +async def failing_unpicklable_background_task(progress: TaskProgress): + @retry( + stop=stop_after_attempt(2), + reraise=False, + ) + async def _innter_fail() -> None: + msg = "always fails with retry" + raise _TetingError(msg) + await _innter_fail() -TEST_CHECK_STALE_INTERVAL_S: Final[float] = 1 + +TaskRegistry.register(a_background_task) 
+TaskRegistry.register(fast_background_task) +TaskRegistry.register(failing_background_task) +TaskRegistry.register(failing_unpicklable_background_task) + + +@pytest.fixture +def empty_context() -> TaskContext: + return {} @pytest.fixture -async def tasks_manager() -> AsyncIterator[TasksManager]: - tasks_manager = TasksManager( - stale_task_check_interval_s=TEST_CHECK_STALE_INTERVAL_S, - stale_task_detect_timeout_s=TEST_CHECK_STALE_INTERVAL_S, +async def long_running_manager( + use_in_memory_redis: RedisSettings, + rabbit_service: RabbitSettings, + get_long_running_manager: Callable[ + [RedisSettings, RabbitSettings, LRTNamespace | None], + Awaitable[LongRunningManager], + ], +) -> LongRunningManager: + return await get_long_running_manager( + use_in_memory_redis, rabbit_service, "rabbit-namespace" ) - yield tasks_manager - await tasks_manager.close() @pytest.mark.parametrize("check_task_presence_before", [True, False]) async def test_task_is_auto_removed( - tasks_manager: TasksManager, check_task_presence_before: bool + long_running_manager: LongRunningManager, + check_task_presence_before: bool, + empty_context: TaskContext, ): - task_id = start_task( - tasks_manager, - a_background_task, + task_id = await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + a_background_task.__name__, raise_when_finished=False, total_sleep=10 * TEST_CHECK_STALE_INTERVAL_S, + task_context=empty_context, ) if check_task_presence_before: # immediately after starting the task is still there - task_status = tasks_manager.get_task_status(task_id, with_task_context=None) + task_status = await long_running_manager.tasks_manager.get_task_status( + task_id, with_task_context=empty_context + ) assert task_status # wait for task to be automatically removed # meaning no calls via the manager methods are received async for attempt in AsyncRetrying(**_RETRY_PARAMS): with attempt: - for tasks in tasks_manager._tasks_groups.values(): # noqa: SLF001 - if task_id in tasks: - msg = "wait till no element is found any longer" - raise TryAgain(msg) + if ( + await long_running_manager.tasks_manager._tasks_data.get_task_data( # noqa: SLF001 + task_id + ) + is not None + ): + msg = "wait till no element is found any longer" + raise TryAgain(msg) with pytest.raises(TaskNotFoundError): - tasks_manager.get_task_status(task_id, with_task_context=None) + await long_running_manager.tasks_manager.get_task_status( + task_id, with_task_context=empty_context + ) with pytest.raises(TaskNotFoundError): - tasks_manager.get_task_result(task_id, with_task_context=None) + await long_running_manager.tasks_manager.get_task_result( + task_id, with_task_context=empty_context + ) -async def test_checked_task_is_not_auto_removed(tasks_manager: TasksManager): - task_id = start_task( - tasks_manager, - a_background_task, +@pytest.mark.parametrize("wait_multiplier", [1, 2, 3, 4, 5, 6]) +async def test_checked_task_is_not_auto_removed( + long_running_manager: LongRunningManager, + empty_context: TaskContext, + wait_multiplier: int, +): + task_id = await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + a_background_task.__name__, raise_when_finished=False, - total_sleep=5 * TEST_CHECK_STALE_INTERVAL_S, + total_sleep=wait_multiplier * TEST_CHECK_STALE_INTERVAL_S, + task_context=empty_context, ) async for attempt in AsyncRetrying(**_RETRY_PARAMS): with attempt: - status = tasks_manager.get_task_status(task_id, with_task_context=None) + status = await 
long_running_manager.tasks_manager.get_task_status( + task_id, with_task_context=empty_context + ) assert status.done, f"task {task_id} not complete" - result = tasks_manager.get_task_result(task_id, with_task_context=None) + result = await long_running_manager.tasks_manager.get_task_result( + task_id, with_task_context=empty_context + ) assert result -async def test_fire_and_forget_task_is_not_auto_removed(tasks_manager: TasksManager): - task_id = start_task( - tasks_manager, - a_background_task, +def _get_resutlt(result_field: ResultField) -> Any: + assert result_field.str_result + return loads(result_field.str_result) + + +async def test_fire_and_forget_task_is_not_auto_removed( + long_running_manager: LongRunningManager, empty_context: TaskContext +): + task_id = await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + a_background_task.__name__, raise_when_finished=False, total_sleep=5 * TEST_CHECK_STALE_INTERVAL_S, fire_and_forget=True, + task_context=empty_context, ) await asyncio.sleep(3 * TEST_CHECK_STALE_INTERVAL_S) # the task shall still be present even if we did not check the status before - status = tasks_manager.get_task_status(task_id, with_task_context=None) + status = await long_running_manager.tasks_manager.get_task_status( + task_id, with_task_context=empty_context + ) assert not status.done, "task was removed although it is fire and forget" # the task shall finish - await asyncio.sleep(3 * TEST_CHECK_STALE_INTERVAL_S) + await asyncio.sleep(4 * TEST_CHECK_STALE_INTERVAL_S) # get the result - task_result = tasks_manager.get_task_result(task_id, with_task_context=None) - assert task_result == 42 + task_result = await long_running_manager.tasks_manager.get_task_result( + task_id, with_task_context=empty_context + ) + assert _get_resutlt(task_result) == 42 -async def test_get_result_of_unfinished_task_raises(tasks_manager: TasksManager): - task_id = start_task( - tasks_manager, - a_background_task, +async def test_get_result_of_unfinished_task_raises( + long_running_manager: LongRunningManager, empty_context: TaskContext +): + task_id = await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + a_background_task.__name__, raise_when_finished=False, total_sleep=5 * TEST_CHECK_STALE_INTERVAL_S, + task_context=empty_context, ) with pytest.raises(TaskNotCompletedError): - tasks_manager.get_task_result(task_id, with_task_context=None) + await long_running_manager.tasks_manager.get_task_result( + task_id, with_task_context=empty_context + ) -async def test_unique_task_already_running(tasks_manager: TasksManager): - async def unique_task(task_progress: TaskProgress): +async def test_unique_task_already_running( + long_running_manager: LongRunningManager, empty_context: TaskContext +): + async def unique_task(progress: TaskProgress): + _ = progress await asyncio.sleep(1) - start_task(tasks_manager=tasks_manager, task=unique_task, unique=True) + TaskRegistry.register(unique_task) + + await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + unique_task.__name__, + unique=True, + task_context=empty_context, + ) # ensure unique running task regardless of how many times it gets started with pytest.raises(TaskAlreadyRunningError) as exec_info: - start_task(tasks_manager=tasks_manager, task=unique_task, unique=True) + await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + unique_task.__name__, + unique=True, + 
task_context=empty_context, + ) assert "must be unique, found: " in f"{exec_info.value}" + TaskRegistry.unregister(unique_task) -async def test_start_multiple_not_unique_tasks(tasks_manager: TasksManager): - async def not_unique_task(task_progress: TaskProgress): + +async def test_start_multiple_not_unique_tasks( + long_running_manager: LongRunningManager, empty_context: TaskContext +): + async def not_unique_task(progress: TaskProgress): await asyncio.sleep(1) + TaskRegistry.register(not_unique_task) + for _ in range(5): - start_task(tasks_manager=tasks_manager, task=not_unique_task) + await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + not_unique_task.__name__, + task_context=empty_context, + ) + TaskRegistry.unregister(not_unique_task) -def test_get_task_id(faker): - obj1 = TasksManager.create_task_id(faker.word()) # noqa: SLF001 - obj2 = TasksManager.create_task_id(faker.word()) # noqa: SLF001 + +@pytest.mark.parametrize("is_unique", [True, False]) +async def test_get_task_id( + long_running_manager: LongRunningManager, faker: Faker, is_unique: bool +): + obj1 = long_running_manager.tasks_manager._get_task_id( # noqa: SLF001 + faker.word(), is_unique=is_unique + ) + obj2 = long_running_manager.tasks_manager._get_task_id( # noqa: SLF001 + faker.word(), is_unique=is_unique + ) assert obj1 != obj2 -async def test_get_status(tasks_manager: TasksManager): - task_id = start_task( - tasks_manager=tasks_manager, - task=a_background_task, +async def test_get_status( + long_running_manager: LongRunningManager, empty_context: TaskContext +): + task_id = await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + a_background_task.__name__, raise_when_finished=False, total_sleep=10, + task_context=empty_context, + ) + task_status = await long_running_manager.tasks_manager.get_task_status( + task_id, with_task_context=empty_context ) - task_status = tasks_manager.get_task_status(task_id, with_task_context=None) assert isinstance(task_status, TaskStatus) - assert task_status.task_progress.message == "" + assert isinstance(task_status.task_progress.message, ProgressMessage) assert task_status.task_progress.percent == 0.0 - assert task_status.done == False + assert task_status.done is False assert isinstance(task_status.started, datetime) -async def test_get_status_missing(tasks_manager: TasksManager): +async def test_get_status_missing( + long_running_manager: LongRunningManager, empty_context: TaskContext +): with pytest.raises(TaskNotFoundError) as exec_info: - tasks_manager.get_task_status("missing_task_id", with_task_context=None) + await long_running_manager.tasks_manager.get_task_status( + "missing_task_id", with_task_context=empty_context + ) assert f"{exec_info.value}" == "No task with missing_task_id found" -async def test_get_result(tasks_manager: TasksManager): - task_id = start_task(tasks_manager=tasks_manager, task=fast_background_task) - await asyncio.sleep(0.1) - result = tasks_manager.get_task_result(task_id, with_task_context=None) - assert result == 42 +async def test_get_result( + long_running_manager: LongRunningManager, empty_context: TaskContext +): + task_id = await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + fast_background_task.__name__, + task_context=empty_context, + ) + + async for attempt in AsyncRetrying(**_RETRY_PARAMS): + with attempt: + status = await long_running_manager.tasks_manager.get_task_status( + task_id, 
with_task_context=empty_context + ) + assert status.done is True + + result = await long_running_manager.tasks_manager.get_task_result( + task_id, with_task_context=empty_context + ) + assert _get_resutlt(result) == 42 -async def test_get_result_missing(tasks_manager: TasksManager): +async def test_get_result_missing( + long_running_manager: LongRunningManager, empty_context: TaskContext +): with pytest.raises(TaskNotFoundError) as exec_info: - tasks_manager.get_task_result("missing_task_id", with_task_context=None) + await long_running_manager.tasks_manager.get_task_result( + "missing_task_id", with_task_context=empty_context + ) assert f"{exec_info.value}" == "No task with missing_task_id found" -async def test_get_result_finished_with_error(tasks_manager: TasksManager): - task_id = start_task(tasks_manager=tasks_manager, task=failing_background_task) +async def test_get_result_finished_with_error( + long_running_manager: LongRunningManager, empty_context: TaskContext +): + task_id = await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + failing_background_task.__name__, + task_context=empty_context, + ) # wait for result async for attempt in AsyncRetrying(**_RETRY_PARAMS): with attempt: - assert tasks_manager.get_task_status(task_id, with_task_context=None).done - - with pytest.raises(RuntimeError, match="failing asap"): - tasks_manager.get_task_result(task_id, with_task_context=None) + assert ( + await long_running_manager.tasks_manager.get_task_status( + task_id, with_task_context=empty_context + ) + ).done + + result = await long_running_manager.tasks_manager.get_task_result( + task_id, with_task_context=empty_context + ) + assert result.str_error is not None # nosec + with pytest.raises(_TetingError, match="failing asap"): + loads(result.str_error) -async def test_get_result_task_was_cancelled_multiple_times( - tasks_manager: TasksManager, +async def test_get_result_finished_with_unpicklable_error( + long_running_manager: LongRunningManager, empty_context: TaskContext +): + task_id = await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + failing_unpicklable_background_task.__name__, + task_context=empty_context, + ) + # wait for result + async for attempt in AsyncRetrying(**_RETRY_PARAMS): + with attempt: + assert ( + await long_running_manager.tasks_manager.get_task_status( + task_id, with_task_context=empty_context + ) + ).done + + result = await long_running_manager.tasks_manager.get_task_result( + task_id, with_task_context=empty_context + ) + assert result.str_error is not None # nosec + with pytest.raises(TaskRaisedUnserializableError, match="cannot pickle"): + loads(result.str_error) + + +async def test_cancel_task_from_different_manager( + rabbit_service: RabbitSettings, + use_in_memory_redis: RedisSettings, + get_long_running_manager: Callable[ + [RedisSettings, RabbitSettings, LRTNamespace | None], + Awaitable[LongRunningManager], + ], + empty_context: TaskContext, ): - task_id = start_task( - tasks_manager=tasks_manager, - task=a_background_task, + manager_1 = await get_long_running_manager( + use_in_memory_redis, rabbit_service, "test-namespace" + ) + manager_2 = await get_long_running_manager( + use_in_memory_redis, rabbit_service, "test-namespace" + ) + manager_3 = await get_long_running_manager( + use_in_memory_redis, rabbit_service, "test-namespace" + ) + + task_id = await lrt_api.start_task( + manager_1.rpc_client, + manager_1.lrt_namespace, + a_background_task.__name__, 
raise_when_finished=False, - total_sleep=10, + total_sleep=1, + task_context=empty_context, ) - for _ in range(5): - await tasks_manager.cancel_task(task_id, with_task_context=None) - with pytest.raises( - TaskCancelledError, match=f"Task {task_id} was cancelled before completing" - ): - tasks_manager.get_task_result(task_id, with_task_context=None) + # wati for task to complete + for manager in (manager_1, manager_2, manager_3): + status = await manager.tasks_manager.get_task_status(task_id, empty_context) + assert status.done is False + + async for attempt in AsyncRetrying(**_RETRY_PARAMS): + with attempt: + for manager in (manager_1, manager_2, manager_3): + status = await manager.tasks_manager.get_task_status( + task_id, empty_context + ) + assert status.done is True + + # check all provide the same result + for manager in (manager_1, manager_2, manager_3): + task_result = await manager.tasks_manager.get_task_result( + task_id, empty_context + ) + assert _get_resutlt(task_result) == 42 -async def test_remove_task(tasks_manager: TasksManager): - task_id = start_task( - tasks_manager=tasks_manager, - task=a_background_task, +async def test_remove_task( + long_running_manager: LongRunningManager, empty_context: TaskContext +): + task_id = await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + a_background_task.__name__, raise_when_finished=False, total_sleep=10, + task_context=empty_context, + ) + await long_running_manager.tasks_manager.get_task_status( + task_id, with_task_context=empty_context + ) + await long_running_manager.tasks_manager.remove_task( + task_id, with_task_context=empty_context, wait_for_removal=True ) - tasks_manager.get_task_status(task_id, with_task_context=None) - await tasks_manager.remove_task(task_id, with_task_context=None) with pytest.raises(TaskNotFoundError): - tasks_manager.get_task_status(task_id, with_task_context=None) + await long_running_manager.tasks_manager.get_task_status( + task_id, with_task_context=empty_context + ) with pytest.raises(TaskNotFoundError): - tasks_manager.get_task_result(task_id, with_task_context=None) + await long_running_manager.tasks_manager.get_task_result( + task_id, with_task_context=empty_context + ) -async def test_remove_task_with_task_context(tasks_manager: TasksManager): - TASK_CONTEXT = {"some_context": "some_value"} - task_id = start_task( - tasks_manager=tasks_manager, - task=a_background_task, +async def test_remove_task_with_task_context( + long_running_manager: LongRunningManager, empty_context: TaskContext +): + task_id = await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + a_background_task.__name__, raise_when_finished=False, total_sleep=10, - task_context=TASK_CONTEXT, + task_context=empty_context, ) # getting status fails if wrong task context given with pytest.raises(TaskNotFoundError): - tasks_manager.get_task_status( + await long_running_manager.tasks_manager.get_task_status( task_id, with_task_context={"wrong_task_context": 12} ) - tasks_manager.get_task_status(task_id, with_task_context=TASK_CONTEXT) + await long_running_manager.tasks_manager.get_task_status( + task_id, with_task_context=empty_context + ) # removing task fails if wrong task context given with pytest.raises(TaskNotFoundError): - await tasks_manager.remove_task( - task_id, with_task_context={"wrong_task_context": 12} + await long_running_manager.tasks_manager.remove_task( + task_id, with_task_context={"wrong_task_context": 12}, 
wait_for_removal=True ) - await tasks_manager.remove_task(task_id, with_task_context=TASK_CONTEXT) + await long_running_manager.tasks_manager.remove_task( + task_id, with_task_context=empty_context, wait_for_removal=True + ) -async def test_remove_unknown_task(tasks_manager: TasksManager): +async def test_remove_unknown_task( + long_running_manager: LongRunningManager, empty_context: TaskContext +): with pytest.raises(TaskNotFoundError): - await tasks_manager.remove_task("invalid_id", with_task_context=None) - - await tasks_manager.remove_task( - "invalid_id", with_task_context=None, reraise_errors=False - ) + await long_running_manager.tasks_manager.remove_task( + "invalid_id", with_task_context=empty_context, wait_for_removal=True + ) -async def test_cancel_task_with_task_context(tasks_manager: TasksManager): - TASK_CONTEXT = {"some_context": "some_value"} - task_id = start_task( - tasks_manager=tasks_manager, - task=a_background_task, +async def test__cancelled_tasks_worker_equivalent_of_cancellation_from_a_different_process( + long_running_manager: LongRunningManager, empty_context: TaskContext +): + task_id = await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + a_background_task.__name__, raise_when_finished=False, total_sleep=10, - task_context=TASK_CONTEXT, + task_context=empty_context, + ) + await long_running_manager.tasks_manager._tasks_data.mark_for_removal( # noqa: SLF001 + task_id ) - # getting status fails if wrong task context given - with pytest.raises(TaskNotFoundError): - tasks_manager.get_task_status( - task_id, with_task_context={"wrong_task_context": 12} - ) - # getting status fails if wrong task context given - with pytest.raises(TaskNotFoundError): - await tasks_manager.cancel_task( - task_id, with_task_context={"wrong_task_context": 12} - ) - await tasks_manager.cancel_task(task_id, with_task_context=TASK_CONTEXT) - -async def test_list_tasks(tasks_manager: TasksManager): - assert tasks_manager.list_tasks(with_task_context=None) == [] + async for attempt in AsyncRetrying(**_RETRY_PARAMS): + with attempt: # noqa: SIM117 + with pytest.raises(TaskNotFoundError): + assert ( + await long_running_manager.tasks_manager.get_task_status( + task_id, empty_context + ) + is None + ) + + +async def test_list_tasks( + disable_stale_tasks_monitor: None, + long_running_manager: LongRunningManager, + empty_context: TaskContext, +): + assert ( + await long_running_manager.tasks_manager.list_tasks( + with_task_context=empty_context + ) + == [] + ) # start a bunch of tasks NUM_TASKS = 10 task_ids = [] for _ in range(NUM_TASKS): task_ids.append( # noqa: PERF401 - start_task( - tasks_manager=tasks_manager, - task=a_background_task, + await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + a_background_task.__name__, raise_when_finished=False, total_sleep=10, + task_context=empty_context, + ) + ) + assert ( + len( + await long_running_manager.tasks_manager.list_tasks( + with_task_context=empty_context ) ) - assert len(tasks_manager.list_tasks(with_task_context=None)) == NUM_TASKS + == NUM_TASKS + ) for task_index, task_id in enumerate(task_ids): - await tasks_manager.remove_task(task_id, with_task_context=None) - assert len(tasks_manager.list_tasks(with_task_context=None)) == NUM_TASKS - ( - task_index + 1 + await long_running_manager.tasks_manager.remove_task( + task_id, with_task_context=empty_context, wait_for_removal=True ) + assert len( + await long_running_manager.tasks_manager.list_tasks( + 
with_task_context=empty_context + ) + ) == NUM_TASKS - (task_index + 1) -async def test_list_tasks_filtering(tasks_manager: TasksManager): - start_task( - tasks_manager=tasks_manager, - task=a_background_task, +async def test_list_tasks_filtering( + long_running_manager: LongRunningManager, empty_context: TaskContext +): + await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + a_background_task.__name__, raise_when_finished=False, total_sleep=10, + task_context=empty_context, ) - start_task( - tasks_manager=tasks_manager, - task=a_background_task, + await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + a_background_task.__name__, raise_when_finished=False, total_sleep=10, task_context={"user_id": 213}, ) - start_task( - tasks_manager=tasks_manager, - task=a_background_task, + await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + a_background_task.__name__, raise_when_finished=False, total_sleep=10, task_context={"user_id": 213, "product": "osparc"}, ) - assert len(tasks_manager.list_tasks(with_task_context=None)) == 3 - assert len(tasks_manager.list_tasks(with_task_context={"user_id": 213})) == 1 assert ( len( - tasks_manager.list_tasks( + await long_running_manager.tasks_manager.list_tasks( + with_task_context=empty_context + ) + ) + == 3 + ) + assert ( + len( + await long_running_manager.tasks_manager.list_tasks( + with_task_context={"user_id": 213} + ) + ) + == 1 + ) + assert ( + len( + await long_running_manager.tasks_manager.list_tasks( with_task_context={"user_id": 213, "product": "osparc"} ) ) @@ -368,7 +660,7 @@ async def test_list_tasks_filtering(tasks_manager: TasksManager): ) assert ( len( - tasks_manager.list_tasks( + await long_running_manager.tasks_manager.list_tasks( with_task_context={"user_id": 120, "product": "osparc"} ) ) @@ -376,13 +668,26 @@ async def test_list_tasks_filtering(tasks_manager: TasksManager): ) -async def test_define_task_name(tasks_manager: TasksManager, faker: Faker): +async def test_define_task_name(long_running_manager: LongRunningManager, faker: Faker): task_name = faker.name() - task_id = start_task( - tasks_manager=tasks_manager, - task=a_background_task, + task_id = await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + a_background_task.__name__, raise_when_finished=False, total_sleep=10, task_name=task_name, ) - assert task_id.startswith(urllib.parse.quote(task_name, safe="")) + assert urllib.parse.quote(task_name, safe="") in task_id + + +async def test_start_not_registered_task( + rabbitmq_rpc_client: RabbitMQRPCClient, + long_running_manager: LongRunningManager, +): + with pytest.raises(TaskNotRegisteredError): + await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + "not_registered_task", + ) diff --git a/packages/service-library/tests/long_running_tasks/utils.py b/packages/service-library/tests/long_running_tasks/utils.py new file mode 100644 index 000000000000..e473dd7e1daf --- /dev/null +++ b/packages/service-library/tests/long_running_tasks/utils.py @@ -0,0 +1,3 @@ +from typing import Final + +TEST_CHECK_STALE_INTERVAL_S: Final[float] = 1 diff --git a/packages/service-library/tests/rabbitmq/test_rabbitmq.py b/packages/service-library/tests/rabbitmq/test_rabbitmq.py index d4c6c4b8ebb1..979d11d26777 100644 --- a/packages/service-library/tests/rabbitmq/test_rabbitmq.py +++ 
b/packages/service-library/tests/rabbitmq/test_rabbitmq.py @@ -314,7 +314,7 @@ async def _always_returning_fail(_: Any) -> bool: @pytest.mark.parametrize("topics", _TOPICS) -@pytest.mark.no_cleanup_check_rabbitmq_server_has_no_errors() +@pytest.mark.no_cleanup_check_rabbitmq_server_has_no_errors async def test_publish_with_no_registered_subscriber( on_message_spy: mock.Mock, create_rabbitmq_client: Callable[[str], RabbitMQClient], @@ -476,7 +476,7 @@ def _raise_once_then_true(*args, **kwargs): @pytest.fixture async def ensure_queue_deletion( - create_rabbitmq_client: Callable[[str], RabbitMQClient] + create_rabbitmq_client: Callable[[str], RabbitMQClient], ) -> AsyncIterator[Callable[[QueueName], None]]: created_queues = set() @@ -723,7 +723,7 @@ async def test_rabbit_adding_topics_to_a_fanout_exchange( await _assert_message_received(mocked_message_parser, 0) -@pytest.mark.no_cleanup_check_rabbitmq_server_has_no_errors() +@pytest.mark.no_cleanup_check_rabbitmq_server_has_no_errors async def test_rabbit_not_using_the_same_exchange_type_raises( create_rabbitmq_client: Callable[[str], RabbitMQClient], random_exchange_name: Callable[[], str], @@ -738,7 +738,7 @@ async def test_rabbit_not_using_the_same_exchange_type_raises( await client.subscribe(exchange_name, mocked_message_parser, topics=[]) -@pytest.mark.no_cleanup_check_rabbitmq_server_has_no_errors() +@pytest.mark.no_cleanup_check_rabbitmq_server_has_no_errors async def test_unsubscribe_consumer( create_rabbitmq_client: Callable[[str], RabbitMQClient], random_exchange_name: Callable[[], str], diff --git a/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc_interfaces_async_jobs.py b/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc_interfaces_async_jobs.py index 72ecc9a8aa68..51874400b907 100644 --- a/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc_interfaces_async_jobs.py +++ b/packages/service-library/tests/rabbitmq/test_rabbitmq_rpc_interfaces_async_jobs.py @@ -2,21 +2,24 @@ import datetime from collections.abc import AsyncIterator from dataclasses import dataclass, field +from typing import Final import pytest +from common_library.async_tools import cancel_wait_task from faker import Faker from models_library.api_schemas_rpc_async_jobs.async_jobs import ( AsyncJobGet, AsyncJobId, - AsyncJobNameData, AsyncJobResult, AsyncJobStatus, ) from models_library.api_schemas_rpc_async_jobs.exceptions import JobMissingError +from models_library.products import ProductName from models_library.progress_bar import ProgressReport from models_library.rabbitmq_basic_types import RPCMethodName, RPCNamespace +from models_library.users import UserID from pydantic import TypeAdapter -from servicelib.async_utils import cancel_wait_task +from servicelib.celery.models import OwnerMetadata from servicelib.rabbitmq import RabbitMQRPCClient, RemoteMethodNotRegisteredError from servicelib.rabbitmq.rpc_interfaces.async_jobs.async_jobs import ( list_jobs, @@ -28,6 +31,14 @@ "rabbit", ] +_ASYNC_JOB_CLIENT_NAME: Final[str] = "pytest_client_name" + + +class _TestOwnerMetadata(OwnerMetadata): + user_id: UserID + product_name: ProductName + owner: str = _ASYNC_JOB_CLIENT_NAME + @pytest.fixture def method_name(faker: Faker) -> RPCMethodName: @@ -35,10 +46,11 @@ def method_name(faker: Faker) -> RPCMethodName: @pytest.fixture -def job_id_data(faker: Faker) -> AsyncJobNameData: - return AsyncJobNameData( +def owner_metadata(faker: Faker) -> OwnerMetadata: + return _TestOwnerMetadata( user_id=faker.pyint(min_value=1), product_name=faker.word(), + 
owner=_ASYNC_JOB_CLIENT_NAME, ) @@ -68,9 +80,9 @@ def _get_task(self, job_id: AsyncJobId) -> asyncio.Task: raise JobMissingError(job_id=f"{job_id}") async def status( - self, job_id: AsyncJobId, job_id_data: AsyncJobNameData + self, job_id: AsyncJobId, owner_metadata: OwnerMetadata ) -> AsyncJobStatus: - assert job_id_data + assert owner_metadata task = self._get_task(job_id) return AsyncJobStatus( job_id=job_id, @@ -79,32 +91,29 @@ async def status( ) async def cancel( - self, job_id: AsyncJobId, job_id_data: AsyncJobNameData + self, job_id: AsyncJobId, owner_metadata: OwnerMetadata ) -> None: assert job_id - assert job_id_data + assert owner_metadata task = self._get_task(job_id) task.cancel() async def result( - self, job_id: AsyncJobId, job_id_data: AsyncJobNameData + self, job_id: AsyncJobId, owner_metadata: OwnerMetadata ) -> AsyncJobResult: - assert job_id_data + assert owner_metadata task = self._get_task(job_id) assert task.done() return AsyncJobResult( result={ "data": task.result(), "job_id": job_id, - "job_id_data": job_id_data, + "owner_metadata": owner_metadata, } ) - async def list_jobs( - self, filter_: str, job_id_data: AsyncJobNameData - ) -> list[AsyncJobGet]: - assert job_id_data - assert filter_ is not None + async def list_jobs(self, owner_metadata: OwnerMetadata) -> list[AsyncJobGet]: + assert owner_metadata return [ AsyncJobGet( @@ -114,8 +123,8 @@ async def list_jobs( for t in self.tasks ] - async def submit(self, job_id_data: AsyncJobNameData) -> AsyncJobGet: - assert job_id_data + async def submit(self, owner_metadata: OwnerMetadata) -> AsyncJobGet: + assert owner_metadata job_id = faker.uuid4(cast_to=None) self.tasks.append(asyncio.create_task(_slow_task(), name=f"{job_id}")) return AsyncJobGet(job_id=job_id, job_name="fake_job_name") @@ -145,7 +154,7 @@ async def test_async_jobs_methods( async_job_rpc_server: RabbitMQRPCClient, rpc_client: RabbitMQRPCClient, namespace: RPCNamespace, - job_id_data: AsyncJobNameData, + owner_metadata: OwnerMetadata, job_id: AsyncJobId, method: str, ): @@ -157,7 +166,7 @@ async def test_async_jobs_methods( rpc_client, rpc_namespace=namespace, job_id=job_id, - job_id_data=job_id_data, + owner_metadata=owner_metadata, ) @@ -166,13 +175,12 @@ async def test_list_jobs( rpc_client: RabbitMQRPCClient, namespace: RPCNamespace, method_name: RPCMethodName, - job_id_data: AsyncJobNameData, + owner_metadata: OwnerMetadata, ): await list_jobs( rpc_client, rpc_namespace=namespace, - filter_="", - job_id_data=job_id_data, + owner_metadata=owner_metadata, ) @@ -181,13 +189,13 @@ async def test_submit( rpc_client: RabbitMQRPCClient, namespace: RPCNamespace, method_name: RPCMethodName, - job_id_data: AsyncJobNameData, + owner_metadata: OwnerMetadata, ): await submit( rpc_client, rpc_namespace=namespace, method_name=method_name, - job_id_data=job_id_data, + owner_metadata=owner_metadata, ) @@ -195,14 +203,14 @@ async def test_submit_with_invalid_method_name( async_job_rpc_server: RabbitMQRPCClient, rpc_client: RabbitMQRPCClient, namespace: RPCNamespace, - job_id_data: AsyncJobNameData, + owner_metadata: OwnerMetadata, ): with pytest.raises(RemoteMethodNotRegisteredError): await submit( rpc_client, rpc_namespace=namespace, method_name=RPCMethodName("invalid_method_name"), - job_id_data=job_id_data, + owner_metadata=owner_metadata, ) @@ -211,14 +219,14 @@ async def test_submit_and_wait_properly_timesout( rpc_client: RabbitMQRPCClient, namespace: RPCNamespace, method_name: RPCMethodName, - job_id_data: AsyncJobNameData, + owner_metadata: 
OwnerMetadata, ): with pytest.raises(TimeoutError): # noqa: PT012 async for _job_composed_result in submit_and_wait( rpc_client, rpc_namespace=namespace, method_name=method_name, - job_id_data=job_id_data, + owner_metadata=owner_metadata, client_timeout=datetime.timedelta(seconds=0.1), ): pass @@ -229,13 +237,13 @@ async def test_submit_and_wait( rpc_client: RabbitMQRPCClient, namespace: RPCNamespace, method_name: RPCMethodName, - job_id_data: AsyncJobNameData, + owner_metadata: OwnerMetadata, ): async for job_composed_result in submit_and_wait( rpc_client, rpc_namespace=namespace, method_name=method_name, - job_id_data=job_id_data, + owner_metadata=owner_metadata, client_timeout=datetime.timedelta(seconds=10), ): if not job_composed_result.done: @@ -243,10 +251,11 @@ async def test_submit_and_wait( await job_composed_result.result() assert job_composed_result.done assert job_composed_result.status.progress.actual_value == 1 - assert await job_composed_result.result() == AsyncJobResult( + result = await job_composed_result.result() + assert result == AsyncJobResult( result={ "data": None, "job_id": job_composed_result.status.job_id, - "job_id_data": job_id_data, + "owner_metadata": owner_metadata, } ) diff --git a/packages/service-library/tests/redis/conftest.py b/packages/service-library/tests/redis/conftest.py index ae6d04c20856..f29c76bdfb22 100644 --- a/packages/service-library/tests/redis/conftest.py +++ b/packages/service-library/tests/redis/conftest.py @@ -12,11 +12,11 @@ @pytest.fixture async def redis_client_sdk( - get_redis_client_sdk: Callable[ + get_in_process_redis_client_sdk: Callable[ [RedisDatabase], AbstractAsyncContextManager[RedisClientSDK] ], ) -> AsyncIterator[RedisClientSDK]: - async with get_redis_client_sdk(RedisDatabase.RESOURCES) as client: + async with get_in_process_redis_client_sdk(RedisDatabase.RESOURCES) as client: yield client @@ -30,3 +30,18 @@ def with_short_default_redis_lock_ttl(mocker: MockerFixture) -> datetime.timedel short_ttl = datetime.timedelta(seconds=0.25) mocker.patch.object(redis_constants, "DEFAULT_LOCK_TTL", short_ttl) return short_ttl + + +@pytest.fixture +def semaphore_name(faker: Faker) -> str: + return faker.pystr() + + +@pytest.fixture +def semaphore_capacity() -> int: + return 3 + + +@pytest.fixture +def short_ttl() -> datetime.timedelta: + return datetime.timedelta(seconds=1) diff --git a/packages/service-library/tests/redis/test_client.py b/packages/service-library/tests/redis/test_client.py index 210c857bb9b4..580c47d0facb 100644 --- a/packages/service-library/tests/redis/test_client.py +++ b/packages/service-library/tests/redis/test_client.py @@ -104,13 +104,11 @@ async def test_redis_lock_with_ttl( assert not await ttl_lock.locked() -async def test_redis_client_sdk_setup_shutdown( - mock_redis_socket_timeout: None, redis_service: RedisSettings -): +async def test_redis_client_sdk_setup_shutdown(redis_service: RedisSettings): # setup redis_resources_dns = redis_service.build_redis_dsn(RedisDatabase.RESOURCES) client = RedisClientSDK(redis_resources_dns, client_name="pytest") - assert client + await client.setup() assert client.redis_dsn == redis_resources_dns # ensure health check task sets the health to True @@ -130,7 +128,6 @@ async def test_redis_client_sdk_setup_shutdown( async def test_regression_fails_on_redis_service_outage( - mock_redis_socket_timeout: None, paused_container: Callable[[str], AbstractAsyncContextManager[None]], redis_client_sdk: RedisClientSDK, ): diff --git 
a/packages/service-library/tests/redis/test_clients_manager.py b/packages/service-library/tests/redis/test_clients_manager.py index eeb110557e33..4bf5bc454f46 100644 --- a/packages/service-library/tests/redis/test_clients_manager.py +++ b/packages/service-library/tests/redis/test_clients_manager.py @@ -16,7 +16,6 @@ async def test_redis_client_sdks_manager( - mock_redis_socket_timeout: None, redis_service: RedisSettings, ): all_redis_configs: set[RedisManagerDBConfig] = { diff --git a/packages/service-library/tests/redis/test_decorators.py b/packages/service-library/tests/redis/test_decorators.py index e4ca9d51463c..019b6595ac7c 100644 --- a/packages/service-library/tests/redis/test_decorators.py +++ b/packages/service-library/tests/redis/test_decorators.py @@ -267,7 +267,7 @@ async def race_condition_increase(self, by: int) -> None: self.value = current_value counter = RaceConditionCounter() - # ensures it does nto time out before acquiring the lock + # ensures it does not time out before acquiring the lock time_for_all_inc_counter_calls_to_finish = ( with_short_default_redis_lock_ttl * INCREASE_OPERATIONS * 10 ) diff --git a/packages/service-library/tests/redis/test_project_document_version.py b/packages/service-library/tests/redis/test_project_document_version.py new file mode 100644 index 000000000000..47b9ca4da307 --- /dev/null +++ b/packages/service-library/tests/redis/test_project_document_version.py @@ -0,0 +1,50 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument + +from typing import cast +from uuid import UUID + +import pytest +from faker import Faker +from models_library.projects import ProjectID +from servicelib.redis import RedisClientSDK +from servicelib.redis._project_document_version import ( + increment_and_return_project_document_version, +) + +pytest_simcore_core_services_selection = [ + "redis", +] +pytest_simcore_ops_services_selection = [ + "redis-commander", +] + + +@pytest.fixture() +def project_uuid(faker: Faker) -> ProjectID: + return cast(UUID, faker.uuid4(cast_to=None)) + + +async def test_project_document_version_workflow( + redis_client_sdk: RedisClientSDK, project_uuid: ProjectID +): + """Test the complete workflow of getting and incrementing project document versions.""" + + # First increment should return 1 + new_version = await increment_and_return_project_document_version( + redis_client_sdk, project_uuid + ) + assert new_version == 1 + + # Second increment should return 2 + new_version = await increment_and_return_project_document_version( + redis_client_sdk, project_uuid + ) + assert new_version == 2 + + # Multiple increments should work correctly + for expected_version in range(3, 6): + new_version = await increment_and_return_project_document_version( + redis_client_sdk, project_uuid + ) + assert new_version == expected_version diff --git a/packages/service-library/tests/redis/test_project_lock.py b/packages/service-library/tests/redis/test_project_lock.py index aa9d7fd1c740..03fe4f0e4627 100644 --- a/packages/service-library/tests/redis/test_project_lock.py +++ b/packages/service-library/tests/redis/test_project_lock.py @@ -10,11 +10,11 @@ from uuid import UUID import pytest +from common_library.async_tools import cancel_wait_task from faker import Faker from models_library.projects import ProjectID from models_library.projects_access import Owner from models_library.projects_state import ProjectLocked, ProjectStatus -from servicelib.async_utils import cancel_wait_task from servicelib.redis import ( ProjectLockError, 
RedisClientSDK, diff --git a/packages/service-library/tests/redis/test_semaphore.py b/packages/service-library/tests/redis/test_semaphore.py new file mode 100644 index 000000000000..755ce716bfe2 --- /dev/null +++ b/packages/service-library/tests/redis/test_semaphore.py @@ -0,0 +1,617 @@ +# ruff: noqa: SLF001, EM101, TRY003, PT011, PLR0917 +# pylint: disable=no-value-for-parameter +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +import asyncio +import datetime +import logging + +import pytest +from faker import Faker +from pytest_mock import MockerFixture +from servicelib.redis import RedisClientSDK +from servicelib.redis._constants import ( + DEFAULT_SEMAPHORE_TTL, + SEMAPHORE_KEY_PREFIX, +) +from servicelib.redis._errors import SemaphoreLostError +from servicelib.redis._semaphore import ( + DistributedSemaphore, + SemaphoreAcquisitionError, + SemaphoreNotAcquiredError, + distributed_semaphore, +) +from servicelib.redis._utils import handle_redis_returns_union_types + +pytest_simcore_core_services_selection = [ + "redis", +] +pytest_simcore_ops_services_selection = [ + "redis-commander", +] + + +@pytest.fixture +def with_short_default_semaphore_ttl( + mocker: MockerFixture, +) -> datetime.timedelta: + short_ttl = datetime.timedelta(seconds=5) + mocker.patch( + "servicelib.redis._semaphore.DEFAULT_SEMAPHORE_TTL", + short_ttl, + ) + return short_ttl + + +async def test_semaphore_initialization( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, + semaphore_capacity: int, +): + semaphore = DistributedSemaphore( + redis_client=redis_client_sdk, key=semaphore_name, capacity=semaphore_capacity + ) + + assert semaphore.key == semaphore_name + assert semaphore.capacity == semaphore_capacity + assert semaphore.ttl == DEFAULT_SEMAPHORE_TTL + assert semaphore.blocking is True + assert semaphore.instance_id is not None + assert ( + semaphore.semaphore_key + == f"{SEMAPHORE_KEY_PREFIX}{semaphore_name}_cap{semaphore_capacity}" + ) + assert semaphore.tokens_key.startswith(f"{semaphore.semaphore_key}:") + assert semaphore.holders_set.startswith(f"{semaphore.semaphore_key}:") + assert semaphore.holder_key.startswith(f"{semaphore.semaphore_key}:") + + +async def test_invalid_semaphore_initialization( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, +): + with pytest.raises(ValueError, match="Input should be greater than 0"): + DistributedSemaphore( + redis_client=redis_client_sdk, key=semaphore_name, capacity=0 + ) + + with pytest.raises(ValueError, match="Input should be greater than 0"): + DistributedSemaphore( + redis_client=redis_client_sdk, key=semaphore_name, capacity=-1 + ) + + with pytest.raises(ValueError, match="TTL must be positive"): + DistributedSemaphore( + redis_client=redis_client_sdk, + key=semaphore_name, + capacity=1, + ttl=datetime.timedelta(seconds=0), + ) + with pytest.raises(ValueError, match="TTL must be positive"): + DistributedSemaphore( + redis_client=redis_client_sdk, + key=semaphore_name, + capacity=1, + ttl=datetime.timedelta(seconds=0.5), + ) + with pytest.raises(ValueError, match="Timeout must be positive"): + DistributedSemaphore( + redis_client=redis_client_sdk, + key=semaphore_name, + capacity=1, + ttl=datetime.timedelta(seconds=10), + blocking=True, + blocking_timeout=datetime.timedelta(seconds=0), + ) + + +async def _assert_semaphore_redis_state( + redis_client_sdk: RedisClientSDK, + semaphore: DistributedSemaphore, + *, + expected_count: int, + 
expected_free_tokens: int, + expected_expired: bool = False, +): + """Helper to assert the internal Redis state of the semaphore""" + holders = await handle_redis_returns_union_types( + redis_client_sdk.redis.smembers(semaphore.holders_set) + ) + assert len(holders) == expected_count + if expected_count > 0: + assert semaphore.instance_id in holders + holder_key_exists = await redis_client_sdk.redis.exists(semaphore.holder_key) + if expected_expired: + assert holder_key_exists == 0 + else: + assert holder_key_exists == 1 + tokens = await handle_redis_returns_union_types( + redis_client_sdk.redis.lrange(semaphore.tokens_key, 0, -1) + ) + assert len(tokens) == expected_free_tokens + + +async def test_semaphore_acquire_release_basic( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, + semaphore_capacity: int, + with_short_default_semaphore_ttl: datetime.timedelta, +): + semaphore = DistributedSemaphore( + redis_client=redis_client_sdk, + key=semaphore_name, + capacity=semaphore_capacity, + ttl=with_short_default_semaphore_ttl, + ) + + # Initially not acquired + assert await semaphore.current_count() == 0 + assert await semaphore.available_tokens() == semaphore_capacity + assert await semaphore.is_acquired() is False + await _assert_semaphore_redis_state( + redis_client_sdk, + semaphore, + expected_count=0, + expected_free_tokens=semaphore_capacity, + ) + + # Acquire + result = await semaphore.acquire() + assert result is True + assert await semaphore.current_count() == 1 + assert await semaphore.available_tokens() == semaphore_capacity - 1 + assert await semaphore.is_acquired() is True + await _assert_semaphore_redis_state( + redis_client_sdk, + semaphore, + expected_count=1, + expected_free_tokens=semaphore_capacity - 1, + ) + + # Acquire again on same instance should return True immediately and keep the same count (reentrant) + result = await semaphore.acquire() + assert result is True + assert await semaphore.current_count() == 1 + assert await semaphore.available_tokens() == semaphore_capacity - 1 + assert await semaphore.is_acquired() is True + await _assert_semaphore_redis_state( + redis_client_sdk, + semaphore, + expected_count=1, + expected_free_tokens=semaphore_capacity - 1, + ) + + # reacquire should just work + await semaphore.reacquire() + assert await semaphore.current_count() == 1 + assert await semaphore.available_tokens() == semaphore_capacity - 1 + assert await semaphore.is_acquired() is True + await _assert_semaphore_redis_state( + redis_client_sdk, + semaphore, + expected_count=1, + expected_free_tokens=semaphore_capacity - 1, + ) + + # Release + await semaphore.release() + assert await semaphore.current_count() == 0 + assert await semaphore.available_tokens() == semaphore_capacity + assert await semaphore.is_acquired() is False + await _assert_semaphore_redis_state( + redis_client_sdk, + semaphore, + expected_count=0, + expected_free_tokens=semaphore_capacity, + ) + + # reacquire after release should fail + with pytest.raises(SemaphoreNotAcquiredError): + await semaphore.reacquire() + await _assert_semaphore_redis_state( + redis_client_sdk, + semaphore, + expected_count=0, + expected_free_tokens=semaphore_capacity, + ) + + # so does release again + with pytest.raises(SemaphoreNotAcquiredError): + await semaphore.release() + await _assert_semaphore_redis_state( + redis_client_sdk, + semaphore, + expected_count=0, + expected_free_tokens=semaphore_capacity, + ) + + +async def test_semaphore_acquire_release_with_ttl_expiry( + redis_client_sdk: RedisClientSDK, + 
semaphore_name: str, + semaphore_capacity: int, + with_short_default_semaphore_ttl: datetime.timedelta, +): + semaphore = DistributedSemaphore( + redis_client=redis_client_sdk, + key=semaphore_name, + capacity=semaphore_capacity, + ttl=with_short_default_semaphore_ttl, + ) + await semaphore.acquire() + assert await semaphore.current_count() == 1 + assert await semaphore.available_tokens() == semaphore_capacity - 1 + await _assert_semaphore_redis_state( + redis_client_sdk, + semaphore, + expected_count=1, + expected_free_tokens=semaphore_capacity - 1, + ) + + # wait for TTL to expire + await asyncio.sleep(with_short_default_semaphore_ttl.total_seconds() + 0.1) + await _assert_semaphore_redis_state( + redis_client_sdk, + semaphore, + expected_count=1, + expected_free_tokens=semaphore_capacity - 1, + expected_expired=True, + ) + + # TTL expired, reacquire should fail + with pytest.raises(SemaphoreLostError): + await semaphore.reacquire() + await _assert_semaphore_redis_state( + redis_client_sdk, + semaphore, + expected_count=1, + expected_free_tokens=semaphore_capacity - 1, + expected_expired=True, + ) + # and release should also fail + with pytest.raises(SemaphoreLostError): + await semaphore.release() + await _assert_semaphore_redis_state( + redis_client_sdk, + semaphore, + expected_count=0, + expected_free_tokens=semaphore_capacity, + ) + + # and release again should also fail with different error + with pytest.raises(SemaphoreNotAcquiredError): + await semaphore.release() + await _assert_semaphore_redis_state( + redis_client_sdk, + semaphore, + expected_count=0, + expected_free_tokens=semaphore_capacity, + ) + + +async def test_semaphore_multiple_instances_capacity_limit( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, +): + capacity = 2 + semaphores = [ + DistributedSemaphore( + redis_client=redis_client_sdk, key=semaphore_name, capacity=capacity + ) + for _ in range(4) + ] + + # Acquire first two should succeed + assert await semaphores[0].acquire() is True + assert await semaphores[0].is_acquired() is True + await _assert_semaphore_redis_state( + redis_client_sdk, + semaphores[0], + expected_count=1, + expected_free_tokens=capacity - 1, + ) + assert await semaphores[1].is_acquired() is False + for sem in semaphores[:4]: + assert await sem.current_count() == 1 + assert await sem.available_tokens() == capacity - 1 + + # acquire second + assert await semaphores[1].acquire() is True + for sem in semaphores[:2]: + assert await sem.is_acquired() is True + assert await sem.current_count() == 2 + assert await sem.available_tokens() == capacity - 2 + await _assert_semaphore_redis_state( + redis_client_sdk, + sem, + expected_count=2, + expected_free_tokens=capacity - 2, + ) + + # Third and fourth should fail in non-blocking mode + for sem in semaphores[2:]: + sem.blocking = False + assert await sem.acquire() is False + assert await sem.is_acquired() is False + assert await sem.current_count() == 2 + assert await sem.available_tokens() == capacity - 2 + + # Release one + await semaphores[0].release() + assert await semaphores[0].is_acquired() is False + for sem in semaphores[:4]: + assert await sem.current_count() == 1 + assert await sem.available_tokens() == capacity - 1 + + # Now third can acquire + assert await semaphores[2].acquire() is True + for sem in semaphores[:4]: + assert await sem.current_count() == 2 + assert await sem.available_tokens() == capacity - 2 + + # Clean up + await semaphores[1].release() + await semaphores[2].release() + + +async def 
test_semaphore_with_timeout( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, +): + timeout = datetime.timedelta(seconds=1) + semaphore1 = DistributedSemaphore( + redis_client=redis_client_sdk, + key=semaphore_name, + capacity=1, + blocking_timeout=timeout, + ) + assert await semaphore1.acquire() is True + assert await semaphore1.is_acquired() is True + await _assert_semaphore_redis_state( + redis_client_sdk, + semaphore1, + expected_count=1, + expected_free_tokens=0, + ) + semaphore2 = DistributedSemaphore( + redis_client=redis_client_sdk, + key=semaphore_name, + capacity=1, + blocking_timeout=timeout, + ) + # Second should timeout + with pytest.raises(SemaphoreAcquisitionError): + await semaphore2.acquire() + assert await semaphore2.is_acquired() is False + await _assert_semaphore_redis_state( + redis_client_sdk, + semaphore1, + expected_count=1, + expected_free_tokens=0, + ) + + +async def test_semaphore_context_manager( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, +): + async with distributed_semaphore( + redis_client=redis_client_sdk, + key=semaphore_name, + capacity=1, + ) as semaphore1: + assert await semaphore1.is_acquired() is True + assert await semaphore1.current_count() == 1 + assert await semaphore1.available_tokens() == 0 + await _assert_semaphore_redis_state( + redis_client_sdk, + semaphore1, + expected_count=1, + expected_free_tokens=0, + ) + assert await semaphore1.is_acquired() is False + assert await semaphore1.current_count() == 0 + assert await semaphore1.available_tokens() == 1 + await _assert_semaphore_redis_state( + redis_client_sdk, + semaphore1, + expected_count=0, + expected_free_tokens=1, + ) + + +async def test_semaphore_context_manager_with_timeout( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, +): + capacity = 1 + timeout = datetime.timedelta(seconds=0.1) + + # First semaphore acquires + async with distributed_semaphore( + redis_client=redis_client_sdk, + key=semaphore_name, + capacity=capacity, + ) as semaphore1: + assert await semaphore1.is_acquired() is True + assert await semaphore1.current_count() == 1 + assert await semaphore1.available_tokens() == 0 + await _assert_semaphore_redis_state( + redis_client_sdk, + semaphore1, + expected_count=1, + expected_free_tokens=0, + ) + # Second semaphore should raise on timeout + with pytest.raises(SemaphoreAcquisitionError): + async with distributed_semaphore( + redis_client=redis_client_sdk, + key=semaphore_name, + capacity=capacity, + blocking=True, + blocking_timeout=timeout, + ): + ... + + # non-blocking should also raise when used with context manager + with pytest.raises(SemaphoreAcquisitionError): + async with distributed_semaphore( + redis_client=redis_client_sdk, + key=semaphore_name, + capacity=capacity, + blocking=False, + ): + ... 
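For orientation while reading these semaphore tests, here is a minimal usage sketch of the async context manager they exercise; the Redis client is assumed to be an already set-up RedisClientSDK, and the key, capacity and timeout values are placeholders, not values mandated by the library:

    import datetime
    from servicelib.redis import RedisClientSDK
    from servicelib.redis._semaphore import distributed_semaphore

    async def guarded_section(redis_client: RedisClientSDK) -> None:
        # at most 2 holders cluster-wide for this key; a blocked caller gives up
        # after 5 seconds and the context manager raises SemaphoreAcquisitionError
        async with distributed_semaphore(
            redis_client=redis_client,
            key="some-shared-resource",  # placeholder key
            capacity=2,
            blocking=True,
            blocking_timeout=datetime.timedelta(seconds=5),
        ):
            ...  # critical section; the slot is released on exit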
+ # using the semaphore directly should in non-blocking mode should return False + semaphore2 = DistributedSemaphore( + redis_client=redis_client_sdk, + key=semaphore_name, + capacity=capacity, + blocking=False, + ) + assert await semaphore2.acquire() is False + + # now try infinite timeout + semaphore3 = DistributedSemaphore( + redis_client=redis_client_sdk, + key=semaphore_name, + capacity=capacity, + blocking_timeout=None, # wait forever + ) + acquire_task = asyncio.create_task(semaphore3.acquire()) + await asyncio.sleep(5) # give some time to start acquiring + assert not acquire_task.done() + + +@pytest.mark.parametrize( + "exception", + [RuntimeError, asyncio.CancelledError], + ids=str, +) +async def test_semaphore_context_manager_with_exception( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, + semaphore_capacity: int, + exception: type[Exception | asyncio.CancelledError], +): + async def _raising_context(): + async with distributed_semaphore( + redis_client=redis_client_sdk, + key=semaphore_name, + capacity=semaphore_capacity, + ): + raise exception("Test") + + with pytest.raises(exception, match="Test"): + await _raising_context() + + +async def test_semaphore_context_manager_lost_renewal( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, + with_short_default_semaphore_ttl: datetime.timedelta, +): + with pytest.raises(SemaphoreLostError): # noqa: PT012 + async with distributed_semaphore( + redis_client=redis_client_sdk, + key=semaphore_name, + capacity=1, + ttl=with_short_default_semaphore_ttl, + ) as semaphore: + assert await semaphore.is_acquired() is True + assert await semaphore.current_count() == 1 + assert await semaphore.available_tokens() == 0 + await _assert_semaphore_redis_state( + redis_client_sdk, + semaphore, + expected_count=1, + expected_free_tokens=0, + ) + + # now simulate lost renewal by deleting the holder key + await redis_client_sdk.redis.delete(semaphore.holder_key) + # wait a bit to let the auto-renewal task detect the lost lock + # the sleep will be interrupted by the exception and the context manager will exit + with pytest.raises(asyncio.CancelledError): + await asyncio.sleep( + with_short_default_semaphore_ttl.total_seconds() + 0.5 + ) + raise asyncio.CancelledError + + +async def test_semaphore_context_manager_auto_renewal( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, + with_short_default_semaphore_ttl: datetime.timedelta, +): + async with distributed_semaphore( + redis_client=redis_client_sdk, + key=semaphore_name, + capacity=1, + ttl=with_short_default_semaphore_ttl, + ) as semaphore: + assert await semaphore.is_acquired() is True + assert await semaphore.current_count() == 1 + assert await semaphore.available_tokens() == 0 + await _assert_semaphore_redis_state( + redis_client_sdk, + semaphore, + expected_count=1, + expected_free_tokens=0, + ) + + # wait for a few TTLs to ensure auto-renewal is working + total_wait = with_short_default_semaphore_ttl.total_seconds() * 3 + await asyncio.sleep(total_wait) + + # should still be acquired + assert await semaphore.is_acquired() is True + assert await semaphore.current_count() == 1 + assert await semaphore.available_tokens() == 0 + await _assert_semaphore_redis_state( + redis_client_sdk, + semaphore, + expected_count=1, + expected_free_tokens=0, + ) + + +async def test_semaphore_context_manager_logs_warning_when_hold_too_long( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, + caplog: pytest.LogCaptureFixture, +): + """Test that a warning is logged when holding the 
semaphore for too long""" + with caplog.at_level(logging.WARNING): + async with distributed_semaphore( + redis_client=redis_client_sdk, + key=semaphore_name, + capacity=1, + expected_lock_overall_time=datetime.timedelta(milliseconds=200), + ): + await asyncio.sleep(0.3) + assert caplog.records + assert "longer than expected" in caplog.messages[-1] + + +async def test_multiple_semaphores_different_keys( + redis_client_sdk: RedisClientSDK, + faker: Faker, +): + """Test that semaphores with different keys don't interfere""" + key1 = faker.pystr() + key2 = faker.pystr() + capacity = 1 + + async with ( + distributed_semaphore( + redis_client=redis_client_sdk, key=key1, capacity=capacity + ), + distributed_semaphore( + redis_client=redis_client_sdk, key=key2, capacity=capacity + ), + ): + ... diff --git a/packages/service-library/tests/redis/test_semaphore_decorator.py b/packages/service-library/tests/redis/test_semaphore_decorator.py new file mode 100644 index 000000000000..fa004da96fe3 --- /dev/null +++ b/packages/service-library/tests/redis/test_semaphore_decorator.py @@ -0,0 +1,720 @@ +# ruff: noqa: SLF001, EM101, TRY003, PT011, PLR0917 +# pylint: disable=no-value-for-parameter +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +import asyncio +import datetime +import logging +from contextlib import asynccontextmanager +from typing import Literal + +import pytest +from pytest_mock import MockerFixture +from pytest_simcore.helpers.logging_tools import log_context +from servicelib.redis import RedisClientSDK +from servicelib.redis._constants import SEMAPHORE_KEY_PREFIX +from servicelib.redis._errors import SemaphoreLostError +from servicelib.redis._semaphore import ( + DistributedSemaphore, + SemaphoreAcquisitionError, +) +from servicelib.redis._semaphore_decorator import ( + with_limited_concurrency, + with_limited_concurrency_cm, +) + +pytest_simcore_core_services_selection = [ + "redis", +] +pytest_simcore_ops_services_selection = [ + "redis-commander", +] + + +async def test_basic_functionality( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, +): + call_count = 0 + + @with_limited_concurrency( + redis_client_sdk, + key=semaphore_name, + capacity=1, + ) + async def limited_function(): + nonlocal call_count + call_count += 1 + await asyncio.sleep(0.1) + return call_count + + # Multiple concurrent calls + tasks = [asyncio.create_task(limited_function()) for _ in range(3)] + results = await asyncio.gather(*tasks) + + # All should complete successfully + assert len(results) == 3 + assert all(isinstance(r, int) for r in results) + + +async def test_auto_renewal( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, + semaphore_capacity: int, + short_ttl: datetime.timedelta, +): + work_started = asyncio.Event() + work_completed = asyncio.Event() + + @with_limited_concurrency( + redis_client_sdk, + key=semaphore_name, + capacity=semaphore_capacity, + ttl=short_ttl, + ) + async def long_running_work() -> Literal["success"]: + work_started.set() + # Wait longer than TTL to ensure renewal works + await asyncio.sleep(short_ttl.total_seconds() * 2) + work_completed.set() + return "success" + + task = asyncio.create_task(long_running_work()) + await work_started.wait() + + # Check that semaphore is being held + temp_semaphore = DistributedSemaphore( + redis_client=redis_client_sdk, + key=semaphore_name, + capacity=semaphore_capacity, + ttl=short_ttl, + ) + assert await 
temp_semaphore.current_count() == 1 + assert await temp_semaphore.available_tokens() == semaphore_capacity - 1 + + # Wait for work to complete + result = await task + assert result == "success" + assert work_completed.is_set() + + # After completion, semaphore should be released + assert await temp_semaphore.current_count() == 0 + assert await temp_semaphore.available_tokens() == semaphore_capacity + + +async def test_auto_renewal_lose_semaphore_raises( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, + semaphore_capacity: int, + short_ttl: datetime.timedelta, +): + work_started = asyncio.Event() + + @with_limited_concurrency( + redis_client_sdk, + key=semaphore_name, + capacity=semaphore_capacity, + ttl=short_ttl, + ) + async def coro_that_should_fail() -> Literal["should not reach here"]: + work_started.set() + # Wait long enough for renewal to be attempted multiple times + await asyncio.sleep(short_ttl.total_seconds() * 100) + return "should not reach here" + + task = asyncio.create_task(coro_that_should_fail()) + await work_started.wait() + + # Wait for the first renewal interval to pass + renewal_interval = short_ttl / 3 + await asyncio.sleep(renewal_interval.total_seconds() * 1.5) + + # Find and delete all holder keys for this semaphore + holder_keys = await redis_client_sdk.redis.keys( + f"{SEMAPHORE_KEY_PREFIX}{semaphore_name}_cap{semaphore_capacity}:holders:*" + ) + assert holder_keys, "Holder keys should exist before deletion" + await redis_client_sdk.redis.delete(*holder_keys) + + # wait another renewal interval to ensure the renewal fails + await asyncio.sleep(renewal_interval.total_seconds() * 1.5) + + # it shall have raised already, do not wait too much + async with asyncio.timeout(renewal_interval.total_seconds()): + with pytest.raises(SemaphoreLostError): + await task + + +async def test_decorator_with_callable_parameters( + redis_client_sdk: RedisClientSDK, +): + executed_keys = [] + + def get_redis_client(*args, **kwargs) -> RedisClientSDK: + return redis_client_sdk + + def get_key(user_id: str, resource: str) -> str: + return f"{user_id}-{resource}" + + def get_capacity(user_id: str, resource: str) -> int: + return 2 + + @with_limited_concurrency( + get_redis_client, + key=get_key, + capacity=get_capacity, + ) + async def process_user_resource(user_id: str, resource: str): + executed_keys.append(f"{user_id}-{resource}") + await asyncio.sleep(0.05) + + # Test with different parameters + await asyncio.gather( + process_user_resource("user1", "wallet1"), + process_user_resource("user1", "wallet2"), + process_user_resource("user2", "wallet1"), + ) + + assert len(executed_keys) == 3 + assert "user1-wallet1" in executed_keys + assert "user1-wallet2" in executed_keys + assert "user2-wallet1" in executed_keys + + +async def test_decorator_capacity_enforcement( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, +): + concurrent_count = 0 + max_concurrent = 0 + + @with_limited_concurrency( + redis_client_sdk, + key=semaphore_name, + capacity=2, + ) + async def limited_function() -> None: + nonlocal concurrent_count, max_concurrent + concurrent_count += 1 + max_concurrent = max(max_concurrent, concurrent_count) + await asyncio.sleep(0.1) + concurrent_count -= 1 + + # Start 5 concurrent tasks + tasks = [asyncio.create_task(limited_function()) for _ in range(5)] + await asyncio.gather(*tasks) + + # Should never exceed capacity of 2 + assert max_concurrent <= 2 + + +async def test_exception_handling( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, +): + 
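As a compact reference for the decorator these tests target, a hedged sketch of how it is applied follows; the key and capacity below are placeholders, and the release-on-exit behaviour described in the comments mirrors what the surrounding exception and cancellation tests assert:

    from servicelib.redis import RedisClientSDK
    from servicelib.redis._semaphore_decorator import with_limited_concurrency

    def build_limited_job(redis_client: RedisClientSDK):
        @with_limited_concurrency(
            redis_client,
            key="heavy-job",  # placeholder semaphore key
            capacity=2,       # at most 2 concurrent executions across processes
        )
        async def heavy_job() -> str:
            # runs only while a semaphore slot is held; the slot is released
            # when the coroutine returns, raises or is cancelled
            return "done"

        return heavy_job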
@with_limited_concurrency( + redis_client_sdk, + key=semaphore_name, + capacity=1, + ) + async def failing_function(): + raise RuntimeError("Test exception") + + with pytest.raises(RuntimeError, match="Test exception"): + await failing_function() + + # Semaphore should be released even after exception + # Test by trying to acquire again + @with_limited_concurrency( + redis_client_sdk, + key=semaphore_name, + capacity=1, + ) + async def success_function(): + return "success" + + result = await success_function() + assert result == "success" + + +async def test_non_blocking_behavior( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, +): + # Test the blocking timeout behavior + started_event = asyncio.Event() + + @with_limited_concurrency( + redis_client_sdk, + key=semaphore_name, + capacity=1, + blocking=True, + blocking_timeout=datetime.timedelta(seconds=0.1), + ) + async def limited_function() -> None: + started_event.set() + await asyncio.sleep(2) + + # Start first task that will hold the semaphore + task1 = asyncio.create_task(limited_function()) + await started_event.wait() # Wait until semaphore is actually acquired + + # Second task should timeout and raise an exception + with pytest.raises(SemaphoreAcquisitionError): + await limited_function() + + await task1 + + # now doing the same with non-blocking should raise + @with_limited_concurrency( + redis_client_sdk, + key=semaphore_name, + capacity=1, + blocking=False, + blocking_timeout=None, + ) + async def limited_function_non_blocking() -> None: + await asyncio.sleep(2) + + tasks = [asyncio.create_task(limited_function_non_blocking()) for _ in range(3)] + results = await asyncio.gather(*tasks, return_exceptions=True) + assert len(results) == 3 + assert any(isinstance(r, SemaphoreAcquisitionError) for r in results) + + +async def test_user_exceptions_properly_reraised( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, + semaphore_capacity: int, + short_ttl: datetime.timedelta, + mocker: MockerFixture, +): + class UserFunctionError(Exception): + """Custom exception to ensure we're catching the right exception""" + + work_started = asyncio.Event() + + # Track that auto-renewal is actually happening + from servicelib.redis._semaphore import DistributedSemaphore + + spied_renew_fct = mocker.spy(DistributedSemaphore, "reacquire") + + @with_limited_concurrency( + redis_client_sdk, + key=semaphore_name, + capacity=semaphore_capacity, + ttl=short_ttl, # Short TTL to ensure renewal happens + ) + async def failing_function(): + work_started.set() + # Wait long enough for at least one renewal to happen + await asyncio.sleep(short_ttl.total_seconds() * 0.8) + # Then raise our custom exception + raise UserFunctionError("User function failed intentionally") + + # Verify the exception is properly re-raised + with pytest.raises(UserFunctionError, match="User function failed intentionally"): + await failing_function() + + # Ensure work actually started + assert work_started.is_set() + + # Verify auto-renewal was working (at least one renewal should have happened) + assert ( + spied_renew_fct.call_count >= 1 + ), "Auto-renewal should have been called at least once" + + # Verify semaphore was properly released by trying to acquire it again + test_semaphore = DistributedSemaphore( + redis_client=redis_client_sdk, + key=semaphore_name, + capacity=semaphore_capacity, + ttl=short_ttl, + ) + assert ( + await test_semaphore.current_count() == 0 + ), "Semaphore should be released after exception" + + +async def test_cancelled_error_preserved( 
+ redis_client_sdk: RedisClientSDK, + semaphore_name: str, + semaphore_capacity: int, +): + """Test that CancelledError is properly preserved through the decorator""" + + @with_limited_concurrency( + redis_client_sdk, + key=semaphore_name, + capacity=semaphore_capacity, + ) + async def function_raising_cancelled_error(): + raise asyncio.CancelledError + + # Verify CancelledError is preserved + with pytest.raises(asyncio.CancelledError): + await function_raising_cancelled_error() + + +@pytest.mark.heavy_load +async def test_with_large_capacity( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, +): + large_capacity = 100 + concurrent_count = 0 + max_concurrent = 0 + sleep_time_s = 10 + num_tasks = 500 + + @with_limited_concurrency( + redis_client_sdk, + key=semaphore_name, + capacity=large_capacity, + blocking=True, + blocking_timeout=None, + ) + async def limited_function(task_id: int) -> None: + nonlocal concurrent_count, max_concurrent + concurrent_count += 1 + max_concurrent = max(max_concurrent, concurrent_count) + with log_context(logging.INFO, f"{task_id=}") as ctx: + ctx.logger.info("started %s with %s", task_id, concurrent_count) + await asyncio.sleep(sleep_time_s) + ctx.logger.info("done %s with %s", task_id, concurrent_count) + concurrent_count -= 1 + + # Start tasks equal to the large capacity + tasks = [asyncio.create_task(limited_function(i)) for i in range(num_tasks)] + done, pending = await asyncio.wait( + tasks, + timeout=float(num_tasks) / float(large_capacity) * 10.0 * float(sleep_time_s), + ) + assert not pending, f"Some tasks did not complete: {len(pending)} pending" + assert len(done) == num_tasks + + # Should never exceed the large capacity + assert max_concurrent <= large_capacity + + +async def test_long_locking_logs_warning( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, + caplog: pytest.LogCaptureFixture, +): + @with_limited_concurrency( + redis_client_sdk, + key=semaphore_name, + capacity=1, + blocking=True, + blocking_timeout=None, + expected_lock_overall_time=datetime.timedelta(milliseconds=200), + ) + async def limited_function() -> None: + with log_context(logging.INFO, "task"): + await asyncio.sleep(0.4) + + with caplog.at_level(logging.WARNING): + await limited_function() + assert caplog.records + assert "longer than expected" in caplog.messages[-1] + + +async def test_semaphore_fair_queuing( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, +): + entered_order: list[int] = [] + + @with_limited_concurrency( + redis_client_sdk, + key=semaphore_name, + capacity=1, + ) + async def limited_function(call_id: int): + entered_order.append(call_id) + await asyncio.sleep(0.2) + return call_id + + # Launch tasks in a specific order + num_tasks = 10 + tasks = [] + for i in range(num_tasks): + tasks.append(asyncio.create_task(limited_function(i))) + await asyncio.sleep(0.1) # Small delay to help preserve order + results = await asyncio.gather(*tasks) + + # All should complete successfully and in order + assert results == list(range(num_tasks)) + # The order in which they entered the critical section should match the order of submission + assert entered_order == list( + range(num_tasks) + ), f"Expected fair queuing, got {entered_order}" + + +async def test_context_manager_basic_functionality( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, +): + concurrent_count = 0 + max_concurrent = 0 + + @with_limited_concurrency_cm( + redis_client_sdk, + key=semaphore_name, + capacity=2, + blocking_timeout=None, + ) + @asynccontextmanager + 
async def limited_context_manager(): + nonlocal concurrent_count, max_concurrent + concurrent_count += 1 + max_concurrent = max(max_concurrent, concurrent_count) + try: + yield + await asyncio.sleep(0.1) + finally: + concurrent_count -= 1 + + async def use_context_manager() -> int: + async with limited_context_manager(): + await asyncio.sleep(0.1) + return 1 + + # Start concurrent context managers + tasks = [asyncio.create_task(use_context_manager()) for _ in range(20)] + results = await asyncio.gather(*tasks) + # All should complete successfully + assert len(results) == 20 + assert all(isinstance(r, int) for r in results) + + # Should never exceed capacity of 2 + assert max_concurrent <= 2 + + +async def test_context_manager_exception_handling( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, +): + @with_limited_concurrency_cm( + redis_client_sdk, + key=semaphore_name, + capacity=1, + ) + @asynccontextmanager + async def failing_context_manager(): + yield + raise RuntimeError("Test exception") + + with pytest.raises(RuntimeError, match="Test exception"): + async with failing_context_manager(): + pass + + # Semaphore should be released even after exception + + @with_limited_concurrency_cm( + redis_client_sdk, + key=semaphore_name, + capacity=1, + ) + @asynccontextmanager + async def success_context_manager(): + yield "success" + + async with success_context_manager() as result: + assert result == "success" + + +async def test_context_manager_auto_renewal( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, + semaphore_capacity: int, + short_ttl: datetime.timedelta, +): + work_started = asyncio.Event() + work_completed = asyncio.Event() + + @with_limited_concurrency_cm( + redis_client_sdk, + key=semaphore_name, + capacity=semaphore_capacity, + ttl=short_ttl, + ) + @asynccontextmanager + async def long_running_context_manager(): + work_started.set() + yield "data" + # Wait longer than TTL to ensure renewal works + await asyncio.sleep(short_ttl.total_seconds() * 2) + work_completed.set() + + async def use_long_running_cm(): + async with long_running_context_manager() as data: + assert data == "data" + # Keep context manager active for longer than TTL + await asyncio.sleep(short_ttl.total_seconds() * 1.5) + + task = asyncio.create_task(use_long_running_cm()) + await work_started.wait() + + # Check that semaphore is being held + temp_semaphore = DistributedSemaphore( + redis_client=redis_client_sdk, + key=semaphore_name, + capacity=semaphore_capacity, + ttl=short_ttl, + ) + assert await temp_semaphore.current_count() == 1 + assert await temp_semaphore.available_tokens() == semaphore_capacity - 1 + + # Wait for work to complete + await task + assert work_completed.is_set() + + # After completion, semaphore should be released + assert await temp_semaphore.current_count() == 0 + assert await temp_semaphore.available_tokens() == semaphore_capacity + + +async def test_context_manager_with_callable_parameters( + redis_client_sdk: RedisClientSDK, +): + executed_keys = [] + + def get_redis_client(*args, **kwargs): + return redis_client_sdk + + def get_key(user_id: str, resource: str) -> str: + return f"{user_id}-{resource}" + + def get_capacity(user_id: str, resource: str) -> int: + return 2 + + @with_limited_concurrency_cm( + get_redis_client, + key=get_key, + capacity=get_capacity, + ) + @asynccontextmanager + async def process_user_resource_cm(user_id: str, resource: str): + executed_keys.append(f"{user_id}-{resource}") + yield f"processed-{user_id}-{resource}" + await 
asyncio.sleep(0.05) + + async def use_cm(user_id: str, resource: str): + async with process_user_resource_cm(user_id, resource) as result: + return result + + # Test with different parameters + results = await asyncio.gather( + use_cm("user1", "wallet1"), + use_cm("user1", "wallet2"), + use_cm("user2", "wallet1"), + ) + + assert len(executed_keys) == 3 + assert "user1-wallet1" in executed_keys + assert "user1-wallet2" in executed_keys + assert "user2-wallet1" in executed_keys + + assert len(results) == 3 + assert "processed-user1-wallet1" in results + assert "processed-user1-wallet2" in results + assert "processed-user2-wallet1" in results + + +async def test_context_manager_non_blocking_behavior( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, +): + started_event = asyncio.Event() + + @with_limited_concurrency_cm( + redis_client_sdk, + key=semaphore_name, + capacity=1, + blocking=True, + blocking_timeout=datetime.timedelta(seconds=0.1), + ) + @asynccontextmanager + async def limited_context_manager(): + started_event.set() + yield + await asyncio.sleep(2) + + # Start first context manager that will hold the semaphore + async def long_running_cm(): + async with limited_context_manager(): + await asyncio.sleep(2) + + task1 = asyncio.create_task(long_running_cm()) + await started_event.wait() # Wait until semaphore is actually acquired + + # Second context manager should timeout and raise an exception + + @with_limited_concurrency_cm( + redis_client_sdk, + key=semaphore_name, + capacity=1, + blocking=True, + blocking_timeout=datetime.timedelta(seconds=0.1), + ) + @asynccontextmanager + async def timeout_context_manager(): + yield + + with pytest.raises(SemaphoreAcquisitionError): + async with timeout_context_manager(): + pass + + await task1 + + +async def test_context_manager_lose_semaphore_raises( + redis_client_sdk: RedisClientSDK, + semaphore_name: str, + semaphore_capacity: int, + short_ttl: datetime.timedelta, +): + work_started = asyncio.Event() + + @with_limited_concurrency_cm( + redis_client_sdk, + key=semaphore_name, + capacity=semaphore_capacity, + ttl=short_ttl, + ) + @asynccontextmanager + async def context_manager_that_should_fail(): + yield "data" + + async def use_failing_cm() -> None: + async with context_manager_that_should_fail() as data: + assert data == "data" + work_started.set() + # Wait long enough for renewal to be attempted multiple times + await asyncio.sleep(short_ttl.total_seconds() * 100) + + task = asyncio.create_task(use_failing_cm()) + await work_started.wait() + + # Wait for the first renewal interval to pass + renewal_interval = short_ttl / 3 + await asyncio.sleep(renewal_interval.total_seconds() + 1.5) + + # Find and delete all holder keys for this semaphore + holder_keys = await redis_client_sdk.redis.keys( + f"{SEMAPHORE_KEY_PREFIX}{semaphore_name}_cap{semaphore_capacity}:holders:*" + ) + assert holder_keys, "Holder keys should exist before deletion" + await redis_client_sdk.redis.delete(*holder_keys) + + # wait another renewal interval to ensure the renewal fails + await asyncio.sleep(renewal_interval.total_seconds() * 1.5) + + async with asyncio.timeout(renewal_interval.total_seconds()): + with pytest.raises(SemaphoreLostError): + await task diff --git a/packages/service-library/tests/test_async_utils.py b/packages/service-library/tests/test_async_utils.py index 9bb1b4fff45a..e7164417fc6f 100644 --- a/packages/service-library/tests/test_async_utils.py +++ b/packages/service-library/tests/test_async_utils.py @@ -7,7 +7,6 @@ import random from 
collections import deque from dataclasses import dataclass -from datetime import timedelta from time import time from typing import Any @@ -15,7 +14,6 @@ from faker import Faker from servicelib.async_utils import ( _sequential_jobs_contexts, - delayed_start, run_sequentially_in_context, ) @@ -225,20 +223,3 @@ async def test_multiple_context_calls(context_param: int) -> int: assert i == await test_multiple_context_calls(i) assert len(_sequential_jobs_contexts) == RETRIES - - -async def test_with_delay(): - @delayed_start(timedelta(seconds=0.2)) - async def decorated_awaitable() -> int: - return 42 - - assert await decorated_awaitable() == 42 - - async def another_awaitable() -> int: - return 42 - - decorated_another_awaitable = delayed_start(timedelta(seconds=0.2))( - another_awaitable - ) - - assert await decorated_another_awaitable() == 42 diff --git a/packages/service-library/tests/test_background_task.py b/packages/service-library/tests/test_background_task.py index 8c508bf8979c..9a33ed6a62c1 100644 --- a/packages/service-library/tests/test_background_task.py +++ b/packages/service-library/tests/test_background_task.py @@ -13,9 +13,9 @@ from unittest.mock import AsyncMock import pytest +from common_library.async_tools import cancel_wait_task from faker import Faker from pytest_mock.plugin import MockerFixture -from servicelib.async_utils import cancel_wait_task from servicelib.background_task import create_periodic_task, periodic, periodic_task pytest_simcore_core_services_selection = [ @@ -26,8 +26,8 @@ ] -_FAST_POLL_INTERVAL: Final[int] = 1 -_VERY_SLOW_POLL_INTERVAL: Final[int] = 100 +_FAST_POLL_INTERVAL: Final[float] = 0.01 +_VERY_SLOW_POLL_INTERVAL: Final[float] = 1 @pytest.fixture @@ -207,12 +207,16 @@ async def _func() -> None: assert mock_func.call_count > 1 +class CustomError(Exception): + pass + + async def test_periodic_task_logs_error( mock_background_task: mock.AsyncMock, task_interval: datetime.timedelta, caplog: pytest.LogCaptureFixture, ): - mock_background_task.side_effect = RuntimeError("Test error") + mock_background_task.side_effect = CustomError("Test error") with caplog.at_level(logging.ERROR): async with periodic_task( diff --git a/packages/service-library/tests/test_background_task_utils.py b/packages/service-library/tests/test_background_task_utils.py index 9a03a6c35410..7307a8b7b89a 100644 --- a/packages/service-library/tests/test_background_task_utils.py +++ b/packages/service-library/tests/test_background_task_utils.py @@ -13,7 +13,7 @@ import arrow import pytest -from servicelib.async_utils import cancel_wait_task +from common_library.async_tools import cancel_wait_task from servicelib.background_task_utils import exclusive_periodic from servicelib.redis import RedisClientSDK from settings_library.redis import RedisDatabase @@ -24,13 +24,6 @@ wait_fixed, ) -pytest_simcore_core_services_selection = [ - "redis", -] -pytest_simcore_ops_services_selection = [ - "redis-commander", -] - @pytest.fixture async def redis_client_sdk( diff --git a/packages/service-library/tests/test_celery.py b/packages/service-library/tests/test_celery.py new file mode 100644 index 000000000000..670805d1a2ef --- /dev/null +++ b/packages/service-library/tests/test_celery.py @@ -0,0 +1,150 @@ +from types import NoneType +from typing import Annotated + +# pylint: disable=redefined-outer-name +# pylint: disable=protected-access +import pydantic +import pytest +from common_library.json_serialization import json_dumps +from faker import Faker +from pydantic import StringConstraints +from 
servicelib.celery.models import ( + OwnerMetadata, + TaskUUID, + Wildcard, +) + +_faker = Faker() + + +class _TestOwnerMetadata(OwnerMetadata): + string_: str + int_: int + bool_: bool + none_: None + uuid_: str + + +@pytest.fixture +def test_owner_metadata() -> dict[str, str | int | bool | None | list[str]]: + data = { + "string_": _faker.word(), + "int_": _faker.random_int(), + "bool_": _faker.boolean(), + "none_": None, + "uuid_": _faker.uuid4(), + "owner": _faker.word().lower(), + } + _TestOwnerMetadata.model_validate(data) # ensure it's valid + return data + + +async def test_task_filter_serialization( + test_owner_metadata: dict[str, str | int | bool | None | list[str]], +): + task_filter = _TestOwnerMetadata.model_validate(test_owner_metadata) + assert task_filter.model_dump() == test_owner_metadata + + +async def test_task_filter_sorting_key_not_serialized(): + + class _OwnerMetadata(OwnerMetadata): + a: int | Wildcard + b: str | Wildcard + + owner_metadata = _OwnerMetadata.model_validate( + {"a": _faker.random_int(), "b": _faker.word(), "owner": _faker.word().lower()} + ) + task_uuid = TaskUUID(_faker.uuid4()) + copy_owner_metadata = owner_metadata.model_dump() + copy_owner_metadata.update({"task_uuid": f"{task_uuid}"}) + + expected_key = ":".join( + [f"{k}={json_dumps(v)}" for k, v in sorted(copy_owner_metadata.items())] + ) + assert owner_metadata.model_dump_task_id(task_uuid=task_uuid) == expected_key + + +async def test_task_filter_task_uuid( + test_owner_metadata: dict[str, str | int | bool | None | list[str]], +): + task_filter = _TestOwnerMetadata.model_validate(test_owner_metadata) + task_uuid = TaskUUID(_faker.uuid4()) + task_id = task_filter.model_dump_task_id(task_uuid) + assert OwnerMetadata.get_task_uuid(task_id=task_id) == task_uuid + + +async def test_owner_metadata_task_id_dump_and_validate(): + + class MyModel(OwnerMetadata): + int_: int + bool_: bool + str_: str + float_: float + none_: NoneType + list_s: list[str] + list_i: list[int] + list_f: list[float] + list_b: list[bool] + + mymodel = MyModel( + int_=1, + none_=None, + bool_=True, + str_="test", + float_=1.0, + owner="myowner", + list_b=[True, False], + list_f=[1.0, 2.0], + list_i=[1, 2], + list_s=["a", "b"], + ) + task_uuid = TaskUUID(_faker.uuid4()) + task_id = mymodel.model_dump_task_id(task_uuid) + mymodel_recreated = MyModel.model_validate_task_id(task_id=task_id) + assert mymodel_recreated == mymodel + + +@pytest.mark.parametrize( + "bad_data", + [ + {"foo": "bar:baz"}, + {"foo": "bar=baz"}, + {"foo:bad": "bar"}, + {"foo=bad": "bar"}, + {"foo": ":baz"}, + {"foo": "=baz"}, + ], +) +def test_task_filter_validator_raises_on_forbidden_chars(bad_data): + with pytest.raises(pydantic.ValidationError): + OwnerMetadata.model_validate(bad_data) + + +async def test_task_owner(): + class MyOwnerMetadata(OwnerMetadata): + extra_field: str + + with pytest.raises(pydantic.ValidationError): + MyOwnerMetadata(owner="", extra_field="value") + + with pytest.raises(pydantic.ValidationError): + MyOwnerMetadata(owner="UPPER_CASE", extra_field="value") + + class MyNextFilter(OwnerMetadata): + owner: Annotated[ + str, StringConstraints(strip_whitespace=True, pattern=r"^the_task_owner$") + ] + + with pytest.raises(pydantic.ValidationError): + MyNextFilter(owner="wrong_owner") + + +def test_owner_metadata_serialize_deserialize(test_owner_metadata): + test_owner_metadata = _TestOwnerMetadata.model_validate(test_owner_metadata) + data = test_owner_metadata.model_dump() + deserialized_data = OwnerMetadata.model_validate(data) + 
assert len(_TestOwnerMetadata.model_fields) > len( + OwnerMetadata.model_fields + ) # ensure extra data is available in _TestOwnerMetadata -> needed for RPC + assert deserialized_data.model_dump() == data diff --git a/services/dynamic-sidecar/tests/unit/test_modules_container_utils.py b/packages/service-library/tests/test_container_utils.py similarity index 87% rename from services/dynamic-sidecar/tests/unit/test_modules_container_utils.py rename to packages/service-library/tests/test_container_utils.py index a8b84f7235c1..075645e5009d 100644 --- a/services/dynamic-sidecar/tests/unit/test_modules_container_utils.py +++ b/packages/service-library/tests/test_container_utils.py @@ -1,11 +1,10 @@ # pylint: disable=redefined-outer-name -import contextlib from collections.abc import AsyncIterable import aiodocker import pytest -from simcore_service_dynamic_sidecar.modules.container_utils import ( +from servicelib.container_utils import ( ContainerExecCommandFailedError, ContainerExecContainerNotFoundError, ContainerExecTimeoutError, @@ -26,9 +25,7 @@ async def running_container_name() -> AsyncIterable[str]: yield container_inspect["Name"][1:] - with contextlib.suppress(aiodocker.DockerError): - await container.kill() - await container.delete() + await container.delete(force=True) async def test_run_command_in_container_container_not_found(): diff --git a/packages/service-library/tests/test_exception_utils.py b/packages/service-library/tests/test_exception_utils.py index a884d3dafb19..040022b64eb6 100644 --- a/packages/service-library/tests/test_exception_utils.py +++ b/packages/service-library/tests/test_exception_utils.py @@ -4,7 +4,7 @@ import pytest from pydantic import PositiveFloat, PositiveInt -from servicelib.exception_utils import DelayedExceptionHandler, silence_exceptions +from servicelib.exception_utils import DelayedExceptionHandler, suppress_exceptions TOLERANCE: Final[PositiveFloat] = 0.1 SLEEP_FOR: Final[PositiveFloat] = TOLERANCE * 0.1 @@ -53,14 +53,17 @@ def test_workflow_raises() -> None: # Define some custom exceptions for testing class CustomError(Exception): - pass + def __init__(self, code: int = 0, message: str = ""): + self.code = code + self.message = message + super().__init__(message) class AnotherCustomError(Exception): pass -@silence_exceptions((CustomError,)) +@suppress_exceptions((CustomError,), reason="CustomError is silenced") def sync_function(*, raise_error: bool, raise_another_error: bool) -> str: if raise_error: raise CustomError @@ -69,7 +72,7 @@ def sync_function(*, raise_error: bool, raise_another_error: bool) -> str: return "Success" -@silence_exceptions((CustomError,)) +@suppress_exceptions((CustomError,), reason="CustomError is silenced") async def async_function(*, raise_error: bool, raise_another_error: bool) -> str: if raise_error: raise CustomError @@ -78,6 +81,29 @@ async def async_function(*, raise_error: bool, raise_another_error: bool) -> str return "Success" +# Test functions with predicate +@suppress_exceptions( + (CustomError,), + reason="Only suppress CustomError with code >= 100", + predicate=lambda e: hasattr(e, "code") and e.code >= 100, +) +def sync_function_with_predicate(error_code: int = 0) -> str: + if error_code > 0: + raise CustomError(code=error_code, message=f"Error {error_code}") + return "Success" + + +@suppress_exceptions( + (CustomError,), + reason="Only suppress CustomError with code >= 100", + predicate=lambda e: hasattr(e, "code") and e.code >= 100, +) +async def async_function_with_predicate(error_code: int = 0) -> str: + 
if error_code > 0: + raise CustomError(code=error_code, message=f"Error {error_code}") + return "Success" + + def test_sync_function_no_exception(): result = sync_function(raise_error=False, raise_another_error=False) assert result == "Success" @@ -106,3 +132,144 @@ def test_sync_function_with_different_exception(): async def test_async_function_with_different_exception(): with pytest.raises(AnotherCustomError): await async_function(raise_error=False, raise_another_error=True) + + +def test_sync_function_predicate_suppresses_matching_exception(): + result = sync_function_with_predicate( + error_code=150 + ) # code >= 100, should be suppressed + assert result is None + + +def test_sync_function_predicate_raises_non_matching_exception(): + with pytest.raises(CustomError): + sync_function_with_predicate(error_code=50) # code < 100, should be raised + + +def test_sync_function_predicate_no_exception(): + result = sync_function_with_predicate(error_code=0) + assert result == "Success" + + +async def test_async_function_predicate_suppresses_matching_exception(): + result = await async_function_with_predicate( + error_code=200 + ) # code >= 100, should be suppressed + assert result is None + + +async def test_async_function_predicate_raises_non_matching_exception(): + with pytest.raises(CustomError): + await async_function_with_predicate( + error_code=25 + ) # code < 100, should be raised + + +async def test_async_function_predicate_no_exception(): + result = await async_function_with_predicate(error_code=0) + assert result == "Success" + + +@suppress_exceptions( + (ValueError, TypeError), + reason="Complex predicate test", + predicate=lambda e: "suppress" in str(e).lower(), +) +def function_with_complex_predicate(message: str) -> str: + if "value" in message: + raise ValueError(message) + if "type" in message: + raise TypeError(message) + return "Success" + + +def test_complex_predicate_suppresses_matching(): + result = function_with_complex_predicate("please suppress this value error") + assert result is None + + +def test_complex_predicate_raises_non_matching(): + with pytest.raises(ValueError, match="value error without keyword"): + function_with_complex_predicate("value error without keyword") + + +def test_complex_predicate_different_exception_type(): + result = function_with_complex_predicate("type error with suppress keyword") + assert result is None + + +# Test predicate exception handling +@suppress_exceptions( + (ValueError,), + reason="Predicate that raises exception", + predicate=lambda _: bool(1 / 0), # This will raise ZeroDivisionError +) +def function_with_failing_predicate() -> str: + msg = "Original error" + raise ValueError(msg) + + +@suppress_exceptions( + (ValueError,), + reason="Predicate that raises exception", + predicate=lambda _: bool(1 / 0), # This will raise ZeroDivisionError +) +async def async_function_with_failing_predicate() -> str: + msg = "Original error" + raise ValueError(msg) + + +def test_sync_function_predicate_exception_reraised(caplog): + with pytest.raises(ValueError, match="Original error"): + function_with_failing_predicate() + + # Check that warning was logged + assert "Predicate function raised exception" in caplog.text + assert "ZeroDivisionError" in caplog.text + + +async def test_async_function_predicate_exception_reraised(caplog): + with pytest.raises(ValueError, match="Original error"): + await async_function_with_failing_predicate() + + # Check that warning was logged + assert "Predicate function raised exception" in caplog.text + assert 
"ZeroDivisionError" in caplog.text + + +@suppress_exceptions( + (ValueError,), + reason="Predicate that accesses invalid attribute", + predicate=lambda e: e.nonexistent_attribute == "test", +) +def function_with_attribute_error_predicate() -> str: + msg = "Original error" + raise ValueError(msg) + + +def test_predicate_attribute_error_reraised(caplog): + with pytest.raises(ValueError, match="Original error"): + function_with_attribute_error_predicate() + + # Check that warning was logged about predicate failure + assert "Predicate function raised exception" in caplog.text + assert "AttributeError" in caplog.text + + +@suppress_exceptions( + (ValueError,), + reason="Predicate that sometimes works", + predicate=lambda e: len(str(e)) > 5, # Safe predicate +) +def function_with_working_predicate(message: str) -> str: + raise ValueError(message) + + +def test_predicate_works_normally(): + # Short message - predicate returns False, exception re-raised + with pytest.raises(ValueError): + function_with_working_predicate("Hi") + + # Long message - predicate returns True, exception suppressed + result = function_with_working_predicate("This is a long error message") + assert result is None diff --git a/packages/service-library/tests/test_logging_utils.py b/packages/service-library/tests/test_logging_utils.py index d56e07962f23..63f0d69c7883 100644 --- a/packages/service-library/tests/test_logging_utils.py +++ b/packages/service-library/tests/test_logging_utils.py @@ -1,26 +1,52 @@ # pylint:disable=redefined-outer-name # pylint:disable=unused-argument +import io import logging +import re from collections.abc import Iterable from contextlib import suppress from pathlib import Path from typing import Any import pytest +from common_library.logging.logging_base import get_log_record_extra from faker import Faker from servicelib.logging_utils import ( + _DEFAULT_FORMATTING, + CustomFormatter, LogExtra, LogLevelInt, LogMessageStr, + async_loggers, guess_message_log_level, log_context, log_decorator, log_exceptions, set_parent_module_log_level, ) +from tenacity import ( + retry, + retry_if_exception_type, + stop_after_delay, + wait_fixed, +) _logger = logging.getLogger(__name__) + + +@retry( + wait=wait_fixed(0.01), + stop=stop_after_delay(2.0), + reraise=True, + retry=retry_if_exception_type(AssertionError), +) +def _assert_check_log_message( + caplog: pytest.LogCaptureFixture, expected_message: str +) -> None: + assert expected_message in caplog.text + + _ALL_LOGGING_LEVELS = [ logging.CRITICAL, logging.ERROR, @@ -325,8 +351,9 @@ def test_log_exceptions_and_suppress_without_exc_info( caplog.set_level(level) exc_msg = "logs exceptions and suppresses" - with suppress(ValueError), log_exceptions( - _logger, level, "CONTEXT", exc_info=False + with ( + suppress(ValueError), + log_exceptions(_logger, level, "CONTEXT", exc_info=False), ): raise ValueError(exc_msg) @@ -410,3 +437,315 @@ def test_set_parent_module_log_level_(caplog: pytest.LogCaptureFixture): assert "parent warning" in caplog.text assert "child warning" in caplog.text + + +@pytest.mark.parametrize("log_format_local_dev_enabled", [True, False]) +def test_setup_async_loggers_basic( + caplog: pytest.LogCaptureFixture, + log_format_local_dev_enabled: bool, +): + """Test basic async logging setup without filters.""" + caplog.clear() + caplog.set_level(logging.INFO) + + with async_loggers( + log_format_local_dev_enabled=log_format_local_dev_enabled, + logger_filter_mapping={}, # No filters for this test + tracing_settings=None, # No tracing for this 
test + log_base_level=logging.INFO, # Set base log level + noisy_loggers=(), # No noisy loggers for this test + ): + test_logger = logging.getLogger("test_async_logger") + test_logger.info("Test async log message") + + _assert_check_log_message(caplog, "Test async log message") + + +def test_setup_async_loggers_with_filters( + caplog: pytest.LogCaptureFixture, +): + caplog.clear() + caplog.set_level(logging.INFO) + + # Define filter mapping + filter_mapping = { + "test_filtered_logger": ["filtered_message"], + } + + with async_loggers( + log_format_local_dev_enabled=True, + logger_filter_mapping=filter_mapping, + tracing_settings=None, # No tracing for this test + log_base_level=logging.INFO, # Set base log level + noisy_loggers=(), # No noisy loggers for this test + ): + test_logger = logging.getLogger("test_filtered_logger") + unfiltered_logger = logging.getLogger("test_unfiltered_logger") + + # This should be filtered out + test_logger.info("This is a filtered_message") + + # This should pass through + test_logger.info("This is an unfiltered message") + unfiltered_logger.info("This is from unfiltered logger") + + _assert_check_log_message(caplog, "This is an unfiltered message") + _assert_check_log_message(caplog, "This is from unfiltered logger") + + # Check that filtered message was not captured + assert "This is a filtered_message" not in caplog.text + + # Check that unfiltered messages were captured + assert "This is an unfiltered message" in caplog.text + assert "This is from unfiltered logger" in caplog.text + + +def test_setup_async_loggers_with_tracing_settings( + caplog: pytest.LogCaptureFixture, +): + """Test async logging setup with tracing settings.""" + caplog.clear() + caplog.set_level(logging.INFO) + + # Note: We can't easily test actual tracing without setting up OpenTelemetry + # But we can test that the function accepts the parameter + with async_loggers( + log_format_local_dev_enabled=False, + logger_filter_mapping={}, # No filters for this test + tracing_settings=None, + log_base_level=logging.INFO, # Set base log level + noisy_loggers=(), # No noisy loggers for this test + ): + test_logger = logging.getLogger("test_tracing_logger") + test_logger.info("Test message with tracing settings") + + _assert_check_log_message(caplog, "Test message with tracing settings") + + +def test_setup_async_loggers_context_manager_cleanup( + caplog: pytest.LogCaptureFixture, +): + """Test that async logging context manager properly cleans up.""" + caplog.clear() + caplog.set_level(logging.DEBUG) + + test_logger = logging.getLogger("test_cleanup_logger") + + with async_loggers( + log_format_local_dev_enabled=True, + logger_filter_mapping={}, + tracing_settings=None, + log_base_level=logging.INFO, # Set base log level + noisy_loggers=(), # No noisy loggers for this test + ): + # During the context, handlers should be replaced + test_logger.info("Message during context") + + _assert_check_log_message(caplog, "Message during context") + + +def test_setup_async_loggers_exception_handling( + caplog: pytest.LogCaptureFixture, +): + """Test that async logging handles exceptions gracefully.""" + caplog.clear() + caplog.set_level(logging.DEBUG) # Set to DEBUG to capture cleanup messages + + def _raise_test_exception(): + """Helper function to raise exception for testing.""" + exc_msg = "Test exception" + raise ValueError(exc_msg) + + try: + with async_loggers( + log_format_local_dev_enabled=True, + logger_filter_mapping={}, + tracing_settings=None, + log_base_level=logging.INFO, # Set base log 
level + noisy_loggers=(), # No noisy loggers for this test + ): + test_logger = logging.getLogger("test_exception_logger") + test_logger.info("Message before exception") + + _assert_check_log_message(caplog, "Message before exception") + + # Raise an exception to test cleanup + _raise_test_exception() + + except ValueError: + # Expected exception + pass + + # Check that the message was logged and cleanup happened + assert "Message before exception" in caplog.text + + +def _create_grok_regex_pattern() -> str: + """Convert Grok pattern to regex for testing.""" + # The Grok pattern from the comment: + # log_level=%{WORD:log_level} \| log_timestamp=%{TIMESTAMP_ISO8601:log_timestamp} \| log_source=%{NOTSPACE:log_source} \| log_uid=%{NOTSPACE:log_uid} \| log_oec=%{NOTSPACE:log_oec} \| log_trace_id=%{NOTSPACE:log_trace_id} \| log_span_id=%{NOTSPACE:log_span_id} \| log_msg=%{GREEDYDATA:log_msg} + + grok_to_regex = { + r"%{WORD:log_level}": r"(?P<log_level>\w+)", + r"%{TIMESTAMP_ISO8601:log_timestamp}": r"(?P<log_timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3})", + r"%{NOTSPACE:log_source}": r"(?P<log_source>\S+)", + r"%{NOTSPACE:log_uid}": r"(?P<log_uid>\S+)", + r"%{NOTSPACE:log_oec}": r"(?P<log_oec>\S+)", + r"%{NOTSPACE:log_trace_id}": r"(?P<log_trace_id>\S+)", + r"%{NOTSPACE:log_span_id}": r"(?P<log_span_id>\S+)", + r"%{GREEDYDATA:log_msg}": r"(?P<log_msg>.*)", + } + + grok_pattern = r"log_level=%{WORD:log_level} \| log_timestamp=%{TIMESTAMP_ISO8601:log_timestamp} \| log_source=%{NOTSPACE:log_source} \| log_uid=%{NOTSPACE:log_uid} \| log_oec=%{NOTSPACE:log_oec} \| log_trace_id=%{NOTSPACE:log_trace_id} \| log_span_id=%{NOTSPACE:log_span_id} \| log_msg=%{GREEDYDATA:log_msg}" + + # Convert to regex + regex_pattern = grok_pattern + for grok, regex in grok_to_regex.items(): + regex_pattern = regex_pattern.replace(grok, regex) + + return regex_pattern + + +def _create_test_log_record( + name: str, + level: int, + func_name: str, + lineno: int, + message: str, + *, + user_id: int | str | None = None, + error_code: str | None = None, + trace_id: str | None = None, + span_id: str | None = None, +) -> logging.LogRecord: + """Create a test LogRecord with optional extra fields.""" + + record = logging.LogRecord( + name=name, + level=level, + pathname="/path/to/file.py", + lineno=lineno, + msg=message, + args=(), + exc_info=None, + func=func_name, + ) + + # Add extra fields if provided + extra = get_log_record_extra(user_id=user_id, error_code=error_code) + if extra: + for key, value in extra.items(): + setattr(record, key, value) + + # Add OpenTelemetry trace ID + record.otelTraceID = trace_id # type: ignore[attr-defined] + record.otelSpanID = span_id # type: ignore[attr-defined] + + return record + + +def test_grok_pattern_parsing(caplog: pytest.LogCaptureFixture) -> None: + """ + Test that the Graylog Grok pattern correctly parses logs formatted with _DEFAULT_FORMATTING. + + This test validates that the Grok pattern defined in the comment can correctly + parse logs formatted with _DEFAULT_FORMATTING. + + WARNING: If log formatting changes, the Grok pattern in Graylog must be updated accordingly. 
+ """ + + # Create a custom handler with the default formatter + log_stream = io.StringIO() + handler = logging.StreamHandler(log_stream) + formatter = CustomFormatter(_DEFAULT_FORMATTING, log_format_local_dev_enabled=False) + handler.setFormatter(formatter) + + # Create test log record with all fields populated + test_message = ( + "This is a test log message with special chars: []{} and new line\nembedded" + ) + record = _create_test_log_record( + name="test.module.submodule", + level=logging.INFO, + func_name="test_function", + lineno=42, + message=test_message, + user_id=12345, + error_code="OEC001", + trace_id="1234567890abcdef1234567890abcdef", + span_id="987654321", + ) + + # Format the record + formatted_log = formatter.format(record) + + # Test that the formatted log matches the Grok pattern + regex_pattern = _create_grok_regex_pattern() + match = re.match(regex_pattern, formatted_log) + + assert ( + match is not None + ), f"Grok pattern did not match formatted log. Log: {formatted_log!r}" + + # Verify extracted fields match expected values + groups = match.groupdict() + + assert groups["log_level"] == "INFO" + assert groups["log_source"] == "test.module.submodule:test_function(42)" + assert groups["log_uid"] == "12345" + assert groups["log_oec"] == "OEC001" + assert groups["log_trace_id"] == "1234567890abcdef1234567890abcdef" + assert groups["log_span_id"] == "987654321" + + # Verify the message is correctly escaped (newlines become \\n) + expected_message = test_message.replace("\n", "\\n") + assert groups["log_msg"] == expected_message + + # Verify timestamp format is ISO8601-like (as expected by Python logging) + timestamp_pattern = r"\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3}" + assert re.match(timestamp_pattern, groups["log_timestamp"]) + + +def test_grok_pattern_parsing_with_none_values( + caplog: pytest.LogCaptureFixture, +) -> None: + """ + Test Grok pattern parsing when optional fields are None. + + WARNING: If log formatting changes, the Grok pattern in Graylog must be updated accordingly. + """ + + # Create a custom handler with the default formatter + handler = logging.StreamHandler(io.StringIO()) + formatter = CustomFormatter(_DEFAULT_FORMATTING, log_format_local_dev_enabled=False) + handler.setFormatter(formatter) + + # Create test log record with None values for optional fields + record = _create_test_log_record( + name="test.module", + level=logging.ERROR, + func_name="error_function", + lineno=100, + message="Error message", + user_id=None, + error_code=None, + trace_id=None, + span_id=None, + ) + + formatted_log = formatter.format(record) + regex_pattern = _create_grok_regex_pattern() + match = re.match(regex_pattern, formatted_log) + + assert ( + match is not None + ), f"Grok pattern did not match log with None values. 
Log: {formatted_log!r}" + + groups = match.groupdict() + assert groups["log_level"] == "ERROR" + assert groups["log_source"] == "test.module:error_function(100)" + assert groups["log_uid"] == "None" + assert groups["log_oec"] == "None" + assert groups["log_trace_id"] == "None" + assert groups["log_span_id"] == "None" + assert groups["log_msg"] == "Error message" diff --git a/packages/service-library/tests/test_utils_meta.py b/packages/service-library/tests/test_utils_meta.py index a6da532bc776..2468943268e1 100644 --- a/packages/service-library/tests/test_utils_meta.py +++ b/packages/service-library/tests/test_utils_meta.py @@ -1,7 +1,9 @@ from typing import Final +import pytest from models_library.basic_types import VersionStr from packaging.version import Version +from pytest_mock import MockerFixture from servicelib.utils_meta import PackageInfo @@ -32,3 +34,33 @@ def test_meta_module_implementation(): assert __version__ in APP_FINISHED_BANNER_MSG assert PROJECT_NAME in APP_FINISHED_BANNER_MSG + + +@pytest.mark.parametrize( + "package_name, app_name, is_valid_app_name, is_correct_app_name", + [ + ("simcore-service-library", "simcore-service-library", True, True), + ("simcore-service-lib", "simcore-service-library", True, False), + ("simcore_service_library", "simcore_service_library", False, True), + ], +) +def test_app_name( + mocker: MockerFixture, + package_name: str, + app_name: str, + is_valid_app_name: bool, + is_correct_app_name: bool, +): + + def mock_distribution(name): + return mocker.Mock(metadata={"Name": name, "Version": "1.0.0"}) + + mocker.patch("servicelib.utils_meta.distribution", side_effect=mock_distribution) + if is_valid_app_name: + info = PackageInfo(package_name=package_name) + if is_correct_app_name: + assert info.app_name == app_name + assert info.prometheus_friendly_app_name == app_name.replace("-", "_") + else: + with pytest.raises(ValueError): + _ = PackageInfo(package_name=package_name) diff --git a/packages/settings-library/requirements/_base.txt b/packages/settings-library/requirements/_base.txt index bc7e83313349..959a20139d50 100644 --- a/packages/settings-library/requirements/_base.txt +++ b/packages/settings-library/requirements/_base.txt @@ -1,6 +1,6 @@ annotated-types==0.7.0 # via pydantic -click==8.1.8 +click==8.2.1 # via typer markdown-it-py==3.0.0 # via rich @@ -11,7 +11,7 @@ orjson==3.10.15 # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/common-library/requirements/_base.in -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt @@ -19,9 +19,9 @@ pydantic==2.10.6 # -r requirements/_base.in # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.2 +pydantic-extra-types==2.10.5 # via -r requirements/../../../packages/common-library/requirements/_base.in pydantic-settings==2.7.0 # via @@ -32,17 +32,20 @@ pygments==2.19.1 # via rich python-dotenv==1.0.1 # via pydantic-settings -rich==13.9.4 +rich==14.1.0 # via # -r requirements/_base.in # typer shellingham==1.5.4 # via typer -typer==0.15.2 +typer==0.16.1 # via -r requirements/_base.in -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # pydantic # pydantic-core # pydantic-extra-types # typer + # typing-inspection 
+typing-inspection==0.4.1 + # via pydantic diff --git a/packages/settings-library/requirements/_test.txt b/packages/settings-library/requirements/_test.txt index fb8381375d51..6a2a4716993a 100644 --- a/packages/settings-library/requirements/_test.txt +++ b/packages/settings-library/requirements/_test.txt @@ -11,19 +11,25 @@ packaging==24.2 # pytest # pytest-sugar pluggy==1.5.0 - # via pytest -pytest==8.3.5 + # via + # pytest + # pytest-cov +pygments==2.19.1 + # via + # -c requirements/_base.txt + # pytest +pytest==8.4.1 # via # -r requirements/_test.in # pytest-cov # pytest-instafail # pytest-mock # pytest-sugar -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in pytest-instafail==0.5.0 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in diff --git a/packages/settings-library/requirements/_tools.txt b/packages/settings-library/requirements/_tools.txt index 13e0ee77ce64..7fa8b1865ba8 100644 --- a/packages/settings-library/requirements/_tools.txt +++ b/packages/settings-library/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # black @@ -27,9 +27,9 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # black # mypy @@ -41,7 +41,9 @@ packaging==24.2 # black # build pathspec==0.12.1 - # via black + # via + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -65,11 +67,11 @@ pyyaml==6.0.2 # pre-commit ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.2 +setuptools==80.9.0 # via pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # mypy diff --git a/packages/settings-library/src/settings_library/base.py b/packages/settings-library/src/settings_library/base.py index 9ab3119dfc79..8c5a06488ca9 100644 --- a/packages/settings-library/src/settings_library/base.py +++ b/packages/settings-library/src/settings_library/base.py @@ -1,6 +1,6 @@ import logging from functools import cached_property -from typing import Any, Final, get_origin +from typing import Any, Final from common_library.pydantic_fields_extension import get_type, is_literal, is_nullable from pydantic import ValidationInfo, field_validator @@ -15,9 +15,9 @@ _logger = logging.getLogger(__name__) -_AUTO_DEFAULT_FACTORY_RESOLVES_TO_NONE_FSTRING: Final[ - str -] = "{field_name} auto_default_from_env unresolved, defaulting to None" +_AUTO_DEFAULT_FACTORY_RESOLVES_TO_NONE_FSTRING: Final[str] = ( + "{field_name} auto_default_from_env unresolved, defaulting to None" +) class DefaultFromEnvFactoryError(ValueError): @@ -119,11 +119,14 @@ def _parse_none(cls, v, info: ValidationInfo): model_config = SettingsConfigDict( case_sensitive=True, # All must be capitalized - extra="forbid", + env_parse_none_str="null", + extra="ignore", # NOTE: if "strict" then fields with multiple aliases defined in the envs will fail to validate! 
frozen=True, - validate_default=True, ignored_types=(cached_property,), - env_parse_none_str="null", + populate_by_name=True, # NOTE: populate_by_name deprecated in pydantic v2.11+ + validate_by_alias=True, + validate_by_name=True, + validate_default=True, ) @classmethod @@ -133,28 +136,15 @@ def __pydantic_init_subclass__(cls, **kwargs: Any): for name, field in cls.model_fields.items(): auto_default_from_env = _is_auto_default_from_env_enabled(field) field_type = get_type(field) - - # Avoids issubclass raising TypeError. SEE test_issubclass_type_error_with_pydantic_models - is_not_composed = ( - get_origin(field_type) is None - ) # is not composed as dict[str, Any] or Generic[Base] is_not_literal = not is_literal(field) - if ( - is_not_literal - and is_not_composed - and issubclass(field_type, BaseCustomSettings) - ): + if is_not_literal and issubclass(field_type, BaseCustomSettings): if auto_default_from_env: # Builds a default factory `Field(default_factory=create_settings_from_env(field))` field.default_factory = _create_settings_from_env(name, field) field.default = None - elif ( - is_not_literal - and is_not_composed - and issubclass(field_type, BaseSettings) - ): + elif is_not_literal and issubclass(field_type, BaseSettings): msg = f"{cls}.{name} of type {field_type} must inherit from BaseCustomSettings" raise ValueError(msg) diff --git a/packages/settings-library/src/settings_library/docker_api_proxy.py b/packages/settings-library/src/settings_library/docker_api_proxy.py index 14f66f0934e5..cc002c5a3187 100644 --- a/packages/settings-library/src/settings_library/docker_api_proxy.py +++ b/packages/settings-library/src/settings_library/docker_api_proxy.py @@ -1,4 +1,5 @@ from functools import cached_property +from typing import Annotated from pydantic import Field, SecretStr @@ -7,12 +8,12 @@ class DockerApiProxysettings(BaseCustomSettings): - DOCKER_API_PROXY_HOST: str = Field( - description="hostname of the docker-api-proxy service" - ) - DOCKER_API_PROXY_PORT: PortInt = Field( - 8888, description="port of the docker-api-proxy service" - ) + DOCKER_API_PROXY_HOST: Annotated[ + str, Field(description="hostname of the docker-api-proxy service") + ] + DOCKER_API_PROXY_PORT: Annotated[ + PortInt, Field(description="port of the docker-api-proxy service") + ] = 8888 DOCKER_API_PROXY_SECURE: bool = False DOCKER_API_PROXY_USER: str diff --git a/packages/settings-library/src/settings_library/postgres.py b/packages/settings-library/src/settings_library/postgres.py index 83aa960c92cc..64276b7fdce4 100644 --- a/packages/settings-library/src/settings_library/postgres.py +++ b/packages/settings-library/src/settings_library/postgres.py @@ -1,15 +1,16 @@ from functools import cached_property -from typing import Annotated +from typing import Annotated, Self from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse from pydantic import ( AliasChoices, Field, + NonNegativeInt, PostgresDsn, SecretStr, - ValidationInfo, - field_validator, + model_validator, ) +from pydantic.config import JsonDict from pydantic_settings import SettingsConfigDict from .base import BaseCustomSettings @@ -30,32 +31,51 @@ class PostgresSettings(BaseCustomSettings): # pool connection limits POSTGRES_MINSIZE: Annotated[ - int, Field(description="Minimum number of connections in the pool", ge=1) + int, + Field( + description="Minimum number of connections in the pool that are always created and kept", + ge=1, + ), ] = 1 POSTGRES_MAXSIZE: Annotated[ - int, Field(description="Maximum number of connections in the pool", 
ge=1) + int, + Field( + description="Maximum number of connections in the pool that are kept", + ge=1, + ), ] = 50 + POSTGRES_MAX_POOLSIZE: Annotated[ + int, + Field( + description="Maximal number of connection in asyncpg pool (without overflow), lazily created on demand" + ), + ] = 10 + POSTGRES_MAX_OVERFLOW: Annotated[ + NonNegativeInt, Field(description="Maximal overflow connections") + ] = 20 POSTGRES_CLIENT_NAME: Annotated[ str | None, Field( description="Name of the application connecting the postgres database, will default to use the host hostname (hostname on linux)", validation_alias=AliasChoices( - "POSTGRES_CLIENT_NAME", # This is useful when running inside a docker container, then the hostname is set each client gets a different name + "POSTGRES_CLIENT_NAME", "HOST", "HOSTNAME", ), ), ] = None - @field_validator("POSTGRES_MAXSIZE") - @classmethod - def _check_size(cls, v, info: ValidationInfo): - if info.data["POSTGRES_MINSIZE"] > v: - msg = f"assert POSTGRES_MINSIZE={info.data['POSTGRES_MINSIZE']} <= POSTGRES_MAXSIZE={v}" + @model_validator(mode="after") + def validate_postgres_sizes(self) -> Self: + if self.POSTGRES_MINSIZE > self.POSTGRES_MAXSIZE: + msg = ( + f"assert POSTGRES_MINSIZE={self.POSTGRES_MINSIZE} <= " + f"POSTGRES_MAXSIZE={self.POSTGRES_MAXSIZE}" + ) raise ValueError(msg) - return v + return self @cached_property def dsn(self) -> str: @@ -81,19 +101,19 @@ def dsn_with_async_sqlalchemy(self) -> str: ) return f"{url}" - @cached_property - def dsn_with_query(self) -> str: + def dsn_with_query(self, application_name: str, *, suffix: str | None) -> str: """Some clients do not support queries in the dsn""" dsn = self.dsn - return self._update_query(dsn) + return self._update_query(dsn, application_name, suffix=suffix) + + def client_name(self, application_name: str, *, suffix: str | None) -> str: + return f"{application_name}{'-' if self.POSTGRES_CLIENT_NAME else ''}{self.POSTGRES_CLIENT_NAME or ''}{'-' + suffix if suffix else ''}" - def _update_query(self, uri: str) -> str: + def _update_query(self, uri: str, application_name: str, suffix: str | None) -> str: # SEE https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS - new_params: dict[str, str] = {} - if self.POSTGRES_CLIENT_NAME: - new_params = { - "application_name": self.POSTGRES_CLIENT_NAME, - } + new_params: dict[str, str] = { + "application_name": self.client_name(application_name, suffix=suffix), + } if new_params: parsed_uri = urlparse(uri) @@ -103,17 +123,36 @@ def _update_query(self, uri: str) -> str: return urlunparse(parsed_uri._replace(query=updated_query)) return uri - model_config = SettingsConfigDict( - json_schema_extra={ - "examples": [ - # minimal required - { - "POSTGRES_HOST": "localhost", - "POSTGRES_PORT": "5432", - "POSTGRES_USER": "usr", - "POSTGRES_PASSWORD": "secret", - "POSTGRES_DB": "db", - } - ], - } - ) + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + # minimal required + { + "POSTGRES_HOST": "localhost", + "POSTGRES_PORT": "5432", + "POSTGRES_USER": "usr", + "POSTGRES_PASSWORD": "secret", + "POSTGRES_DB": "db", + }, + # full example + { + "POSTGRES_HOST": "localhost", + "POSTGRES_PORT": "5432", + "POSTGRES_USER": "usr", + "POSTGRES_PASSWORD": "secret", + "POSTGRES_DB": "db", + "POSTGRES_MINSIZE": 1, + "POSTGRES_MAXSIZE": 50, + "POSTGRES_MAX_POOLSIZE": 10, + "POSTGRES_MAX_OVERFLOW": 20, + "POSTGRES_CLIENT_NAME": "my_app", # first-choice + "HOST": "should be ignored", + "HOST_NAME": "should be 
ignored", + }, + ], + } + ) + + model_config = SettingsConfigDict(json_schema_extra=_update_json_schema_extra) diff --git a/packages/settings-library/src/settings_library/redis.py b/packages/settings-library/src/settings_library/redis.py index 40dd88aabf98..28d6b6c66bdb 100644 --- a/packages/settings-library/src/settings_library/redis.py +++ b/packages/settings-library/src/settings_library/redis.py @@ -15,10 +15,11 @@ class RedisDatabase(IntEnum): SCHEDULED_MAINTENANCE = 3 USER_NOTIFICATIONS = 4 ANNOUNCEMENTS = 5 - DISTRIBUTED_IDENTIFIERS = 6 + LONG_RUNNING_TASKS = 6 DEFERRED_TASKS = 7 DYNAMIC_SERVICES = 8 CELERY_TASKS = 9 + DOCUMENTS = 10 class RedisSettings(BaseCustomSettings): diff --git a/packages/settings-library/src/settings_library/utils_cli.py b/packages/settings-library/src/settings_library/utils_cli.py index 106b1d6fb746..3d180c8be2d3 100644 --- a/packages/settings-library/src/settings_library/utils_cli.py +++ b/packages/settings-library/src/settings_library/utils_cli.py @@ -26,8 +26,9 @@ def print_as_envfile( **pydantic_export_options, ): exclude_unset = pydantic_export_options.get("exclude_unset", False) + settings_cls = settings_obj.__class__ - for name, field in settings_obj.model_fields.items(): + for name, field in settings_cls.model_fields.items(): auto_default_from_env = ( field.json_schema_extra is not None and field.json_schema_extra.get("auto_default_from_env", False) @@ -66,6 +67,9 @@ def print_as_envfile( typer.echo(f"# {field.description}") if isinstance(value, Enum): value = value.value + elif isinstance(value, dict | list): + # Serialize complex objects as JSON to ensure they can be parsed correctly + value = json_dumps(value) typer.echo(f"{name}={value}") diff --git a/packages/settings-library/tests/conftest.py b/packages/settings-library/tests/conftest.py index 0431a6c67487..c2a02e3a9b46 100644 --- a/packages/settings-library/tests/conftest.py +++ b/packages/settings-library/tests/conftest.py @@ -4,6 +4,7 @@ import sys from pathlib import Path +from typing import Annotated import pytest import settings_library @@ -96,13 +97,15 @@ class _ApplicationSettings(BaseCustomSettings): # NOTE: by convention, an addon is disabled when APP_ADDON=None, so we make this # entry nullable as well - APP_OPTIONAL_ADDON: _ModuleSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) + APP_OPTIONAL_ADDON: Annotated[ + _ModuleSettings | None, + Field(json_schema_extra={"auto_default_from_env": True}), + ] # NOTE: example of a group that cannot be disabled (not nullable) - APP_REQUIRED_PLUGIN: PostgresSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) + APP_REQUIRED_PLUGIN: Annotated[ + PostgresSettings | None, + Field(json_schema_extra={"auto_default_from_env": True}), + ] return _ApplicationSettings diff --git a/packages/settings-library/tests/data/.env-compact b/packages/settings-library/tests/data/.env-compact index a0292aedbec6..b11273684dc2 100644 --- a/packages/settings-library/tests/data/.env-compact +++ b/packages/settings-library/tests/data/.env-compact @@ -3,4 +3,4 @@ APP_HOST=localhost APP_PORT=80 APP_OPTIONAL_ADDON='{"MODULE_VALUE": 10, "MODULE_VALUE_DEFAULT": 42}' -APP_REQUIRED_PLUGIN='{"POSTGRES_HOST": "localhost", "POSTGRES_PORT": 5432, "POSTGRES_USER": "foo", "POSTGRES_PASSWORD": "**********", "POSTGRES_DB": "foodb", "POSTGRES_MINSIZE": 1, "POSTGRES_MAXSIZE": 50, "POSTGRES_CLIENT_NAME": "None"}' +APP_REQUIRED_PLUGIN='{"POSTGRES_HOST": "localhost", "POSTGRES_PORT": 5432, "POSTGRES_USER": "foo", 
"POSTGRES_PASSWORD": "**********", "POSTGRES_DB": "foodb", "POSTGRES_MINSIZE": 1, "POSTGRES_MAX_POOLSIZE": 10, "POSTGRES_MAX_OVERFLOW": 20, "POSTGRES_MAXSIZE": 50, "POSTGRES_CLIENT_NAME": "None"}' diff --git a/packages/settings-library/tests/data/.env-granular b/packages/settings-library/tests/data/.env-granular index f1c1f9c703c7..c27099b7b98b 100644 --- a/packages/settings-library/tests/data/.env-granular +++ b/packages/settings-library/tests/data/.env-granular @@ -12,9 +12,9 @@ POSTGRES_USER=foo POSTGRES_PASSWORD=********** # Database name POSTGRES_DB=foodb -# Minimum number of connections in the pool POSTGRES_MINSIZE=1 -# Maximum number of connections in the pool +POSTGRES_MAX_POOLSIZE=10 +POSTGRES_MAX_OVERFLOW=20 POSTGRES_MAXSIZE=50 # Name of the application connecting the postgres database, will default to use the host hostname (hostname on linux) POSTGRES_CLIENT_NAME=None diff --git a/packages/settings-library/tests/data/.env-mixed b/packages/settings-library/tests/data/.env-mixed index 5333630c3ca0..4aed86d3b9b7 100644 --- a/packages/settings-library/tests/data/.env-mixed +++ b/packages/settings-library/tests/data/.env-mixed @@ -12,9 +12,9 @@ POSTGRES_USER=foo POSTGRES_PASSWORD=********** # Database name POSTGRES_DB=foodb -# Minimum number of connections in the pool POSTGRES_MINSIZE=1 -# Maximum number of connections in the pool +POSTGRES_MAX_POOLSIZE=10 +POSTGRES_MAX_OVERFLOW=20 POSTGRES_MAXSIZE=50 # Name of the application connecting the postgres database, will default to use the host hostname (hostname on linux) POSTGRES_CLIENT_NAME=None diff --git a/packages/settings-library/tests/data/.env-sample b/packages/settings-library/tests/data/.env-sample index cfc2ed996850..d6f115f76c23 100644 --- a/packages/settings-library/tests/data/.env-sample +++ b/packages/settings-library/tests/data/.env-sample @@ -8,9 +8,9 @@ POSTGRES_USER=foo POSTGRES_PASSWORD=secret # Database name POSTGRES_DB=foodb -# Maximum number of connections in the pool POSTGRES_MINSIZE=1 -# Minimum number of connections in the pool +POSTGRES_MAX_POOLSIZE=10 +POSTGRES_MAX_OVERFLOW=20 POSTGRES_MAXSIZE=50 # --- APP_MODULE_FIELD --- diff --git a/packages/settings-library/tests/test__models_examples.py b/packages/settings-library/tests/test__models_examples.py index c93ed934cf13..beaaefc88802 100644 --- a/packages/settings-library/tests/test__models_examples.py +++ b/packages/settings-library/tests/test__models_examples.py @@ -2,7 +2,7 @@ import pytest import settings_library -from pydantic import BaseModel +from pydantic_settings import BaseSettings from pytest_simcore.pydantic_models import ( assert_validation_model, walk_model_examples_in_package, @@ -14,9 +14,19 @@ walk_model_examples_in_package(settings_library), ) def test_all_settings_library_models_config_examples( - model_cls: type[BaseModel], example_name: str, example_data: Any + model_cls: type[BaseSettings], example_name: str, example_data: Any ): + assert ( + model_cls.model_config.get("populate_by_name") is True + ), f"populate_by_name must be enabled in {model_cls}. 
It will be deprecated in the future but for now it is required to use aliases in the settings" + assert ( + model_cls.model_config.get("validate_by_alias") is True + ), f"validate_by_alias must be enabled in {model_cls}" + assert ( + model_cls.model_config.get("validate_by_name") is True + ), f"validate_by_name must be enabled in {model_cls}" + assert_validation_model( model_cls, example_name=example_name, example_data=example_data ) diff --git a/packages/settings-library/tests/test__pydantic_settings.py b/packages/settings-library/tests/test__pydantic_settings.py index eb2989852cbf..a2d06c8a3ef7 100644 --- a/packages/settings-library/tests/test__pydantic_settings.py +++ b/packages/settings-library/tests/test__pydantic_settings.py @@ -2,7 +2,7 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable -""" Tests subtle details about pydantic models +"""Tests subtle details about pydantic models This test suite intends to "freeze" some concepts/invariants from pydantic upon which we are going to build this libraries. @@ -17,7 +17,12 @@ import pytest from common_library.basic_types import LogLevel from common_library.pydantic_fields_extension import is_nullable -from pydantic import AliasChoices, Field, ValidationInfo, field_validator +from pydantic import ( + AliasChoices, + Field, + ValidationInfo, + field_validator, +) from pydantic_core import PydanticUndefined from pydantic_settings import BaseSettings from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict diff --git a/packages/settings-library/tests/test_base.py b/packages/settings-library/tests/test_base.py index d4ebd9877606..026abdfc5d60 100644 --- a/packages/settings-library/tests/test_base.py +++ b/packages/settings-library/tests/test_base.py @@ -6,7 +6,7 @@ import json from collections.abc import Callable -from typing import Any +from typing import Annotated, Any import pydantic import pytest @@ -74,12 +74,18 @@ class M1(BaseCustomSettings): VALUE_NULLABLE_DEFAULT_VALUE: S | None = S(S_VALUE=42) VALUE_NULLABLE_DEFAULT_NULL: S | None = None - VALUE_NULLABLE_DEFAULT_ENV: S | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) - VALUE_DEFAULT_ENV: S = Field( - json_schema_extra={"auto_default_from_env": True} - ) + VALUE_NULLABLE_DEFAULT_ENV: Annotated[ + S | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + ), + ] + VALUE_DEFAULT_ENV: Annotated[ + S, + Field( + json_schema_extra={"auto_default_from_env": True}, + ), + ] class M2(BaseCustomSettings): # @@ -91,14 +97,20 @@ class M2(BaseCustomSettings): VALUE_NULLABLE_DEFAULT_NULL: S | None = None # defaults enabled but if not exists, it disables - VALUE_NULLABLE_DEFAULT_ENV: S | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) + VALUE_NULLABLE_DEFAULT_ENV: Annotated[ + S | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + ), + ] # cannot be disabled - VALUE_DEFAULT_ENV: S = Field( - json_schema_extra={"auto_default_from_env": True} - ) + VALUE_DEFAULT_ENV: Annotated[ + S, + Field( + json_schema_extra={"auto_default_from_env": True}, + ), + ] # Changed in version 3.7: Dictionary order is guaranteed to be insertion order _classes = {"M1": M1, "M2": M2, "S": S} @@ -108,7 +120,7 @@ class M2(BaseCustomSettings): def test_create_settings_class( - create_settings_class: Callable[[str], type[BaseCustomSettings]] + create_settings_class: Callable[[str], type[BaseCustomSettings]], ): M = create_settings_class("M1") @@ -216,9 +228,12 @@ def test_auto_default_to_none_logs_a_warning( class 
SettingsClass(BaseCustomSettings): VALUE_NULLABLE_DEFAULT_NULL: S | None = None - VALUE_NULLABLE_DEFAULT_ENV: S | None = Field( - json_schema_extra={"auto_default_from_env": True}, - ) + VALUE_NULLABLE_DEFAULT_ENV: Annotated[ + S | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + ), + ] = None instance = SettingsClass.create_from_envs() assert instance.VALUE_NULLABLE_DEFAULT_NULL is None @@ -245,9 +260,12 @@ def test_auto_default_to_not_none( class SettingsClass(BaseCustomSettings): VALUE_NULLABLE_DEFAULT_NULL: S | None = None - VALUE_NULLABLE_DEFAULT_ENV: S | None = Field( - json_schema_extra={"auto_default_from_env": True}, - ) + VALUE_NULLABLE_DEFAULT_ENV: Annotated[ + S | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + ), + ] = None instance = SettingsClass.create_from_envs() assert instance.VALUE_NULLABLE_DEFAULT_NULL is None @@ -317,32 +335,38 @@ class SettingsClassExt(SettingsClass): } -def test_issubclass_type_error_with_pydantic_models(): - # There is a problem - # - # TypeError: issubclass() arg 1 must be a class - # - # SEE https://github.com/pydantic/pydantic/issues/545 - # - # >> issubclass(dict, BaseSettings) - # False - # >> issubclass(dict[str, str], BaseSettings) - # Traceback (most recent call last): - # File "<stdin>", line 1, in <module> - # File "/home/crespo/.pyenv/versions/3.10.13/lib/python3.10/abc.py", line 123, in __subclasscheck__ - # return _abc_subclasscheck(cls, subclass) - # TypeError: issubclass() arg 1 must be a class - # +def test_fixed_issubclass_type_error_with_pydantic_models(): assert not issubclass(dict, BaseSettings) - - # NOTE: this should be fixed by pydantic at some point. When this happens, this test will fail - with pytest.raises(TypeError): - issubclass(dict[str, str], BaseSettings) + assert not issubclass( + # FIXED with + # + # pydantic 2.11.7 + # pydantic_core 2.33.2 + # pydantic-extra-types 2.10.5 + # pydantic-settings 2.7.0 + # + # + # TypeError: issubclass() arg 1 must be a class + # + # SEE https://github.com/pydantic/pydantic/issues/545 + # + # >> issubclass(dict, BaseSettings) + # False + # >> issubclass(dict[str, str], BaseSettings) + # Traceback (most recent call last): + # File "<stdin>", line 1, in <module> + # File "/home/crespo/.pyenv/versions/3.10.13/lib/python3.10/abc.py", line 123, in __subclasscheck__ + # return _abc_subclasscheck(cls, subclass) + # TypeError: issubclass() arg 1 must be a class + # + dict[str, str], + BaseSettings, + ) # here reproduces the problem with our settings that ANE and PC had class SettingsClassThatFailed(BaseCustomSettings): - FOO: dict[str, str] | None = Field(default=None) + FOO: dict[str, str] | None = None SettingsClassThatFailed(FOO={}) assert SettingsClassThatFailed(FOO=None) == SettingsClassThatFailed() @@ -352,9 +376,7 @@ def test_upgrade_failure_to_pydantic_settings_2_6( mock_env_devel_environment: EnvVarsDict, ): class ProblematicSettings(BaseCustomSettings): - WEBSERVER_EMAIL: SMTPSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) + WEBSERVER_EMAIL: SMTPSettings | None = None model_config = SettingsConfigDict(nested_model_default_partial_update=True) diff --git a/packages/settings-library/tests/test_base_w_postgres.py b/packages/settings-library/tests/test_base_w_postgres.py index 37329a4e9bb9..641d1df62a3d 100644 --- a/packages/settings-library/tests/test_base_w_postgres.py +++ b/packages/settings-library/tests/test_base_w_postgres.py @@ -5,6 +5,7 @@ import os from collections.abc import Callable +from typing import Annotated import pytest from 
pydantic import AliasChoices, Field, ValidationError, __version__ @@ -53,17 +54,22 @@ class _FakePostgresSettings(BaseCustomSettings): POSTGRES_USER: str POSTGRES_PASSWORD: str - POSTGRES_DB: str = Field(...) - - POSTGRES_MINSIZE: int = Field(1, ge=1) - POSTGRES_MAXSIZE: int = Field(50, ge=1) - - POSTGRES_CLIENT_NAME: str | None = Field( - None, - validation_alias=AliasChoices( - "HOST", "HOSTNAME", "POSTGRES_CLIENT_NAME" + POSTGRES_DB: str + POSTGRES_MINSIZE: Annotated[int, Field(ge=1)] = 1 + POSTGRES_MAXSIZE: Annotated[int, Field(ge=1)] = 50 + POSTGRES_MAX_POOLSIZE: int = 10 + POSTGRES_MAX_OVERFLOW: Annotated[int, Field(ge=0)] = 20 + + POSTGRES_CLIENT_NAME: Annotated[ + str | None, + Field( + validation_alias=AliasChoices( + "POSTGRES_CLIENT_NAME", + "HOST", + "HOSTNAME", + ), ), - ) + ] = None # # Different constraints on WEBSERVER_POSTGRES subsettings @@ -77,15 +83,17 @@ class S2(BaseCustomSettings): class S3(BaseCustomSettings): # cannot be disabled!! - WEBSERVER_POSTGRES_DEFAULT_ENV: _FakePostgresSettings = Field( - json_schema_extra={"auto_default_from_env": True} - ) + WEBSERVER_POSTGRES_DEFAULT_ENV: Annotated[ + _FakePostgresSettings, + Field(json_schema_extra={"auto_default_from_env": True}), + ] class S4(BaseCustomSettings): # defaults enabled but if cannot be resolved, it disables - WEBSERVER_POSTGRES_NULLABLE_DEFAULT_ENV: _FakePostgresSettings | None = ( - Field(json_schema_extra={"auto_default_from_env": True}) - ) + WEBSERVER_POSTGRES_NULLABLE_DEFAULT_ENV: Annotated[ + _FakePostgresSettings | None, + Field(json_schema_extra={"auto_default_from_env": True}), + ] class S5(BaseCustomSettings): # defaults disabled but only explicit enabled @@ -120,7 +128,6 @@ class S5(BaseCustomSettings): def test_parse_from_empty_envs( postgres_envvars_unset: None, model_classes_factory: Callable ): - S1, S2, S3, S4, S5 = model_classes_factory() with pytest.raises(ValidationError, match="WEBSERVER_POSTGRES") as exc_info: @@ -154,7 +161,6 @@ def test_parse_from_individual_envs( monkeypatch: pytest.MonkeyPatch, model_classes_factory: Callable, ): - S1, S2, S3, S4, S5 = model_classes_factory() # environment @@ -195,6 +201,8 @@ def test_parse_from_individual_envs( "POSTGRES_DB": "db", "POSTGRES_MAXSIZE": 50, "POSTGRES_MINSIZE": 1, + "POSTGRES_MAX_POOLSIZE": 10, + "POSTGRES_MAX_OVERFLOW": 20, "POSTGRES_CLIENT_NAME": None, } } @@ -210,6 +218,8 @@ def test_parse_from_individual_envs( "POSTGRES_DB": "db", "POSTGRES_MAXSIZE": 50, "POSTGRES_MINSIZE": 1, + "POSTGRES_MAX_POOLSIZE": 10, + "POSTGRES_MAX_OVERFLOW": 20, "POSTGRES_CLIENT_NAME": None, } } @@ -222,7 +232,6 @@ def test_parse_from_individual_envs( def test_parse_compact_env( postgres_envvars_unset: None, monkeypatch, model_classes_factory ): - S1, S2, S3, S4, S5 = model_classes_factory() # environment @@ -257,6 +266,8 @@ def test_parse_compact_env( "POSTGRES_DB": "db2", "POSTGRES_MAXSIZE": 50, "POSTGRES_MINSIZE": 1, + "POSTGRES_MAX_POOLSIZE": 10, + "POSTGRES_MAX_OVERFLOW": 20, "POSTGRES_CLIENT_NAME": None, } } @@ -336,7 +347,6 @@ def test_parse_compact_env( def test_parse_from_mixed_envs( postgres_envvars_unset: None, monkeypatch, model_classes_factory ): - S1, S2, S3, S4, S5 = model_classes_factory() # environment @@ -367,6 +377,8 @@ def test_parse_from_mixed_envs( "POSTGRES_DB": "db2", "POSTGRES_MAXSIZE": 50, "POSTGRES_MINSIZE": 1, + "POSTGRES_MAX_POOLSIZE": 10, + "POSTGRES_MAX_OVERFLOW": 20, "POSTGRES_CLIENT_NAME": None, } } @@ -466,7 +478,6 @@ def test_parse_from_mixed_envs( def test_toggle_plugin_1( postgres_envvars_unset: None, monkeypatch, 
model_classes_factory ): - *_, S4, S5 = model_classes_factory() # empty environ @@ -529,7 +540,6 @@ def test_toggle_plugin_3( def test_toggle_plugin_4( postgres_envvars_unset: None, monkeypatch, model_classes_factory ): - *_, S4, S5 = model_classes_factory() JSON_VALUE = '{"POSTGRES_HOST":"pg2", "POSTGRES_USER":"test2", "POSTGRES_PASSWORD":"shh2", "POSTGRES_DB":"db2"}' @@ -559,7 +569,6 @@ def test_toggle_plugin_4( ) with monkeypatch.context() as patch: - # Enables both but remove individuals setenvs_from_envfile( patch, diff --git a/packages/settings-library/tests/test_postgres.py b/packages/settings-library/tests/test_postgres.py index 6c9067c2d6b1..bdc33901f6eb 100644 --- a/packages/settings-library/tests/test_postgres.py +++ b/packages/settings-library/tests/test_postgres.py @@ -6,6 +6,7 @@ from urllib.parse import urlparse import pytest +from faker import Faker from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from settings_library.postgres import PostgresSettings @@ -24,7 +25,6 @@ def mock_environment(mock_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPa def test_cached_property_dsn(mock_environment: EnvVarsDict): - settings = PostgresSettings.create_from_envs() # all are upper-case @@ -36,14 +36,17 @@ def test_cached_property_dsn(mock_environment: EnvVarsDict): assert "dsn" not in settings.model_dump() -def test_dsn_with_query(mock_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch): +def test_dsn_with_query( + mock_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch, faker: Faker +): settings = PostgresSettings() assert settings.POSTGRES_CLIENT_NAME assert settings.dsn == "postgresql://foo:secret@localhost:5432/foodb" + app_name = faker.pystr() assert ( - settings.dsn_with_query - == "postgresql://foo:secret@localhost:5432/foodb?application_name=Some+%2643+funky+name" + settings.dsn_with_query(app_name, suffix="my-suffix") + == f"postgresql://foo:secret@localhost:5432/foodb?application_name={app_name}-Some+%2643+funky+name-my-suffix" ) with monkeypatch.context() as patch: @@ -51,7 +54,9 @@ def test_dsn_with_query(mock_environment: EnvVarsDict, monkeypatch: pytest.Monke settings = PostgresSettings() assert not settings.POSTGRES_CLIENT_NAME - assert settings.dsn == settings.dsn_with_query + assert f"{settings.dsn}?application_name=blah" == settings.dsn_with_query( + "blah", suffix=None + ) def test_dsn_with_async_sqlalchemy_has_query( diff --git a/packages/settings-library/tests/test_utils_cli.py b/packages/settings-library/tests/test_utils_cli.py index 49c684ea6265..a7194e1b4db0 100644 --- a/packages/settings-library/tests/test_utils_cli.py +++ b/packages/settings-library/tests/test_utils_cli.py @@ -6,7 +6,7 @@ import logging from collections.abc import Callable from io import StringIO -from typing import Any +from typing import Annotated, Any import pytest import typer @@ -76,6 +76,8 @@ def fake_granular_env_file_content() -> str: POSTGRES_DB=foodb POSTGRES_MINSIZE=1 POSTGRES_MAXSIZE=50 + POSTGRES_MAX_POOLSIZE=10 + POSTGRES_MAX_OVERFLOW=20 POSTGRES_CLIENT_NAME=None MODULE_VALUE=10 """ @@ -190,6 +192,8 @@ def test_cli_default_settings_envs( "POSTGRES_DB": "foodb", "POSTGRES_MINSIZE": 1, "POSTGRES_MAXSIZE": 50, + "POSTGRES_MAX_POOLSIZE": 10, + "POSTGRES_MAX_OVERFLOW": 20, "POSTGRES_CLIENT_NAME": None, }, } @@ -221,6 +225,8 @@ def test_cli_compact_settings_envs( "POSTGRES_DB": "foodb", "POSTGRES_MINSIZE": 1, "POSTGRES_MAXSIZE": 50, + "POSTGRES_MAX_POOLSIZE": 10, + "POSTGRES_MAX_OVERFLOW": 
20, "POSTGRES_CLIENT_NAME": None, }, } @@ -244,7 +250,7 @@ def test_cli_compact_settings_envs( "APP_HOST": "localhost", "APP_PORT": "80", "APP_OPTIONAL_ADDON": '{"MODULE_VALUE":10,"MODULE_VALUE_DEFAULT":42}', - "APP_REQUIRED_PLUGIN": '{"POSTGRES_HOST":"localhost","POSTGRES_PORT":5432,"POSTGRES_USER":"foo","POSTGRES_PASSWORD":"secret","POSTGRES_DB":"foodb","POSTGRES_MINSIZE":1,"POSTGRES_MAXSIZE":50,"POSTGRES_CLIENT_NAME":null}', + "APP_REQUIRED_PLUGIN": '{"POSTGRES_HOST":"localhost","POSTGRES_PORT":5432,"POSTGRES_USER":"foo","POSTGRES_PASSWORD":"secret","POSTGRES_DB":"foodb","POSTGRES_MINSIZE":1,"POSTGRES_MAXSIZE":50,"POSTGRES_MAX_POOLSIZE":10,"POSTGRES_MAX_OVERFLOW":20,"POSTGRES_CLIENT_NAME":null}', } settings_2 = fake_settings_class() @@ -261,7 +267,7 @@ def test_compact_format( APP_HOST=localhost APP_PORT=80 APP_OPTIONAL_ADDON='{"MODULE_VALUE": 10, "MODULE_VALUE_DEFAULT": 42}' - APP_REQUIRED_PLUGIN='{"POSTGRES_HOST": "localhost", "POSTGRES_PORT": 5432, "POSTGRES_USER": "foo", "POSTGRES_PASSWORD": "secret", "POSTGRES_DB": "foodb", "POSTGRES_MINSIZE": 1, "POSTGRES_MAXSIZE": 50, "POSTGRES_CLIENT_NAME": "None"}' + APP_REQUIRED_PLUGIN='{"POSTGRES_HOST": "localhost", "POSTGRES_PORT": 5432, "POSTGRES_USER": "foo", "POSTGRES_PASSWORD": "secret", "POSTGRES_DB": "foodb", "POSTGRES_MINSIZE": 1, "POSTGRES_MAXSIZE": 50, "POSTGRES_MAX_POOLSIZE": 10, "POSTGRES_MAX_OVERFLOW": 20, "POSTGRES_CLIENT_NAME": "None"}' """, ) @@ -292,10 +298,10 @@ def test_granular_format( POSTGRES_PASSWORD=secret # Database name POSTGRES_DB=foodb - # Minimum number of connections in the pool POSTGRES_MINSIZE=1 - # Maximum number of connections in the pool POSTGRES_MAXSIZE=50 + POSTGRES_MAX_POOLSIZE=10 + POSTGRES_MAX_OVERFLOW=20 # Name of the application connecting the postgres database, will default to use the host hostname (hostname on linux) POSTGRES_CLIENT_NAME=None """, @@ -315,6 +321,8 @@ def test_granular_format( "POSTGRES_DB": "foodb", "POSTGRES_MINSIZE": 1, "POSTGRES_MAXSIZE": 50, + "POSTGRES_MAX_POOLSIZE": 10, + "POSTGRES_MAX_OVERFLOW": 20, "POSTGRES_CLIENT_NAME": None, }, ) @@ -414,7 +422,7 @@ def test_cli_settings_exclude_unset_as_json( def test_print_as(capsys: pytest.CaptureFixture): class FakeSettings(BaseCustomSettings): - INTEGER: int = Field(..., description="Some info") + INTEGER: Annotated[int, Field(description="Some info")] SECRET: SecretStr URL: AnyHttpUrl diff --git a/packages/settings-library/tests/test_utils_logging.py b/packages/settings-library/tests/test_utils_logging.py index d63a8ae8538c..f847a716e5f9 100644 --- a/packages/settings-library/tests/test_utils_logging.py +++ b/packages/settings-library/tests/test_utils_logging.py @@ -1,4 +1,5 @@ import logging +from typing import Annotated from pydantic import AliasChoices, Field, field_validator from settings_library.base import BaseCustomSettings @@ -17,17 +18,19 @@ class Settings(BaseCustomSettings, MixinLoggingSettings): SC_BOOT_MODE: BootModeEnum | None = None # LOGGING - LOG_LEVEL: str = Field( - "WARNING", - validation_alias=AliasChoices( - "APPNAME_LOG_LEVEL", - "LOG_LEVEL", + LOG_LEVEL: Annotated[ + str, + Field( + validation_alias=AliasChoices( + "APPNAME_LOG_LEVEL", + "LOG_LEVEL", + ), ), - ) + ] = "WARNING" - APPNAME_DEBUG: bool = Field( - default=False, description="Starts app in debug mode" - ) + APPNAME_DEBUG: Annotated[ + bool, Field(description="Starts app in debug mode") + ] = False @field_validator("LOG_LEVEL", mode="before") @classmethod diff --git a/packages/simcore-sdk/requirements/_base.in 
b/packages/simcore-sdk/requirements/_base.in index 9be327aed363..4ce6caec6571 100644 --- a/packages/simcore-sdk/requirements/_base.in +++ b/packages/simcore-sdk/requirements/_base.in @@ -13,9 +13,10 @@ aiocache aiofiles aiohttp +httpx packaging pint -sqlalchemy[asyncio] pydantic[email] +sqlalchemy[asyncio] tenacity tqdm diff --git a/packages/simcore-sdk/requirements/_base.txt b/packages/simcore-sdk/requirements/_base.txt index 97a80119b7f0..c614e77032c5 100644 --- a/packages/simcore-sdk/requirements/_base.txt +++ b/packages/simcore-sdk/requirements/_base.txt @@ -14,7 +14,7 @@ aiofiles==24.1.0 # -r requirements/_base.in aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -44,6 +44,7 @@ anyio==4.8.0 # via # fast-depends # faststream + # httpx arrow==1.3.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in @@ -72,17 +73,13 @@ certifi==2025.1.31 # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # httpcore + # httpx # requests charset-normalizer==3.4.1 # via requests -click==8.1.8 +click==8.2.1 # via typer -deprecated==1.2.18 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions dnspython==2.7.0 # via email-validator email-validator==2.2.0 @@ -101,7 +98,7 @@ frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.68.0 +googleapis-common-protos==1.70.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -109,21 +106,47 @@ greenlet==3.1.1 # via sqlalchemy grpcio==1.70.0 # via opentelemetry-exporter-otlp-proto-grpc +h11==0.16.0 + # via httpcore +httpcore==1.0.9 + # via httpx +httpx==0.28.1 + # via + # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/_base.in idna==3.10 # via # anyio # email-validator + # httpx # requests # yarl importlib-metadata==8.5.0 # via opentelemetry-api +jsonref==1.1.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.23.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema-specifications==2024.10.1 # via jsonschema -mako==1.3.9 +mako==1.3.10 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -150,7 +173,7 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.30.0 +opentelemetry-api==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -163,51 +186,51 @@ opentelemetry-api==1.30.0 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.30.0 +opentelemetry-exporter-otlp==1.34.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.30.0 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.30.0 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.30.0 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.51b0 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.51b0 +opentelemetry-instrumentation-aio-pika==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-asyncpg==0.51b0 - # via -r requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-instrumentation-logging==0.51b0 +opentelemetry-instrumentation-asyncpg==0.55b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-logging==0.55b1 # via -r 
requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.51b0 +opentelemetry-instrumentation-redis==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.51b0 +opentelemetry-instrumentation-requests==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.30.0 +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.30.0 +opentelemetry-sdk==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.51b0 +opentelemetry-semantic-conventions==0.55b1 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.51b0 +opentelemetry-util-http==0.55b1 # via opentelemetry-instrumentation-requests orjson==3.10.15 # via @@ -248,7 +271,7 @@ propcache==0.3.0 # via # aiohttp # yarl -protobuf==5.29.3 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto @@ -258,7 +281,7 @@ psycopg2-binary==2.9.10 # via sqlalchemy pycryptodome==3.21.0 # via stream-zip -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -291,9 +314,9 @@ pydantic==2.10.6 # fast-depends # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.2 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -384,9 +407,9 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via opentelemetry-exporter-otlp-proto-http -rich==13.9.4 +rich==14.1.0 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -432,13 +455,13 @@ tqdm==4.67.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_base.in -typer==0.15.2 +typer==0.16.1 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in types-python-dateutil==2.9.0.20241206 # via arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # aiodebug # alembic @@ -446,13 +469,20 @@ typing-extensions==4.12.2 # faststream # flexcache # flexparser + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pint # pydantic # pydantic-core # pydantic-extra-types # typer -urllib3==2.3.0 + # typing-inspection +typing-inspection==0.4.1 + # via pydantic +urllib3==2.5.0 # via # -c 
requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -471,7 +501,6 @@ urllib3==2.3.0 # requests wrapt==1.17.2 # via - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-redis diff --git a/packages/simcore-sdk/requirements/_test.txt b/packages/simcore-sdk/requirements/_test.txt index 9601c8200551..82fe7b4fdfd7 100644 --- a/packages/simcore-sdk/requirements/_test.txt +++ b/packages/simcore-sdk/requirements/_test.txt @@ -10,7 +10,7 @@ aiohappyeyeballs==2.6.1 # via # -c requirements/_base.txt # aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -73,7 +73,7 @@ charset-normalizer==3.4.1 # via # -c requirements/_base.txt # requests -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # -r requirements/_test.in @@ -101,7 +101,7 @@ flask==3.1.0 # via # flask-cors # moto -flask-cors==5.0.1 +flask-cors==6.0.1 # via moto frozenlist==1.5.0 # via @@ -125,7 +125,7 @@ iniconfig==2.0.0 # via pytest itsdangerous==2.2.0 # via flask -jinja2==3.1.5 +jinja2==3.1.6 # via # -c requirements/../../../requirements/constraints.txt # flask @@ -158,7 +158,7 @@ jsonschema-specifications==2024.10.1 # openapi-schema-validator lazy-object-proxy==1.10.0 # via openapi-spec-validator -mako==1.3.9 +mako==1.3.10 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -179,9 +179,9 @@ multidict==6.1.0 # aiobotocore # aiohttp # yarl -mypy==1.15.0 +mypy==1.16.1 # via sqlalchemy -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via mypy networkx==3.4.2 # via cfn-lint @@ -197,8 +197,12 @@ packaging==24.2 # pytest-sugar pathable==0.4.4 # via jsonschema-path +pathspec==0.12.1 + # via mypy pluggy==1.5.0 - # via pytest + # via + # pytest + # pytest-cov ply==3.11 # via jsonpath-ng pprintpp==0.4.0 @@ -212,18 +216,22 @@ py-partiql-parser==0.6.1 # via moto pycparser==2.22 # via cffi -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via # -c requirements/_base.txt # pydantic +pygments==2.19.1 + # via + # -c requirements/_base.txt + # pytest pyparsing==3.2.1 # via moto -pytest==8.3.5 +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio @@ -233,21 +241,21 @@ pytest==8.3.5 # pytest-mock # pytest-sugar # pytest-xdist -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in pytest-icdiff==0.9 # via -r requirements/_test.in pytest-instafail==0.5.0 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in pytest-sugar==1.0.0 # via -r requirements/_test.in -pytest-xdist==3.6.1 +pytest-xdist==3.8.0 # via -r requirements/_test.in python-dateutil==2.9.0.post0 # via @@ -276,7 +284,7 @@ referencing==0.35.1 # jsonschema-specifications regex==2024.11.6 # via cfn-lint -requests==2.32.3 +requests==2.32.4 # via # -c requirements/_base.txt # -r requirements/_test.in @@ -295,7 +303,7 @@ rpds-py==0.23.1 # referencing s3transfer==0.11.3 # via boto3 -setuptools==75.8.2 +setuptools==80.9.0 # via moto six==1.17.0 # via @@ -326,7 +334,7 @@ 
types-requests==2.32.0.20250301 # via types-tqdm types-tqdm==4.67.0.20250301 # via -r requirements/_test.in -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # alembic @@ -338,9 +346,14 @@ typing-extensions==4.12.2 # sqlalchemy2-stubs # types-aiobotocore # types-aiobotocore-s3 + # typing-inspection +typing-inspection==0.4.1 + # via + # -c requirements/_base.txt + # pydantic tzdata==2025.1 # via faker -urllib3==2.3.0 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/packages/simcore-sdk/requirements/_tools.txt b/packages/simcore-sdk/requirements/_tools.txt index 57c8ea032462..606e31a1bf7e 100644 --- a/packages/simcore-sdk/requirements/_tools.txt +++ b/packages/simcore-sdk/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -28,11 +28,11 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # -c requirements/_test.txt # black @@ -46,7 +46,10 @@ packaging==24.2 # black # build pathspec==0.12.1 - # via black + # via + # -c requirements/_test.txt + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -73,13 +76,13 @@ pyyaml==6.0.2 # pre-commit ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.2 +setuptools==80.9.0 # via # -c requirements/_test.txt # pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py b/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py index db552f193b72..679c9645aea2 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py @@ -169,7 +169,7 @@ async def _state_metadata_entry_exists( async def _delete_legacy_archive( - project_id: ProjectID, node_uuid: NodeID, path: Path + project_id: ProjectID, node_uuid: NodeID, path: Path, *, application_name: str ) -> None: """removes the .zip state archive from storage""" s3_object = __create_s3_object_key( @@ -180,13 +180,15 @@ async def _delete_legacy_archive( # NOTE: if service is opened by a person which the users shared it with, # they will not have the permission to delete the node # Removing it via it's owner allows to always have access to the delete operation. 
- owner_id = await DBManager().get_project_owner_user_id(project_id) + owner_id = await DBManager( + application_name=application_name + ).get_project_owner_user_id(project_id) await filemanager.delete_file( user_id=owner_id, store_id=SIMCORE_LOCATION, s3_object=s3_object ) -async def push( +async def push( # pylint: disable=too-many-arguments user_id: UserID, project_id: ProjectID, node_uuid: NodeID, @@ -198,6 +200,7 @@ async def push( progress_bar: ProgressBarData, aws_s3_cli_settings: AwsS3CliSettings | None, legacy_state: LegacyState | None, + application_name: str, ) -> None: """pushes and removes the legacy archive if present""" @@ -226,6 +229,7 @@ async def push( project_id=project_id, node_uuid=node_uuid, path=source_path, + application_name=application_name, ) if legacy_state: @@ -244,6 +248,7 @@ async def push( project_id=project_id, node_uuid=node_uuid, path=legacy_state.old_state_path, + application_name=application_name, ) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager_utils.py index 484380fded76..043b763764ad 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager_utils.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/_filemanager_utils.py @@ -82,7 +82,9 @@ async def complete_upload( state_url = _get_https_link_if_storage_secure( f"{file_upload_complete_response.data.links.state}" ) - _logger.info("completed upload of %s", f"{len(parts)} parts, received {state_url}") + _logger.info( + "required upload completion of %s", f"{len(parts)} parts, received {state_url}" + ) async for attempt in AsyncRetrying( reraise=True, @@ -101,14 +103,14 @@ async def complete_upload( ).validate_python(await resp.json()) assert future_enveloped.data # nosec if future_enveloped.data.state == FileUploadCompleteState.NOK: - msg = "upload not ready yet" + msg = "upload not ready yet (FileUploadCompleteState.NOK)" raise ValueError(msg) if is_directory: assert future_enveloped.data.e_tag is None # nosec return None assert future_enveloped.data.e_tag # nosec - _logger.debug( + _logger.info( "multipart upload completed in %s, received %s", attempt.retry_state.retry_object.statistics, f"{future_enveloped.data.e_tag=}", diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py index 76d6d8222d31..21c0f0173b91 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/dbmanager.py @@ -82,22 +82,25 @@ async def _update_comp_run_snapshot_tasks_if_computational( class DBContextManager: - def __init__(self, db_engine: AsyncEngine | None = None) -> None: + def __init__( + self, db_engine: AsyncEngine | None = None, *, application_name: str + ) -> None: self._db_engine: AsyncEngine | None = db_engine self._db_engine_created: bool = False + self._application_name: str = application_name @staticmethod - async def _create_db_engine() -> AsyncEngine: + async def _create_db_engine(application_name: str) -> AsyncEngine: settings = NodePortsSettings.create_from_envs() engine = await create_async_engine_and_database_ready( - settings.POSTGRES_SETTINGS + settings.POSTGRES_SETTINGS, f"{application_name}-simcore-sdk" ) assert isinstance(engine, AsyncEngine) # nosec return engine async def __aenter__(self) -> AsyncEngine: if not self._db_engine: - self._db_engine = await self._create_db_engine() + 
self._db_engine = await self._create_db_engine(self._application_name) self._db_engine_created = True return self._db_engine @@ -107,8 +110,9 @@ async def __aexit__(self, exc_type, exc, tb) -> None: class DBManager: - def __init__(self, db_engine: AsyncEngine | None = None): + def __init__(self, db_engine: AsyncEngine | None = None, *, application_name: str): self._db_engine = db_engine + self._application_name = application_name async def write_ports_configuration( self, @@ -124,7 +128,9 @@ async def write_ports_configuration( node_configuration = json_loads(json_configuration) async with ( - DBContextManager(self._db_engine) as engine, + DBContextManager( + self._db_engine, application_name=self._application_name + ) as engine, engine.begin() as connection, ): # 1. Update comp_tasks table @@ -154,7 +160,9 @@ async def get_ports_configuration_from_node_uuid( "Getting ports configuration of node %s from comp_tasks table", node_uuid ) async with ( - DBContextManager(self._db_engine) as engine, + DBContextManager( + self._db_engine, application_name=self._application_name + ) as engine, engine.connect() as connection, ): node = await _get_node_from_db(project_id, node_uuid, connection) @@ -171,7 +179,9 @@ async def get_ports_configuration_from_node_uuid( async def get_project_owner_user_id(self, project_id: ProjectID) -> UserID: async with ( - DBContextManager(self._db_engine) as engine, + DBContextManager( + self._db_engine, application_name=self._application_name + ) as engine, engine.connect() as connection, ): prj_owner = await connection.scalar( diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py index 51aa3bae3c1c..be5cde27a24a 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py @@ -7,10 +7,10 @@ from typing import IO, Any, Final, Protocol, runtime_checkable import aiofiles +import httpx from aiohttp import ( ClientConnectionError, ClientError, - ClientPayloadError, ClientResponse, ClientResponseError, ClientSession, @@ -39,6 +39,7 @@ from tqdm.contrib.logging import tqdm_logging_redirect from yarl import URL +from ..config.http_clients import client_request_settings from . import exceptions from .constants import CHUNK_SIZE @@ -148,13 +149,13 @@ async def __call__(self, log: str) -> None: ... 
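# ---------------------------------------------------------------------------
# NOTE: minimal usage sketch (not part of this diff) for the DBManager change
# above. `application_name` is now a required keyword-only argument; it is
# presumably used to tag the Postgres connection (suffixed with "-simcore-sdk"
# in _create_db_engine). "my-service" below is a hypothetical caller name.
from simcore_sdk.node_ports_common.dbmanager import DBManager

db_manager = DBManager(application_name="my-service")
# The async engine itself is still created lazily, on first use, by DBContextManager.
# ---------------------------------------------------------------------------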
async def _file_chunk_writer( file: Path, - response: ClientResponse, + response: httpx.Response, pbar: tqdm, io_log_redirect_cb: LogRedirectCB | None, progress_bar: ProgressBarData, ): async with aiofiles.open(file, "wb") as file_pointer: - while chunk := await response.content.read(CHUNK_SIZE): + async for chunk in response.aiter_bytes(CHUNK_SIZE): await file_pointer.write(chunk) if io_log_redirect_cb and pbar.update(len(chunk)): with log_catch(_logger, reraise=False): @@ -172,7 +173,6 @@ async def _file_chunk_writer( async def download_link_to_file( - session: ClientSession, url: URL, file_path: Path, *, @@ -185,16 +185,25 @@ async def download_link_to_file( reraise=True, wait=wait_exponential(min=1, max=10), stop=stop_after_attempt(num_retries), - retry=retry_if_exception_type(ClientConnectionError), + retry=retry_if_exception_type(httpx.TransportError), before_sleep=before_sleep_log(_logger, logging.WARNING, exc_info=True), after=after_log(_logger, log_level=logging.ERROR), ): with attempt: async with AsyncExitStack() as stack: - response = await stack.enter_async_context(session.get(url)) - if response.status == status.HTTP_404_NOT_FOUND: + client = await stack.enter_async_context( + httpx.AsyncClient( + timeout=httpx.Timeout( + client_request_settings.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT + ) + ) + ) + response = await stack.enter_async_context( + client.stream("GET", f"{url}") + ) + if response.status_code == status.HTTP_404_NOT_FOUND: raise exceptions.InvalidDownloadLinkError(url) - if response.status > _VALID_HTTP_STATUS_CODES: + if response.status_code > _VALID_HTTP_STATUS_CODES: raise exceptions.TransferError(url) file_path.parent.mkdir(parents=True, exist_ok=True) # SEE https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Length @@ -231,7 +240,7 @@ async def download_link_to_file( sub_progress, ) _logger.debug("Download complete") - except ClientPayloadError as exc: + except httpx.HTTPError as exc: raise exceptions.TransferError(url) from exc diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py index 5fdd631474d1..0849e8a0732a 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py @@ -217,7 +217,6 @@ async def download_path_from_s3( return await download_file_from_link( download_link, local_path, - client_session=session, io_log_redirect_cb=io_log_redirect_cb, progress_bar=progress_bar, ) @@ -229,7 +228,6 @@ async def download_file_from_link( *, io_log_redirect_cb: LogRedirectCB | None, file_name: str | None = None, - client_session: ClientSession | None = None, progress_bar: ProgressBarData, ) -> Path: # a download link looks something like: @@ -242,15 +240,14 @@ async def download_file_from_link( if io_log_redirect_cb: await io_log_redirect_cb(f"downloading {local_file_path}, please wait...") - async with ClientSessionContextManager(client_session) as session: - await download_link_to_file( - session, - download_link, - local_file_path, - num_retries=NodePortsSettings.create_from_envs().NODE_PORTS_IO_NUM_RETRY_ATTEMPTS, - io_log_redirect_cb=io_log_redirect_cb, - progress_bar=progress_bar, - ) + + await download_link_to_file( + download_link, + local_file_path, + num_retries=NodePortsSettings.create_from_envs().NODE_PORTS_IO_NUM_RETRY_ATTEMPTS, + io_log_redirect_cb=io_log_redirect_cb, + progress_bar=progress_bar, + ) if io_log_redirect_cb: await 
io_log_redirect_cb(f"download of {local_file_path} complete.") return local_file_path diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/__init__.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/__init__.py index 8874f98efe74..83c7435b4eeb 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/__init__.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/__init__.py @@ -22,17 +22,11 @@ async def ports( project_id: ProjectIDStr, node_uuid: NodeIDStr, *, - db_manager: DBManager | None = None, + db_manager: DBManager, r_clone_settings: RCloneSettings | None = None, io_log_redirect_cb: LogRedirectCB | None = None, aws_s3_cli_settings: AwsS3CliSettings | None = None ) -> Nodeports: - log.debug("creating node_ports_v2 object using provided dbmanager: %s", db_manager) - # NOTE: warning every dbmanager create a new db engine! - if db_manager is None: # NOTE: keeps backwards compatibility - log.debug("no db manager provided, creating one...") - db_manager = DBManager() - return await load( db_manager=db_manager, user_id=user_id, @@ -47,9 +41,9 @@ async def ports( __all__ = ( "DBManager", - "exceptions", "FileLinkType", "Nodeports", "Port", + "exceptions", "ports", ) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py index 014aff565298..a7f9ec22fd04 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py @@ -114,10 +114,9 @@ def check_value(cls, v: DataItemValue, info: ValidationInfo) -> DataItemValue: and not isinstance(v, PortLink) ): if port_utils.is_file_type(property_type): - if not isinstance(v, (FileLink, DownloadLink)): - raise ValueError( - f"{property_type!r} value does not validate against any of FileLink, DownloadLink or PortLink schemas" - ) + if not isinstance(v, FileLink | DownloadLink): + msg = f"{property_type!r} value does not validate against any of FileLink, DownloadLink or PortLink schemas" + raise ValueError(msg) elif property_type == "ref_contentSchema": v, _ = validate_port_content( port_key=info.data.get("key"), @@ -125,10 +124,11 @@ def check_value(cls, v: DataItemValue, info: ValidationInfo) -> DataItemValue: unit=None, content_schema=info.data.get("content_schema", {}), ) - elif isinstance(v, (list, dict)): - raise TypeError( + elif isinstance(v, list | dict): + msg = ( f"Containers as {v} currently only supported within content_schema." ) + raise TypeError(msg) return v @field_validator("value_item", "value_concrete", mode="before") @@ -194,28 +194,29 @@ async def get_value( ) async def _evaluate() -> ItemValue | None: + # NOTE: review types returned by this function !!! 
if isinstance(self.value, PortLink): # this is a link to another node's port - other_port_itemvalue: None | ( - ItemValue - ) = await port_utils.get_value_link_from_port_link( - self.value, - # pylint: disable=protected-access - self._node_ports._node_ports_creator_cb, - file_link_type=file_link_type, + other_port_itemvalue: ItemValue | None = ( + await port_utils.get_value_link_from_port_link( + self.value, + # pylint: disable=protected-access + self._node_ports._node_ports_creator_cb, + file_link_type=file_link_type, + ) ) return other_port_itemvalue if isinstance(self.value, FileLink): # let's get the download/upload link from storage - url_itemvalue: None | ( - AnyUrl - ) = await port_utils.get_download_link_from_storage( - # pylint: disable=protected-access - user_id=self._node_ports.user_id, - value=self.value, - link_type=file_link_type, + url_itemvalue: AnyUrl | None = ( + await port_utils.get_download_link_from_storage( + # pylint: disable=protected-access + user_id=self._node_ports.user_id, + value=self.value, + link_type=file_link_type, + ) ) return url_itemvalue @@ -256,15 +257,15 @@ async def _evaluate() -> ItemConcreteValue | None: if isinstance(self.value, PortLink): # this is a link to another node - other_port_concretevalue: None | ( - ItemConcreteValue - ) = await port_utils.get_value_from_link( - # pylint: disable=protected-access - key=self.key, - value=self.value, - file_to_key_map=self.file_to_key_map, - node_port_creator=self._node_ports._node_ports_creator_cb, # noqa: SLF001 - progress_bar=progress_bar, + other_port_concretevalue: None | ItemConcreteValue = ( + await port_utils.get_value_from_link( + # pylint: disable=protected-access + key=self.key, + value=self.value, + file_to_key_map=self.file_to_key_map, + node_port_creator=self._node_ports._node_ports_creator_cb, # noqa: SLF001 + progress_bar=progress_bar, + ) ) value = other_port_concretevalue diff --git a/packages/simcore-sdk/tests/conftest.py b/packages/simcore-sdk/tests/conftest.py index 39bd7070bae4..f8086084d748 100644 --- a/packages/simcore-sdk/tests/conftest.py +++ b/packages/simcore-sdk/tests/conftest.py @@ -10,6 +10,7 @@ import pytest import simcore_sdk +from faker import Faker from helpers.utils_port_v2 import CONSTANT_UUID from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.postgres_tools import PostgresTestConfig @@ -23,10 +24,12 @@ pytest_plugins = [ "pytest_simcore.aws_s3_service", "pytest_simcore.aws_server", + "pytest_simcore.db_entries_mocks", "pytest_simcore.disk_usage_monitoring", "pytest_simcore.docker_compose", "pytest_simcore.docker_swarm", "pytest_simcore.file_extra", + "pytest_simcore.logging", "pytest_simcore.minio_service", "pytest_simcore.postgres_service", "pytest_simcore.pytest_global_environs", @@ -66,8 +69,7 @@ def empty_configuration_file() -> Path: @pytest.fixture def node_ports_config( postgres_host_config: PostgresTestConfig, minio_s3_settings_envs: EnvVarsDict -) -> None: - ... +) -> None: ... 
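# ---------------------------------------------------------------------------
# NOTE: minimal usage sketch (not part of this diff) for the node_ports_v2
# change above: ports() no longer falls back to creating its own DBManager,
# so callers must now pass one explicitly. "my-service" is a hypothetical
# application name.
from simcore_sdk import node_ports_v2


async def _example_ports(user_id, project_id, node_uuid):
    db_manager = node_ports_v2.DBManager(application_name="my-service")
    return await node_ports_v2.ports(
        user_id=user_id,
        project_id=project_id,
        node_uuid=node_uuid,
        db_manager=db_manager,
    )
# ---------------------------------------------------------------------------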
@pytest.fixture @@ -84,3 +86,8 @@ def constant_uuid4(mocker: MockerFixture) -> None: "simcore_sdk.node_ports_common.data_items_utils.uuid4", return_value=CONSTANT_UUID, ) + + +@pytest.fixture +def mock_app_name(faker: Faker) -> str: + return faker.pystr() diff --git a/packages/simcore-sdk/tests/integration/conftest.py b/packages/simcore-sdk/tests/integration/conftest.py index b32fc4aa1dfa..c7c755c24d5e 100644 --- a/packages/simcore-sdk/tests/integration/conftest.py +++ b/packages/simcore-sdk/tests/integration/conftest.py @@ -19,10 +19,10 @@ from models_library.users import UserID from pydantic import TypeAdapter from pytest_simcore.helpers.faker_factories import random_project, random_user +from pytest_simcore.helpers.postgres_tools import sync_insert_and_get_row_lifespan from settings_library.aws_s3_cli import AwsS3CliSettings from settings_library.r_clone import RCloneSettings, S3Provider from settings_library.s3 import S3Settings -from simcore_postgres_database.models.comp_pipeline import comp_pipeline from simcore_postgres_database.models.comp_tasks import comp_tasks from simcore_postgres_database.models.file_meta_data import file_meta_data from simcore_postgres_database.models.projects import projects @@ -41,18 +41,16 @@ def user_id(postgres_db: sa.engine.Engine) -> Iterable[UserID]: # which would turn this test too complex. # pylint: disable=no-value-for-parameter - with postgres_db.connect() as conn: - result = conn.execute( - users.insert().values(**random_user(name="test")).returning(users.c.id) - ) - row = result.first() - assert row - usr_id = row[users.c.id] - - yield usr_id + with sync_insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup + postgres_db, + table=users, + values=random_user( + name="test", + ), + pk_col=users.c.id, + ) as user_row: - with postgres_db.connect() as conn: - conn.execute(users.delete().where(users.c.id == usr_id)) + yield user_row["id"] @pytest.fixture @@ -100,9 +98,9 @@ def _create(key: str, file_path: Path) -> SimcoreS3FileID: @pytest.fixture() -def default_configuration( +async def default_configuration( node_ports_config: None, - create_pipeline: Callable[[str], str], + create_pipeline: Callable[..., Awaitable[dict[str, Any]]], create_task: Callable[..., str], default_configuration_file: Path, project_id: str, @@ -110,7 +108,7 @@ def default_configuration( ) -> dict[str, Any]: # prepare database with default configuration json_configuration = default_configuration_file.read_text() - create_pipeline(project_id) + await create_pipeline(project_id=project_id) return _set_configuration(create_task, project_id, node_uuid, json_configuration) @@ -167,15 +165,15 @@ async def _create(file_path: Path) -> dict[str, Any]: @pytest.fixture() -def create_special_configuration( +async def create_special_configuration( node_ports_config: None, - create_pipeline: Callable[[str], str], + create_pipeline: Callable[..., Awaitable[dict[str, Any]]], create_task: Callable[..., str], empty_configuration_file: Path, project_id: str, node_uuid: str, -) -> Callable: - def _create( +) -> Callable[..., Awaitable[tuple[dict, str, str]]]: + async def _create( inputs: list[tuple[str, str, Any]] | None = None, outputs: list[tuple[str, str, Any]] | None = None, project_id: str = project_id, @@ -184,7 +182,7 @@ def _create( config_dict = json.loads(empty_configuration_file.read_text()) _assign_config(config_dict, "inputs", inputs if inputs else []) _assign_config(config_dict, "outputs", outputs if outputs else []) - project_id = 
create_pipeline(project_id) + await create_pipeline(project_id=project_id) config_dict = _set_configuration( create_task, project_id, node_id, json.dumps(config_dict) ) @@ -194,13 +192,13 @@ def _create( @pytest.fixture() -def create_2nodes_configuration( +async def create_2nodes_configuration( node_ports_config: None, - create_pipeline: Callable[[str], str], + create_pipeline: Callable[..., Awaitable[dict[str, Any]]], create_task: Callable[..., str], empty_configuration_file: Path, -) -> Callable: - def _create( +) -> Callable[..., Awaitable[tuple[dict, str, str]]]: + async def _create( prev_node_inputs: list[tuple[str, str, Any]], prev_node_outputs: list[tuple[str, str, Any]], inputs: list[tuple[str, str, Any]], @@ -209,7 +207,7 @@ def _create( previous_node_id: str, node_id: str, ) -> tuple[dict, str, str]: - create_pipeline(project_id) + await create_pipeline(project_id=project_id) # create previous node previous_config_dict = json.loads(empty_configuration_file.read_text()) @@ -241,34 +239,6 @@ def _create( return _create -@pytest.fixture -def create_pipeline(postgres_db: sa.engine.Engine) -> Iterator[Callable[[str], str]]: - created_pipeline_ids: list[str] = [] - - def _create(project_id: str) -> str: - with postgres_db.connect() as conn: - result = conn.execute( - comp_pipeline.insert() # pylint: disable=no-value-for-parameter - .values(project_id=project_id) - .returning(comp_pipeline.c.project_id) - ) - row = result.first() - assert row - new_pipeline_id = row[comp_pipeline.c.project_id] - created_pipeline_ids.append(f"{new_pipeline_id}") - return new_pipeline_id - - yield _create - - # cleanup - with postgres_db.connect() as conn: - conn.execute( - comp_pipeline.delete().where( # pylint: disable=no-value-for-parameter - comp_pipeline.c.project_id.in_(created_pipeline_ids) - ) - ) - - @pytest.fixture def create_task(postgres_db: sa.engine.Engine) -> Iterator[Callable[..., str]]: created_task_ids: list[int] = [] diff --git a/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py b/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py index a25e95aa715f..f0ba46092397 100644 --- a/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py +++ b/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py @@ -292,6 +292,7 @@ async def test_delete_legacy_archive( project_id=project_id, node_uuid=node_uuid, path=content_path, + application_name=faker.pystr(), ) assert ( diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_common_dbmanager.py b/packages/simcore-sdk/tests/integration/test_node_ports_common_dbmanager.py index db8e51d7dbd4..cfd97db1c982 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_common_dbmanager.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_common_dbmanager.py @@ -3,7 +3,7 @@ # pylint:disable=redefined-outer-name import json -from collections.abc import Callable +from collections.abc import Awaitable, Callable from pathlib import Path from simcore_sdk.node_ports_common.dbmanager import DBManager @@ -21,8 +21,9 @@ async def test_db_manager_read_config( node_uuid: str, node_ports_config: None, default_configuration: dict, + mock_app_name: str, ): - db_manager = DBManager() + db_manager = DBManager(application_name=mock_app_name) ports_configuration_str = await db_manager.get_ports_configuration_from_node_uuid( project_id, node_uuid ) @@ -35,15 +36,16 @@ async def test_db_manager_write_config( project_id: str, node_uuid: str, node_ports_config: None, - 
create_special_configuration: Callable, + create_special_configuration: Callable[..., Awaitable[tuple[dict, str, str]]], default_configuration_file: Path, + mock_app_name: str, ): # create an empty config - create_special_configuration() + await create_special_configuration() # read the default config json_configuration = default_configuration_file.read_text() # write the default config to the database - db_manager = DBManager() + db_manager = DBManager(application_name=mock_app_name) await db_manager.write_ports_configuration( json_configuration, project_id, node_uuid ) diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py b/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py index 88d16e383d25..9832f758a2ef 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_v2_nodeports2.py @@ -34,6 +34,7 @@ from servicelib.progress_bar import ProgressBarData from settings_library.r_clone import RCloneSettings from simcore_sdk import node_ports_v2 +from simcore_sdk.node_ports_common.dbmanager import DBManager from simcore_sdk.node_ports_common.exceptions import UnboundPortError from simcore_sdk.node_ports_v2 import exceptions from simcore_sdk.node_ports_v2.links import ItemConcreteValue, PortLink @@ -162,12 +163,18 @@ async def option_r_clone_settings( return None +@pytest.fixture +def default_db_manager(faker: Faker) -> DBManager: + return node_ports_v2.DBManager(application_name=f"pytest_{faker.pystr()}") + + async def test_default_configuration( user_id: int, project_id: str, node_uuid: NodeIDStr, default_configuration: dict[str, Any], option_r_clone_settings: RCloneSettings | None, + default_db_manager: DBManager, ): config_dict = default_configuration await check_config_valid( @@ -176,6 +183,7 @@ async def test_default_configuration( project_id=project_id, node_uuid=node_uuid, r_clone_settings=option_r_clone_settings, + db_manager=default_db_manager, ), config_dict, ) @@ -185,15 +193,17 @@ async def test_invalid_ports( user_id: int, project_id: str, node_uuid: NodeIDStr, - create_special_configuration: Callable, + create_special_configuration: Callable[..., Awaitable[tuple[dict, str, str]]], option_r_clone_settings: RCloneSettings | None, + default_db_manager: DBManager, ): - config_dict, _, _ = create_special_configuration() + config_dict, _, _ = await create_special_configuration() PORTS = await node_ports_v2.ports( user_id=user_id, project_id=project_id, node_uuid=node_uuid, r_clone_settings=option_r_clone_settings, + db_manager=default_db_manager, ) await check_config_valid(PORTS, config_dict) @@ -223,14 +233,15 @@ async def test_port_value_accessors( user_id: int, project_id: str, node_uuid: NodeIDStr, - create_special_configuration: Callable, + create_special_configuration: Callable[..., Awaitable[tuple[dict, str, str]]], item_type: str, item_value: ItemConcreteValue, item_pytype: type, option_r_clone_settings: RCloneSettings | None, + default_db_manager: DBManager, ): # pylint: disable=W0613, W0621 item_key = TypeAdapter(ServicePortKey).validate_python("some_key") - config_dict, _, _ = create_special_configuration( + config_dict, _, _ = await create_special_configuration( inputs=[(item_key, item_type, item_value)], outputs=[(item_key, item_type, None)], ) @@ -240,6 +251,7 @@ async def test_port_value_accessors( project_id=project_id, node_uuid=node_uuid, r_clone_settings=option_r_clone_settings, + db_manager=default_db_manager, ) await 
check_config_valid(PORTS, config_dict) @@ -266,7 +278,7 @@ async def test_port_value_accessors( ], ) async def test_port_file_accessors( - create_special_configuration: Callable, + create_special_configuration: Callable[..., Awaitable[tuple[dict, str, str]]], s3_simcore_location: LocationID, item_type: str, item_value: str, @@ -279,6 +291,7 @@ async def test_port_file_accessors( option_r_clone_settings: RCloneSettings | None, request: pytest.FixtureRequest, constant_uuid4: None, + default_db_manager: DBManager, ): if item_value == "symlink_path": item_value = request.getfixturevalue("symlink_path") @@ -287,7 +300,7 @@ async def test_port_file_accessors( config_value["path"] = f"{project_id}/{node_uuid}/{Path(config_value['path']).name}" - config_dict, _project_id, _node_uuid = create_special_configuration( + config_dict, _project_id, _node_uuid = await create_special_configuration( inputs=[("in_1", item_type, config_value)], outputs=[("out_34", item_type, None)], ) @@ -300,6 +313,7 @@ async def test_port_file_accessors( project_id=project_id, node_uuid=node_uuid, r_clone_settings=option_r_clone_settings, + db_manager=default_db_manager, ) await check_config_valid(PORTS, config_dict) assert ( @@ -367,16 +381,18 @@ async def test_adding_new_ports( user_id: int, project_id: str, node_uuid: NodeIDStr, - create_special_configuration: Callable, + create_special_configuration: Callable[..., Awaitable[tuple[dict, str, str]]], postgres_db: sa.engine.Engine, option_r_clone_settings: RCloneSettings | None, + default_db_manager: DBManager, ): - config_dict, project_id, node_uuid = create_special_configuration() + config_dict, project_id, node_uuid = await create_special_configuration() PORTS = await node_ports_v2.ports( user_id=user_id, project_id=project_id, node_uuid=node_uuid, r_clone_settings=option_r_clone_settings, + db_manager=default_db_manager, ) await check_config_valid(PORTS, config_dict) @@ -418,11 +434,12 @@ async def test_removing_ports( user_id: int, project_id: str, node_uuid: NodeIDStr, - create_special_configuration: Callable, + create_special_configuration: Callable[..., Awaitable[tuple[dict, str, str]]], postgres_db: sa.engine.Engine, option_r_clone_settings: RCloneSettings | None, + default_db_manager: DBManager, ): - config_dict, project_id, node_uuid = create_special_configuration( + config_dict, project_id, node_uuid = await create_special_configuration( inputs=[("in_14", "integer", 15), ("in_17", "boolean", False)], outputs=[("out_123", "string", "blahblah"), ("out_2", "number", -12.3)], ) # pylint: disable=W0612 @@ -431,6 +448,7 @@ async def test_removing_ports( project_id=project_id, node_uuid=node_uuid, r_clone_settings=option_r_clone_settings, + db_manager=default_db_manager, ) await check_config_valid(PORTS, config_dict) # let's remove the first input @@ -469,14 +487,15 @@ async def test_get_value_from_previous_node( user_id: int, project_id: str, node_uuid: NodeIDStr, - create_2nodes_configuration: Callable, + create_2nodes_configuration: Callable[..., Awaitable[tuple[dict, str, str]]], create_node_link: Callable, item_type: str, item_value: ItemConcreteValue, item_pytype: type, option_r_clone_settings: RCloneSettings | None, + default_db_manager: DBManager, ): - config_dict, _, _ = create_2nodes_configuration( + config_dict, _, _ = await create_2nodes_configuration( prev_node_inputs=None, prev_node_outputs=[("output_int", item_type, item_value)], inputs=[("in_15", item_type, create_node_link("output_int"))], @@ -491,6 +510,7 @@ async def 
test_get_value_from_previous_node( project_id=project_id, node_uuid=node_uuid, r_clone_settings=option_r_clone_settings, + db_manager=default_db_manager, ) await check_config_valid(PORTS, config_dict) @@ -515,7 +535,7 @@ async def test_get_value_from_previous_node( ], ) async def test_get_file_from_previous_node( - create_2nodes_configuration: Callable, + create_2nodes_configuration: Callable[..., Awaitable[tuple[dict, str, str]]], user_id: int, project_id: str, node_uuid: NodeIDStr, @@ -526,8 +546,9 @@ async def test_get_file_from_previous_node( item_pytype: type, option_r_clone_settings: RCloneSettings | None, constant_uuid4: None, + default_db_manager: DBManager, ): - config_dict, _, _ = create_2nodes_configuration( + config_dict, _, _ = await create_2nodes_configuration( prev_node_inputs=None, prev_node_outputs=[ ("output_int", item_type, await create_store_link(item_value)) @@ -543,6 +564,7 @@ async def test_get_file_from_previous_node( project_id=project_id, node_uuid=node_uuid, r_clone_settings=option_r_clone_settings, + db_manager=default_db_manager, ) await check_config_valid(PORTS, config_dict) file_path = await (await PORTS.inputs)[ @@ -572,7 +594,7 @@ async def test_get_file_from_previous_node( ], ) async def test_get_file_from_previous_node_with_mapping_of_same_key_name( - create_2nodes_configuration: Callable, + create_2nodes_configuration: Callable[..., Awaitable[tuple[dict, str, str]]], user_id: int, project_id: str, node_uuid: NodeIDStr, @@ -585,8 +607,9 @@ async def test_get_file_from_previous_node_with_mapping_of_same_key_name( item_pytype: type, option_r_clone_settings: RCloneSettings | None, constant_uuid4: None, + default_db_manager: DBManager, ): - config_dict, _, this_node_uuid = create_2nodes_configuration( + config_dict, _, this_node_uuid = await create_2nodes_configuration( prev_node_inputs=None, prev_node_outputs=[("in_15", item_type, await create_store_link(item_value))], inputs=[("in_15", item_type, create_node_link("in_15"))], @@ -600,6 +623,7 @@ async def test_get_file_from_previous_node_with_mapping_of_same_key_name( project_id=project_id, node_uuid=node_uuid, r_clone_settings=option_r_clone_settings, + db_manager=default_db_manager, ) await check_config_valid(PORTS, config_dict) # add a filetokeymap @@ -635,7 +659,7 @@ async def test_get_file_from_previous_node_with_mapping_of_same_key_name( ], ) async def test_file_mapping( - create_special_configuration: Callable, + create_special_configuration: Callable[..., Awaitable[tuple[dict, str, str]]], user_id: int, project_id: str, node_uuid: NodeIDStr, @@ -649,8 +673,9 @@ async def test_file_mapping( option_r_clone_settings: RCloneSettings | None, create_valid_file_uuid: Callable[[str, Path], SimcoreS3FileID], constant_uuid4: None, + default_db_manager: DBManager, ): - config_dict, project_id, node_uuid = create_special_configuration( + config_dict, project_id, node_uuid = await create_special_configuration( inputs=[("in_1", item_type, await create_store_link(item_value))], outputs=[("out_1", item_type, None)], project_id=project_id, @@ -661,6 +686,7 @@ async def test_file_mapping( project_id=project_id, node_uuid=node_uuid, r_clone_settings=option_r_clone_settings, + db_manager=default_db_manager, ) await check_config_valid(PORTS, config_dict) # add a filetokeymap @@ -735,11 +761,12 @@ async def test_regression_concurrent_port_update_fails( user_id: int, project_id: str, node_uuid: NodeIDStr, - create_special_configuration: Callable, + create_special_configuration: Callable[..., Awaitable[tuple[dict, str, 
str]]], int_item_value: int, parallel_int_item_value: int, port_count: int, option_r_clone_settings: RCloneSettings | None, + default_db_manager: DBManager, ) -> None: """ when using `await PORTS.outputs` test will fail @@ -747,13 +774,14 @@ async def test_regression_concurrent_port_update_fails( """ outputs = [(f"value_{i}", "integer", None) for i in range(port_count)] - config_dict, _, _ = create_special_configuration(inputs=[], outputs=outputs) + config_dict, _, _ = await create_special_configuration(inputs=[], outputs=outputs) PORTS = await node_ports_v2.ports( user_id=user_id, project_id=project_id, node_uuid=node_uuid, r_clone_settings=option_r_clone_settings, + db_manager=default_db_manager, ) await check_config_valid(PORTS, config_dict) @@ -824,25 +852,29 @@ async def test_batch_update_inputs_outputs( user_id: int, project_id: str, node_uuid: NodeIDStr, - create_special_configuration: Callable, + create_special_configuration: Callable[..., Awaitable[tuple[dict, str, str]]], port_count: int, option_r_clone_settings: RCloneSettings | None, faker: Faker, output_callbacks: _Callbacks, spy_outputs_callbaks: dict[str, AsyncMock], use_output_callbacks: bool, + default_db_manager: DBManager, ) -> None: callbacks = output_callbacks if use_output_callbacks else None outputs = [(f"value_out_{i}", "integer", None) for i in range(port_count)] inputs = [(f"value_in_{i}", "integer", None) for i in range(port_count)] - config_dict, _, _ = create_special_configuration(inputs=inputs, outputs=outputs) + config_dict, _, _ = await create_special_configuration( + inputs=inputs, outputs=outputs + ) PORTS = await node_ports_v2.ports( user_id=user_id, project_id=project_id, node_uuid=node_uuid, r_clone_settings=option_r_clone_settings, + db_manager=default_db_manager, ) await check_config_valid(PORTS, config_dict) diff --git a/packages/simcore-sdk/tests/unit/conftest.py b/packages/simcore-sdk/tests/unit/conftest.py index 34cd932081cf..527e02d10038 100644 --- a/packages/simcore-sdk/tests/unit/conftest.py +++ b/packages/simcore-sdk/tests/unit/conftest.py @@ -3,7 +3,7 @@ # pylint:disable=redefined-outer-name import json -from collections.abc import AsyncIterator, Callable +from collections.abc import Callable from random import randint from typing import Any from uuid import uuid4 @@ -32,7 +32,8 @@ async def mock_db_manager( monkeypatch, project_id: str, node_uuid: str, -) -> AsyncIterator[Callable]: + mock_app_name: str, +) -> Callable[[dict[str, Any]], DBManager]: def _mock_db_manager(port_cfg: dict[str, Any]) -> DBManager: async def mock_get_ports_configuration_from_node_uuid(*args, **kwargs) -> str: return json.dumps(port_cfg) @@ -55,7 +56,6 @@ async def mock_write_ports_configuration( mock_write_ports_configuration, ) - db_manager = DBManager() - return db_manager + return DBManager(application_name=mock_app_name) return _mock_db_manager diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py b/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py index 250f9d2599d4..abd91d7b5910 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_v2_nodeports_v2.py @@ -3,8 +3,9 @@ # pylint:disable=redefined-outer-name # pylint:disable=protected-access +from collections.abc import Callable from pathlib import Path -from typing import Any, Callable +from typing import Any from unittest.mock import AsyncMock import pytest @@ -222,8 +223,7 @@ async def test_node_ports_v2_packages( node_uuid: str, ): 
db_manager = mock_db_manager(default_configuration) - node_ports = await ports(user_id, project_id, node_uuid) - node_ports = await ports(user_id, project_id, node_uuid, db_manager=db_manager) + await ports(user_id, project_id, node_uuid, db_manager=db_manager) @pytest.fixture diff --git a/requirements/base.Makefile b/requirements/base.Makefile index 35823f26d16c..a2ee7be1ddfd 100644 --- a/requirements/base.Makefile +++ b/requirements/base.Makefile @@ -15,7 +15,7 @@ UPGRADE_OPTION := $(if $(upgrade),--upgrade-package "$(upgrade)",$(DO_CLEAN_OR_U objects = $(sort $(wildcard *.in)) outputs := $(objects:.in=.txt) -reqs: $(outputs) ## pip-compiles all requirements/*.in -> requirements/*.txt; make reqs upgrade=foo will only upgrade package foo +reqs: $(outputs) ## pip-compiles all requirements/*.in -> requirements/*.txt; make reqs upgrade=foo will only upgrade package foo; make reqs startswith=pytest will upgrade packages starting with pytest touch: @$(foreach p,${objects},touch ${p};) @@ -36,6 +36,12 @@ help: ## this colorful help @echo "" @awk --posix 'BEGIN {FS = ":.*?## "} /^[[:alpha:][:space:]_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) @echo "" + @echo "Examples:" + @echo " make reqs # Upgrade all packages" + @echo " make reqs upgrade=pytest # Upgrade only pytest package" + @echo " make reqs startswith=pytest # Upgrade all packages starting with 'pytest'" + @echo " make reqs clean=1 # Clean and rebuild all requirements" + @echo "" # ------------------------------------------------------------------------------------------ @@ -44,10 +50,28 @@ help: ## this colorful help # extracting subsets of requiremenst like e.g _dask-distributed.* # %.txt: %.in - cd ..; \ - uv pip compile $(UPGRADE_OPTION) \ - --no-header \ - --output-file requirements/$@ requirements/$< + @if [ -n "$(startswith)" ]; then \ + MATCHING_PACKAGES=$$(grep '^$(startswith)' $@ 2>/dev/null | cut -d= -f1); \ + if [ -z "$$MATCHING_PACKAGES" ]; then \ + echo "No packages starting with '$(startswith)' found in $@. 
Skipping."; \ + exit 0; \ + fi; \ + STARTSWITH_UPGRADE=$$(echo "$$MATCHING_PACKAGES" | xargs -n1 echo --upgrade-package); \ + cd ..; \ + uv pip compile $$STARTSWITH_UPGRADE \ + --no-header \ + --output-file requirements/$@ requirements/$<; \ + elif [ -n "$(upgrade)" ]; then \ + cd ..; \ + uv pip compile --upgrade-package "$(upgrade)" \ + --no-header \ + --output-file requirements/$@ requirements/$<; \ + else \ + cd ..; \ + uv pip compile $(DO_CLEAN_OR_UPGRADE) \ + --no-header \ + --output-file requirements/$@ requirements/$<; \ + fi _test.txt: _base.txt diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 627c93b23ac6..b839fb276dcd 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -10,7 +10,6 @@ # Vulnerabilities ----------------------------------------------------------------------------------------- # aiohttp>=3.7.4, !=3.11.13 # https://github.com/advisories/GHSA-v6wp-4m6f-gcjg, 3.11.13 was yanked https://github.com/aio-libs/aiohttp/issues/10617 -aiohttp<3.12.0 # attempt to fix an issue with Content Length which could have been introduced in 3.12.x versions certifi>=2023.7.22 # https://github.com/advisories/GHSA-xqr8-7jwr-rhp7 cryptography>=41.0.6 # https://github.com/advisories/GHSA-v8gr-m533-ghj9 httpx>=0.23.0 # https://github.com/advisories/GHSA-h8pj-cxx2-jfg2 / CVE-2021-41945 @@ -51,6 +50,11 @@ httpx!=0.28.0 # Waiting for fix in respx: https://github.com/lundberg/respx/pul # pydantic>=2.10.0 # Avoids inter-version compatibility serialization errors as: _pickle.UnpicklingError: NEWOBJ class argument must be a type, not _AnnotatedAlias +# See issue https://github.com/ITISFoundation/osparc-simcore/issues/7300 +pydantic-settings<2.7.1 + +# avoid downgrades of openapi-spec-validator related libraries +referencing<=0.35.1 # @@ -67,9 +71,3 @@ pennsieve>=999999999 # User alternative e.g. parametrized fixture or request.getfixturevalue(.) pytest-lazy-fixture>=999999999 - -# avoid downgrades of openapi-spec-validator related libraries -referencing<=0.35.1 - -# See issue https://github.com/ITISFoundation/osparc-simcore/issues/7300 -pydantic-settings<2.7.1 diff --git a/requirements/how-to-upgrade-python.md b/requirements/how-to-upgrade-python.md index 51217f5f53b4..0b28fcad29f6 100644 --- a/requirements/how-to-upgrade-python.md +++ b/requirements/how-to-upgrade-python.md @@ -17,16 +17,7 @@ Both python and pip version are specified: ARG PYTHON_VERSION="3.9.12" FROM python:${PYTHON_VERSION}-slim-bookworm as base ``` -- in the CI ``.github/workflows/ci-testing-deploy.yml`` - ```yaml - jobs: - ... 
: - runs-on: ${{ matrix.os }} - strategy: - matrix: - python: ["3.9"] - ``` - and in ``ci/helpers/ensure_python_pip.bash`` +- in ``.python-version`` diff --git a/requirements/tools/Dockerfile b/requirements/tools/Dockerfile index e5fc9373dc99..534f47ce757f 100644 --- a/requirements/tools/Dockerfile +++ b/requirements/tools/Dockerfile @@ -9,7 +9,7 @@ # # ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build # we docker image is built based on debian FROM python:${PYTHON_VERSION}-slim-bookworm AS base @@ -31,10 +31,7 @@ COPY --from=uv_build /uv /uvx /bin/ RUN uv venv "${VIRTUAL_ENV}" -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools + diff --git a/requirements/tools/Makefile b/requirements/tools/Makefile index ee094b80ef94..87e3afdcd4b6 100644 --- a/requirements/tools/Makefile +++ b/requirements/tools/Makefile @@ -24,6 +24,9 @@ SERVICES_DIR := $(abspath $(REPODIR)/services) IMAGE := local/requirements/tools UPGRADE_OPTION := $(if $(upgrade),upgrade=$(upgrade),) +STARTSWITH_OPTION := $(if $(startswith),startswith=$(startswith),) +UPGRADE_OR_STARTSWITH_OPTION := $(if $(upgrade),upgrade=$(upgrade),$(if $(startswith),startswith=$(startswith),)) +UPGRADE_OR_STARTSWITH_OR_CLEAN_OPTION := $(if $(upgrade),upgrade=$(upgrade),$(if $(startswith),startswith=$(startswith),$(if $(clean),clean=$(clean),))) # tools MAKE_C := $(MAKE) --directory @@ -70,20 +73,20 @@ touch: ## touches all package requirement inputs only-tools: ## upgrades tools repo wide # Upgrading ONLY _tools.in - @$(foreach p,${_tools-in},echo Touching $(p);touch $(p);$(MAKE_C) $(dir $(p)) reqs $(UPGRADE_OPTION);) + @$(foreach p,${_tools-in},echo Touching $(p);touch $(p);$(MAKE_C) $(dir $(p)) reqs $(UPGRADE_OR_STARTSWITH_OR_CLEAN_OPTION);) -reqs: ## updates test & tooling requirements - # Upgrading $(upgrade) requirements - @$(foreach p,${_target-inputs},echo Touching $(p);touch $(p);$(MAKE_C) $(dir $(p)) reqs $(UPGRADE_OPTION);) +reqs: guard-UPGRADE_OR_STARTSWITH_OR_CLEAN_OPTION ## updates test & tooling requirements + # Upgrading $(upgrade)$(startswith)$(if $(clean), cleaning) requirements + @$(foreach p,${_target-inputs},echo Touching $(p);touch $(p);$(MAKE_C) $(dir $(p)) reqs $(UPGRADE_OR_STARTSWITH_OR_CLEAN_OPTION);) -reqs-all: guard-UPGRADE_OPTION ## updates a given package repository-wise IN ALL `requirements/` folders (e.g. make reqs-all upgrade=foo==1.2.3 ) - # Upgrading $(upgrade) ALL requirements - @$(foreach p,${_all-in},echo Touching $(p);touch $(p);$(MAKE_C) $(dir $(p)) reqs $(UPGRADE_OPTION);) +reqs-all: guard-UPGRADE_OR_STARTSWITH_OR_CLEAN_OPTION ## updates a given package or pattern repository-wise IN ALL `requirements/` folders (e.g. make reqs-all upgrade=foo==1.2.3 or make reqs-all startswith=pytest or make reqs-all clean=1) + # Upgrading $(upgrade)$(startswith)$(if $(clean), cleaning) ALL requirements + @$(foreach p,${_all-in},echo Touching $(p);touch $(p);$(MAKE_C) $(dir $(p)) reqs $(UPGRADE_OR_STARTSWITH_OR_CLEAN_OPTION);) -reqs-services: guard-UPGRADE_OPTION ## updates a given package on all services [and not packages] (e.g. make reqs-services upgrade=foo==1.2.3 ) - # Upgrading $(upgrade) in services - @$(foreach p,${_services-in},echo Touching $(p);touch $(p);$(MAKE_C) $(dir $(p)) reqs $(UPGRADE_OPTION);) +reqs-services: guard-UPGRADE_OR_STARTSWITH_OR_CLEAN_OPTION ## updates a given package or pattern on all services [and not packages] (e.g. 
make reqs-services upgrade=foo==1.2.3 or make reqs-services startswith=pytest or make reqs-services clean=1) + # Upgrading $(upgrade)$(startswith)$(if $(clean), cleaning) in services + @$(foreach p,${_services-in},echo Touching $(p);touch $(p);$(MAKE_C) $(dir $(p)) reqs $(UPGRADE_OR_STARTSWITH_OR_CLEAN_OPTION);) reqs-ci: ## upgrades requirements for pylint recipe in CI @@ -112,7 +115,7 @@ run: build ## Runs upgrade in a container [WARNING! UNDER DEV. USE CAREFULY] --user=$(shell id -u):$(shell id -g) \ --entrypoint=/bin/bash \ ${IMAGE_NAME} \ - -c "cd requirements/tools; make reqs $(if $(upgrade),upgrade=$(upgrade),)" + -c "cd requirements/tools; make reqs $(UPGRADE_OR_STARTSWITH_OR_CLEAN_OPTION)" .PHONY: shell @@ -134,6 +137,21 @@ help: ## this colorful help @echo "" @awk --posix 'BEGIN {FS = ":.*?## "} /^[[:alpha:][:space:]_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) @echo "" + @echo "Examples:" + @echo " make reqs # Upgrade all test & tooling requirements" + @echo " make reqs upgrade=pytest # Upgrade only pytest package in test & tooling" + @echo " make reqs startswith=pytest # Upgrade all packages starting with 'pytest' in test & tooling" + @echo " make reqs clean=1 # Clean and rebuild all test & tooling requirements" + @echo "" + @echo " make reqs-all upgrade=fastapi # Upgrade fastapi in ALL requirements files" + @echo " make reqs-all startswith=pydantic # Upgrade all packages starting with pydantic repo-wide" + @echo " make reqs-all clean=1 # Clean and rebuild ALL requirements" + @echo "" + @echo " make reqs-services upgrade=uvicorn # Upgrade uvicorn only in services" + @echo " make reqs-services startswith=sqlalchemy # Upgrade all packages starting with sqlalchemy in services folder" + @echo "" + @echo " make only-tools upgrade=black # Upgrade black only in tools requirements" + @echo "" .PHONY: guard-% diff --git a/requirements/tools/check_changes.py b/requirements/tools/check_changes.py index e14425a740b3..234cafeed4b3 100644 --- a/requirements/tools/check_changes.py +++ b/requirements/tools/check_changes.py @@ -166,9 +166,11 @@ def main_changes_stats() -> None: f'{",".join(to_versions) if to_versions else "🗑️ removed":10s}', "|", # how big the version change is - f"{tag_upgrade(sorted(set(before[name]))[-1], sorted(set(after[name]))[-1]):10s}" - if to_versions - else "", + ( + f"{tag_upgrade(sorted(set(before[name]))[-1], sorted(set(after[name]))[-1]):10s}" + if to_versions + else "" + ), "|", counts[name], "|", @@ -279,7 +281,7 @@ def main() -> None: if args.changed_reqs: main_changes_stats() else: - repo_wide_changes(exclude={"*/director/*"}) + repo_wide_changes(exclude={"*/director/*", "*/.cache/uv/*"}) if __name__ == "__main__": diff --git a/scripts/common.Makefile b/scripts/common.Makefile index 0dc78b889dd7..dbaddbfce93e 100644 --- a/scripts/common.Makefile +++ b/scripts/common.Makefile @@ -173,6 +173,21 @@ mypy: $(REPO_BASE_DIR)/mypy.ini ## runs mypy python static type-checker on this $(CURDIR)/src +.PHONY: mypy-debug +mypy-debug: $(REPO_BASE_DIR)/mypy.ini ## runs mypy with profiling and reporting enabled + @rm -rf $(CURDIR)/.mypy-report + @mkdir -p $(CURDIR)/.mypy-report + @mypy \ + --config-file=$(REPO_BASE_DIR)/mypy.ini \ + --show-error-context \ + --show-traceback \ + --verbose \ + --linecount-report $(CURDIR)/.mypy-report \ + --any-exprs-report $(CURDIR)/.mypy-report \ + $(CURDIR)/src \ + | tee $(CURDIR)/.mypy-report/mypy.logs + + .PHONY: codestyle codestyle codestyle-ci: ## enforces codestyle (isort & black) finally runs pylint & mypy 
@$(SCRIPTS_DIR)/codestyle.bash $(if $(findstring -ci,$@),ci,development) $(shell basename "${SRC_DIR}") diff --git a/scripts/erd/Dockerfile b/scripts/erd/Dockerfile index 6991e52f8c0e..522b55e9101b 100644 --- a/scripts/erd/Dockerfile +++ b/scripts/erd/Dockerfile @@ -8,7 +8,7 @@ # ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build # we docker image is built based on debian FROM python:${PYTHON_VERSION}-slim-bookworm AS base @@ -28,10 +28,6 @@ RUN apt-get update \ # install UV https://docs.astral.sh/uv/guides/integration/docker/#installing-uv COPY --from=uv_build /uv /uvx /bin/ -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools COPY requirements.txt . RUN --mount=type=cache,target=/root/.cache/uv \ diff --git a/scripts/maintenance/computational-clusters/autoscaled_monitor/core.py b/scripts/maintenance/computational-clusters/autoscaled_monitor/core.py index 4eff89026dd6..9f1919b8d33f 100755 --- a/scripts/maintenance/computational-clusters/autoscaled_monitor/core.py +++ b/scripts/maintenance/computational-clusters/autoscaled_monitor/core.py @@ -615,7 +615,11 @@ async def _cancel_all_jobs( the_cluster, dask_task.job_id, ) - if comp_task is not None and abort_in_db: + if ( + comp_task is not None + and comp_task.state not in ["FAILED", "SUCCESS", "ABORTED"] + and abort_in_db + ): await db.abort_job_in_db(state, comp_task.project_id, comp_task.node_id) rich.print("cancelled all tasks") diff --git a/scripts/maintenance/computational-clusters/autoscaled_monitor/db.py b/scripts/maintenance/computational-clusters/autoscaled_monitor/db.py index 14190934aa19..c266cdd3cd52 100644 --- a/scripts/maintenance/computational-clusters/autoscaled_monitor/db.py +++ b/scripts/maintenance/computational-clusters/autoscaled_monitor/db.py @@ -18,8 +18,11 @@ async def db_engine( state: AppState, ) -> AsyncGenerator[AsyncEngine, Any]: async with contextlib.AsyncExitStack() as stack: - assert state.environment["POSTGRES_ENDPOINT"] # nosec - db_endpoint = state.environment["POSTGRES_ENDPOINT"] + assert state.environment["POSTGRES_HOST"] # nosec + assert state.environment["POSTGRES_PORT"] # nosec + db_endpoint = ( + f"{state.environment['POSTGRES_HOST']}:{state.environment['POSTGRES_PORT']}" + ) if state.main_bastion_host: assert state.ssh_key_path # nosec db_host, db_port = db_endpoint.split(":") diff --git a/scripts/maintenance/migrate_project/Dockerfile b/scripts/maintenance/migrate_project/Dockerfile index 1092f6ca3b1e..31d120405ab3 100644 --- a/scripts/maintenance/migrate_project/Dockerfile +++ b/scripts/maintenance/migrate_project/Dockerfile @@ -1,5 +1,5 @@ # syntax=docker/dockerfile:1 -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build # we docker image is built based on debian FROM python:3.11.9-buster diff --git a/scripts/metrics/Makefile b/scripts/metrics/Makefile deleted file mode 100644 index 3453cfe3cfb6..000000000000 --- a/scripts/metrics/Makefile +++ /dev/null @@ -1,11 +0,0 @@ -.DEFAULT_GOAL := install - -SHELL := /bin/bash - -install: - # creating python virtual environment - @python3 -m venv .venv - # installing python dependencies - @.venv/bin/pip install --upgrade pip setuptools wheel - @.venv/bin/pip install -r requirements.txt - # activate the python virtual environment by running: ```source .venv/bin/activate``` diff --git a/scripts/metrics/compute_list_of_images_in_registry.py b/scripts/metrics/compute_list_of_images_in_registry.py 
deleted file mode 100755 index 518c7932b5f2..000000000000 --- a/scripts/metrics/compute_list_of_images_in_registry.py +++ /dev/null @@ -1,144 +0,0 @@ -#! /usr/bin/env python3 - -import asyncio -import json -from collections import defaultdict, deque -from datetime import date, datetime -from pathlib import Path -from pprint import pformat - -import typer -from httpx import URL, AsyncClient - -N = len("2020-10-09T12:28:14.7710") - - -async def get_repos(client): - r = await client.get( - "/_catalog", - ) - r.raise_for_status() - list_of_repositories = r.json()["repositories"] - typer.secho( - f"got the list of {len(list_of_repositories)} repositories from the registry" - ) - filtered_list_of_repositories = list( - filter( - lambda repo: repo.startswith("simcore/services/dynamic/") - or repo.startswith("simcore/services/comp/"), - list_of_repositories, - ) - ) - return filtered_list_of_repositories - - -async def list_images_in_registry( - endpoint: URL, - username: str, - password: str, - from_date: datetime | None, - to_date: datetime, -) -> dict[str, list[tuple[str, str, str, str]]]: - if not from_date: - from_date = datetime(year=2000, month=1, day=1) - typer.secho( - f"listing images from {from_date} to {to_date} from {endpoint}", - fg=typer.colors.YELLOW, - ) - - list_of_images_in_date_range = defaultdict(list) - - async with AsyncClient( - base_url=endpoint.join("v2"), auth=(username, password), http2=True - ) as client: - list_of_repositories = await get_repos(client) - - with typer.progressbar( - list_of_repositories, label="Processing repositories" - ) as progress: - for repo in progress: - r = await client.get(f"/{repo}/tags/list") - r.raise_for_status() - list_of_tags = [tag for tag in r.json()["tags"] if tag != "latest"] - - # we go in reverse order, so the first that does not go in the date range will stop the loop - for tag in reversed(list_of_tags): - r = await client.get(f"/{repo}/manifests/{tag}") - r.raise_for_status() - manifest = r.json() - # manifest[history] contains all the blobs, taking the latest one corresponds to the image creation date - history = manifest["history"] - tag_creation_dates = deque() - for blob in history: - v1_comp = json.loads(blob["v1Compatibility"]) - tag_creation_dates.append( - datetime.strptime( - v1_comp["created"][:N], "%Y-%m-%dT%H:%M:%S.%f" - ) - ) - tag_last_date = sorted(tag_creation_dates)[-1] - # check this service is in the time range - if tag_last_date < from_date or tag_last_date > to_date: - break - - # get the image labels from the last blob (same as director does) - v1_comp = json.loads(history[0]["v1Compatibility"]) - container_config = v1_comp.get( - "container_config", v1_comp["config"] - ) - - simcore_labels = {} - for label_key, label_value in container_config["Labels"].items(): - if label_key.startswith("io.simcore"): - simcore_labels.update(json.loads(label_value)) - - list_of_images_in_date_range[repo].append( - ( - tag, - simcore_labels["name"], - simcore_labels["description"], - simcore_labels["type"], - ) - ) - typer.secho( - f"Completed. 
Found {len(list_of_images_in_date_range)} created between {from_date} and {to_date}", - fg=typer.colors.YELLOW, - ) - typer.secho(f"{pformat(list_of_images_in_date_range)}") - - return list_of_images_in_date_range - - -def main( - endpoint: str, - username: str, - password: str = typer.Option(..., prompt=True, hide_input=True), - from_date: datetime | None = typer.Option(None, formats=["%Y-%m-%d"]), - to_date: datetime = typer.Option(f"{date.today()}", formats=["%Y-%m-%d"]), - markdown: bool = typer.Option(False), -): - endpoint_url = URL(endpoint) - list_of_images: dict[str, list[tuple[str, str, str, str]]] = asyncio.run( - list_images_in_registry(endpoint_url, username, password, from_date, to_date) - ) - - if markdown: - output_file = Path.cwd() / f"{endpoint_url.host}.md" - with output_file.open("w") as fp: - fp.write( - f"# {endpoint_url.host}: Services added between {from_date} and {to_date}\n\n" - ) - fp.write("| Service | Version(s) | Name | Description | Type |\n") - fp.write("| ------- | ---------- | ---- | ----------- | ---- |\n") - for repo, repo_details in list_of_images.items(): - for index, (version, name, description, service_type) in enumerate( - repo_details - ): - filtered_description = description.strip().replace("\n", "") - fp.write( - f"| {repo if index == 0 else ''} | {version} | {name if index == 0 else ''} | {filtered_description if index == 0 else ''} | {('Dynamic service' if service_type == 'dynamic' else 'Computational service') if index == 0 else ''} |\n" - ) - - -if __name__ == "__main__": - typer.run(main) diff --git a/scripts/metrics/requirements.txt b/scripts/metrics/requirements.txt deleted file mode 100644 index c843a6494bf3..000000000000 --- a/scripts/metrics/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -black -httpx[http2] -pydantic[email,dotenv] -pylint -typer[all] diff --git a/scripts/openapi/oas_resolver/Dockerfile b/scripts/openapi/oas_resolver/Dockerfile index 92c1113ab657..a0fb9f7232a3 100644 --- a/scripts/openapi/oas_resolver/Dockerfile +++ b/scripts/openapi/oas_resolver/Dockerfile @@ -2,7 +2,7 @@ # Usage: # docker build . 
-t oas_resolver # docker run -v /path/to/api:/input -v /path/to/compiled/file:/output oas_resolver /input/path/to/openapi.yaml /output/output_file.yaml -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build # we docker image is built based on debian FROM python:3.6-alpine @@ -17,11 +17,6 @@ WORKDIR /src # install UV https://docs.astral.sh/uv/guides/integration/docker/#installing-uv COPY --from=uv_build /uv /uvx /bin/ -# update pip -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools RUN --mount=type=cache,target=/root/.cache/uv \ uv pip install prance click openapi_spec_validator diff --git a/scripts/pydeps-docker/Dockerfile b/scripts/pydeps-docker/Dockerfile index 401a57c38eb1..5ebd38a61cf7 100644 --- a/scripts/pydeps-docker/Dockerfile +++ b/scripts/pydeps-docker/Dockerfile @@ -9,7 +9,7 @@ # # ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build # we docker image is built based on debian FROM python:${PYTHON_VERSION}-slim-bookworm AS base @@ -33,11 +33,6 @@ ARG HOME_DIR RUN mkdir -p ${HOME_DIR} COPY .pydeps ${HOME_DIR}/.pydeps -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools - # devenv RUN --mount=type=cache,target=/root/.cache/uv \ diff --git a/scripts/pyupgrade.bash b/scripts/pyupgrade.bash index 4423a8583ee6..3428b60efff6 100755 --- a/scripts/pyupgrade.bash +++ b/scripts/pyupgrade.bash @@ -19,12 +19,8 @@ Build() { --tag "$IMAGE_NAME" \ - <=0.9", "rich", "requests"] +dependencies = ["arrow", "python-dotenv","pydantic", "pydantic-settings", "typer>=0.9", "rich", "requests"] requires-python = ">=3.10" [project.scripts] diff --git a/services/agent/Dockerfile b/services/agent/Dockerfile index ff0658cfcc75..f2073066cc17 100644 --- a/services/agent/Dockerfile +++ b/services/agent/Dockerfile @@ -2,7 +2,7 @@ # Define arguments in the global scope ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build @@ -33,6 +33,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=private \ set -eux && \ apt-get update && \ apt-get install -y --no-install-recommends \ + fd-find \ gosu \ curl \ && apt-get clean -y \ @@ -96,10 +97,7 @@ RUN uv venv "${VIRTUAL_ENV}" -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools + WORKDIR /build @@ -116,6 +114,9 @@ WORKDIR /build FROM build AS prod-only-deps ENV SC_BUILD_TARGET=prod-only-deps +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy WORKDIR /build/services/agent @@ -141,8 +142,6 @@ ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production ENV PYTHONOPTIMIZE=TRUE -# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode -ENV UV_COMPILE_BYTECODE=1 WORKDIR /home/scu diff --git a/services/agent/docker/boot.sh b/services/agent/docker/boot.sh index 5cc8f9f5aad6..3b502cd95747 100755 --- a/services/agent/docker/boot.sh +++ b/services/agent/docker/boot.sh @@ -24,7 +24,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/agent - uv pip --quiet sync requirements/dev.txt + uv pip --quiet sync --link-mode=copy requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -33,7 +33,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy if command -v uv >/dev/null 2>&1; then - 
uv pip install debugpy + uv pip install --link-mode=copy debugpy else pip install debugpy fi @@ -48,20 +48,22 @@ SERVER_LOG_LEVEL=$(echo "${APP_LOG_LEVEL}" | tr '[:upper:]' '[:lower:]') echo "$INFO" "Log-level app/server: $APP_LOG_LEVEL/$SERVER_LOG_LEVEL" if [ "${SC_BOOT_MODE}" = "debug" ]; then - reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path "*/src/*" ! -path "*.*" -exec echo '--reload-dir {} \' \;) - + reload_dir_packages=$(fdfind src /devel/packages --exec echo '--reload-dir {} ' | tr '\n' ' ') exec sh -c " cd services/agent/src/simcore_service_agent && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${AGENT_SERVER_REMOTE_DEBUG_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${AGENT_SERVER_REMOTE_DEBUG_PORT} -m \ + uvicorn \ + --factory main:app_factory \ --host 0.0.0.0 \ --port 8000 \ --reload \ - $reload_dir_packages + $reload_dir_packages \ --reload-dir . \ --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_agent.main:the_app \ + exec uvicorn \ + --factory simcore_service_agent.main:app_factory \ --host 0.0.0.0 \ --port 8000 \ --log-level "${SERVER_LOG_LEVEL}" \ diff --git a/services/agent/docker/entrypoint.sh b/services/agent/docker/entrypoint.sh index e89ad5408a31..a319c6824d73 100755 --- a/services/agent/docker/entrypoint.sh +++ b/services/agent/docker/entrypoint.sh @@ -19,6 +19,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE @@ -56,10 +57,9 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; - # change user property of files already around + fdfind --owner ":$SC_USER_ID" --exclude proc --exec-batch chgrp --no-dereference "$CONT_GROUPNAME" . '/' echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fdfind --owner "$SC_USER_ID:" --exclude proc --exec-batch chown --no-dereference "$SC_USER_NAME" . 
'/' fi fi diff --git a/services/agent/requirements/_base.txt b/services/agent/requirements/_base.txt index b3fe3dfecfbc..da576d1b4fbe 100644 --- a/services/agent/requirements/_base.txt +++ b/services/agent/requirements/_base.txt @@ -12,7 +12,7 @@ aiofiles==24.1.0 # via -r requirements/../../../packages/service-library/requirements/_base.in aiohappyeyeballs==2.5.0 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -45,6 +45,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi attrs==25.1.0 @@ -69,19 +71,14 @@ certifi==2025.1.31 # httpcore # httpx # requests + # sentry-sdk charset-normalizer==3.4.1 # via requests -click==8.1.8 +click==8.2.1 # via # rich-toolkit # typer # uvicorn -deprecated==1.2.18 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions dnspython==2.7.0 # via email-validator email-validator==2.2.0 @@ -92,12 +89,14 @@ exceptiongroup==1.2.2 # via aio-pika fast-depends==2.4.12 # via faststream -fastapi==0.115.12 +fastapi==0.116.1 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi-lifespan-manager -fastapi-cli==0.0.7 +fastapi-cli==0.0.8 # via fastapi +fastapi-cloud-cli==0.1.5 + # via fastapi-cli fastapi-lifespan-manager==0.1.4 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in faststream==0.5.35 @@ -106,13 +105,13 @@ frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.69.1 +googleapis-common-protos==1.70.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http grpcio==1.70.0 # via opentelemetry-exporter-otlp-proto-grpc -h11==0.14.0 +h11==0.16.0 # via # httpcore # uvicorn @@ -120,7 +119,7 @@ h2==4.2.0 # via httpx hpack==4.1.0 # via h2 -httpcore==1.0.7 +httpcore==1.0.9 # via httpx httptools==0.6.4 # via uvicorn @@ -140,6 +139,7 @@ httpx==0.28.1 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi + # fastapi-cloud-cli hyperframe==6.1.0 # via h2 idna==3.10 @@ -166,6 +166,10 @@ jinja2==3.1.6 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # fastapi +jsonref==1.1.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.23.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in @@ -182,7 +186,7 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.30.0 +opentelemetry-api==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -190,6 +194,7 @@ 
opentelemetry-api==1.30.0 # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-logging @@ -197,59 +202,63 @@ opentelemetry-api==1.30.0 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.30.0 +opentelemetry-exporter-otlp==1.34.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.30.0 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.30.0 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.30.0 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.51b0 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.51b0 +opentelemetry-instrumentation-aio-pika==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-asgi==0.51b0 +opentelemetry-instrumentation-asgi==0.55b1 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-fastapi==0.51b0 +opentelemetry-instrumentation-asyncpg==0.55b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-fastapi==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.51b0 +opentelemetry-instrumentation-httpx==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-logging==0.51b0 +opentelemetry-instrumentation-logging==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.51b0 +opentelemetry-instrumentation-redis==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.51b0 +opentelemetry-instrumentation-requests==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.30.0 +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.30.0 +opentelemetry-sdk==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.51b0 +opentelemetry-semantic-conventions==0.55b1 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk 
-opentelemetry-util-http==0.51b0 +opentelemetry-util-http==0.55b1 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi @@ -289,7 +298,7 @@ propcache==0.3.0 # via # aiohttp # yarl -protobuf==5.29.3 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto @@ -297,7 +306,7 @@ psutil==7.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in pycryptodome==3.21.0 # via stream-zip -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -325,11 +334,12 @@ pydantic==2.10.6 # -r requirements/_base.in # fast-depends # fastapi + # fastapi-cloud-cli # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.2 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -416,27 +426,35 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via opentelemetry-exporter-otlp-proto-http -rich==13.9.4 +rich==14.1.0 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # rich-toolkit # typer -rich-toolkit==0.14.7 - # via fastapi-cli +rich-toolkit==0.15.0 + # via + # fastapi-cli + # fastapi-cloud-cli +rignore==0.6.4 + # via fastapi-cloud-cli rpds-py==0.23.1 # via # jsonschema # referencing +sentry-sdk==2.35.0 + # via fastapi-cloud-cli shellingham==1.5.4 # via typer six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via anyio -starlette==0.46.0 + # via + # anyio + # asgi-lifespan +starlette==0.47.2 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -459,26 +477,35 @@ toolz==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.15.2 +typer==0.16.1 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # fastapi-cli + # fastapi-cloud-cli types-python-dateutil==2.9.0.20241206 # via arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # aiodebug # anyio # fastapi # faststream + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pydantic # pydantic-core # pydantic-extra-types # rich-toolkit + # starlette # typer -urllib3==2.3.0 + # typing-inspection +typing-inspection==0.4.1 + # via pydantic +urllib3==2.5.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -493,10 +520,12 @@ urllib3==2.3.0 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # requests + # sentry-sdk uvicorn==0.34.2 # via # fastapi # fastapi-cli + # fastapi-cloud-cli uvloop==0.21.0 # via uvicorn watchfiles==1.0.5 @@ -505,7 +534,6 @@ websockets==15.0.1 # via uvicorn wrapt==1.17.2 # via - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-httpx diff --git a/services/agent/requirements/_test.txt b/services/agent/requirements/_test.txt index a0c0ea3b114c..063dda189f27 100644 --- a/services/agent/requirements/_test.txt +++ b/services/agent/requirements/_test.txt @@ -10,7 +10,7 @@ aiohappyeyeballs==2.5.0 # via # -c requirements/_base.txt # aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -32,7 +32,9 @@ anyio==4.8.0 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in attrs==25.1.0 # via # -c requirements/_base.txt @@ -72,7 +74,7 @@ charset-normalizer==3.4.1 # via # -c requirements/_base.txt # requests -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # flask @@ -93,7 +95,7 @@ flask==3.1.0 # via # flask-cors # moto -flask-cors==5.0.1 +flask-cors==6.0.1 # via moto frozenlist==1.5.0 # via @@ -102,11 +104,11 @@ frozenlist==1.5.0 # aiosignal graphql-core==3.2.6 # via moto -h11==0.14.0 +h11==0.16.0 # via # -c requirements/_base.txt # httpcore -httpcore==1.0.7 +httpcore==1.0.9 # via # -c requirements/_base.txt # httpx @@ -188,7 +190,9 @@ packaging==24.2 pathable==0.4.4 # via jsonschema-path pluggy==1.5.0 - # via pytest + # via + # pytest + # pytest-cov ply==3.11 # via jsonpath-ng propcache==0.3.0 @@ -200,28 +204,32 @@ py-partiql-parser==0.6.1 # via moto pycparser==2.22 # via cffi -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via # -c requirements/_base.txt # pydantic +pygments==2.19.1 + # via + # -c requirements/_base.txt + # pytest pyparsing==3.2.1 # via moto -pytest==8.3.5 +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio # pytest-cov # pytest-mock -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -252,7 +260,7 @@ referencing==0.35.1 # jsonschema-specifications regex==2024.11.6 # via cfn-lint -requests==2.32.3 +requests==2.32.4 # via # -c requirements/_base.txt # docker @@ -270,7 +278,7 @@ rpds-py==0.23.1 # referencing s3transfer==0.11.3 # via boto3 -setuptools==75.8.2 +setuptools==80.9.0 # via moto six==1.17.0 # via @@ -284,7 +292,7 @@ sniffio==1.3.1 # asgi-lifespan sympy==1.13.3 # via cfn-lint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # anyio @@ -292,9 +300,14 @@ typing-extensions==4.12.2 # cfn-lint # pydantic # pydantic-core + # typing-inspection +typing-inspection==0.4.1 + # via + # -c requirements/_base.txt + # pydantic tzdata==2025.1 # via 
faker -urllib3==2.3.0 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/services/agent/requirements/_tools.txt b/services/agent/requirements/_tools.txt index 70694d84d7ba..3a03174fcf52 100644 --- a/services/agent/requirements/_tools.txt +++ b/services/agent/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -28,9 +28,9 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # black # mypy @@ -43,7 +43,9 @@ packaging==24.2 # black # build pathspec==0.12.1 - # via black + # via + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -69,13 +71,13 @@ pyyaml==6.0.2 # pre-commit ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.2 +setuptools==80.9.0 # via # -c requirements/_test.txt # pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/services/agent/src/simcore_service_agent/core/application.py b/services/agent/src/simcore_service_agent/core/application.py index 442c4649c626..e7972c1042a1 100644 --- a/services/agent/src/simcore_service_agent/core/application.py +++ b/services/agent/src/simcore_service_agent/core/application.py @@ -1,5 +1,6 @@ import logging +from common_library.json_serialization import json_dumps from fastapi import FastAPI from servicelib.fastapi.openapi import ( get_common_oas_options, @@ -9,7 +10,6 @@ initialize_fastapi_app_tracing, setup_tracing, ) -from servicelib.logging_utils import config_all_loggers from .._meta import ( API_VTAG, @@ -27,24 +27,16 @@ from ..services.volumes_manager import setup_volume_manager from .settings import ApplicationSettings -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) -def _setup_logger(settings: ApplicationSettings): - # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 - logging.basicConfig(level=settings.LOG_LEVEL.value) # NOSONAR - logging.root.setLevel(settings.LOG_LEVEL.value) - config_all_loggers( - log_format_local_dev_enabled=settings.AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=settings.AGENT_VOLUMES_LOG_FILTER_MAPPING, - tracing_settings=settings.AGENT_TRACING, - ) - - -def create_app() -> FastAPI: - settings = ApplicationSettings.create_from_envs() - _setup_logger(settings) - logger.debug(settings.model_dump_json(indent=2)) +def create_app(settings: ApplicationSettings | None = None) -> FastAPI: + if settings is None: + settings = ApplicationSettings.create_from_envs() + _logger.info( + "Application settings: %s", + json_dumps(settings, indent=2, sort_keys=True), + ) assert settings.SC_BOOT_MODE # nosec app = FastAPI( diff --git a/services/agent/src/simcore_service_agent/core/settings.py b/services/agent/src/simcore_service_agent/core/settings.py index d11b286f065f..edec12db2a57 100644 --- a/services/agent/src/simcore_service_agent/core/settings.py +++ b/services/agent/src/simcore_service_agent/core/settings.py @@ -2,10 +2,10 @@ from typing import Annotated from common_library.basic_types import DEFAULT_FACTORY +from common_library.logging.logging_utils_filtering import LoggerName, MessageSubstring from 
models_library.basic_types import BootModeEnum, LogLevel from models_library.docker import DockerNodeID from pydantic import AliasChoices, AnyHttpUrl, Field, field_validator -from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.base import BaseCustomSettings from settings_library.r_clone import S3Provider from settings_library.rabbit import RabbitSettings diff --git a/services/agent/src/simcore_service_agent/main.py b/services/agent/src/simcore_service_agent/main.py index a16db0c3d527..1af4eb695c24 100644 --- a/services/agent/src/simcore_service_agent/main.py +++ b/services/agent/src/simcore_service_agent/main.py @@ -1,3 +1,36 @@ +import logging +from typing import Final + +from common_library.json_serialization import json_dumps +from fastapi import FastAPI +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from simcore_service_agent.core.application import create_app +from simcore_service_agent.core.settings import ApplicationSettings + +_logger = logging.getLogger(__name__) + +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aio_pika", + "aiormq", + "httpcore", + "httpx", +) + + +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = create_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.AGENT_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.AGENT_VOLUMES_LOG_FILTER_MAPPING, + tracing_settings=app_settings.AGENT_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) -the_app = create_app() + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app diff --git a/services/agent/src/simcore_service_agent/services/backup.py b/services/agent/src/simcore_service_agent/services/backup.py index a7e125af0c42..0e1a9b00bcac 100644 --- a/services/agent/src/simcore_service_agent/services/backup.py +++ b/services/agent/src/simcore_service_agent/services/backup.py @@ -1,18 +1,25 @@ import asyncio +import json import logging +import socket import tempfile from asyncio.streams import StreamReader +from datetime import timedelta from pathlib import Path from textwrap import dedent from typing import Final from uuid import uuid4 +import httpx from fastapi import FastAPI +from servicelib.container_utils import run_command_in_container from settings_library.utils_r_clone import resolve_provider from ..core.settings import ApplicationSettings from ..models.volumes import DynamicServiceVolumeLabels, VolumeDetails +_TIMEOUT_PERMISSION_CHANGES: Final[timedelta] = timedelta(minutes=5) + _logger = logging.getLogger(__name__) @@ -107,6 +114,35 @@ def _log_expected_operation( _logger.log(log_level, formatted_message) +def _get_self_container_ip() -> str: + return socket.gethostbyname(socket.gethostname()) + + +async def _get_self_container() -> str: + ip = _get_self_container_ip() + + async with httpx.AsyncClient( + transport=httpx.AsyncHTTPTransport(uds="/var/run/docker.sock") + ) as client: + response = await client.get("http://localhost/containers/json") + for entry in response.json(): + if ip in json.dumps(entry): + container_id: str = entry["Id"] + return container_id + + msg = "Could not determine self container ID" + raise RuntimeError(msg) + + +async def _ensure_permissions_on_source_dir(source_dir: Path) -> None: + self_container = await 
_get_self_container() + await run_command_in_container( + self_container, + command=f"chmod -R o+rX '{source_dir}'", + timeout=_TIMEOUT_PERMISSION_CHANGES.total_seconds(), + ) + + async def _store_in_s3( settings: ApplicationSettings, volume_name: str, volume_details: VolumeDetails ) -> None: @@ -148,6 +184,8 @@ async def _store_in_s3( volume_details.labels, s3_path, r_clone_ls_output, volume_name ) + await _ensure_permissions_on_source_dir(source_dir) + # sync files via rclone r_clone_sync = [ "rclone", diff --git a/services/agent/src/simcore_service_agent/services/volumes_manager.py b/services/agent/src/simcore_service_agent/services/volumes_manager.py index 860ab86d0e21..1ef6ef1d0cbd 100644 --- a/services/agent/src/simcore_service_agent/services/volumes_manager.py +++ b/services/agent/src/simcore_service_agent/services/volumes_manager.py @@ -6,10 +6,10 @@ import arrow from aiodocker.docker import Docker +from common_library.async_tools import cancel_wait_task from fastapi import FastAPI from models_library.projects_nodes_io import NodeID from pydantic import NonNegativeFloat -from servicelib.async_utils import cancel_wait_task from servicelib.background_task import create_periodic_task from servicelib.fastapi.app_state import SingletonInAppStateMixin from servicelib.logging_utils import log_context diff --git a/services/agent/tests/conftest.py b/services/agent/tests/conftest.py index 97df58d4e5a7..8213e84ad47d 100644 --- a/services/agent/tests/conftest.py +++ b/services/agent/tests/conftest.py @@ -12,9 +12,11 @@ from settings_library.r_clone import S3Provider pytest_plugins = [ + "pytest_simcore.asyncio_event_loops", "pytest_simcore.aws_server", "pytest_simcore.docker_compose", "pytest_simcore.docker_swarm", + "pytest_simcore.logging", "pytest_simcore.rabbit_service", "pytest_simcore.repository_paths", ] diff --git a/services/agent/tests/unit/test_core_settings.py b/services/agent/tests/unit/test_core_settings.py new file mode 100644 index 000000000000..7e3fb5b5d2d4 --- /dev/null +++ b/services/agent/tests/unit/test_core_settings.py @@ -0,0 +1,18 @@ +# pylint: disable=unused-variable +# pylint: disable=unused-argument +# pylint: disable=redefined-outer-name + + +from pytest_simcore.helpers.monkeypatch_envs import ( + EnvVarsDict, +) +from simcore_service_agent.core.settings import ApplicationSettings + + +def test_valid_application_settings(mock_environment: EnvVarsDict): + assert mock_environment + + settings = ApplicationSettings() # type: ignore + assert settings + + assert settings == ApplicationSettings.create_from_envs() diff --git a/services/agent/tests/unit/test_services_backup.py b/services/agent/tests/unit/test_services_backup.py index d544a25dfa5e..2d73dd80fb17 100644 --- a/services/agent/tests/unit/test_services_backup.py +++ b/services/agent/tests/unit/test_services_backup.py @@ -1,18 +1,22 @@ # pylint: disable=redefined-outer-name +# pylint: disable=unused-argument import asyncio -from collections.abc import Awaitable, Callable +from collections.abc import AsyncIterable, Awaitable, Callable from pathlib import Path from typing import Final from uuid import uuid4 import aioboto3 +import aiodocker import pytest from fastapi import FastAPI from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.services_types import ServiceRunID from pydantic import NonNegativeInt +from pytest_mock import MockerFixture +from servicelib.container_utils import run_command_in_container from simcore_service_agent.core.settings import 
ApplicationSettings from simcore_service_agent.services.backup import backup_volume from simcore_service_agent.services.docker_utils import get_volume_details @@ -37,6 +41,28 @@ def volume_content(tmpdir: Path) -> Path: return path +@pytest.fixture +async def mock_container_with_data( + volume_content: Path, monkeypatch: pytest.MonkeyPatch +) -> AsyncIterable[str]: + async with aiodocker.Docker() as client: + container = await client.containers.run( + config={ + "Image": "alpine:latest", + "Cmd": ["/bin/ash", "-c", "sleep 10000"], + "HostConfig": {"Binds": [f"{volume_content}:{volume_content}:rw"]}, + } + ) + container_inspect = await container.show() + + container_name = container_inspect["Name"][1:] + monkeypatch.setenv("HOSTNAME", container_name) + + yield container_inspect["Id"] + + await container.delete(force=True) + + @pytest.fixture def downlaoded_from_s3(tmpdir: Path) -> Path: path = Path(tmpdir) / "downloaded_from_s3" @@ -44,7 +70,24 @@ def downlaoded_from_s3(tmpdir: Path) -> Path: return path +@pytest.fixture +async def mock__get_self_container_ip( + mock_container_with_data: str, + mocker: MockerFixture, +) -> None: + container_ip = await run_command_in_container( + mock_container_with_data, command="hostname -i" + ) + + mocker.patch( + "simcore_service_agent.services.backup._get_self_container_ip", + return_value=container_ip.strip(), + ) + + async def test_backup_volume( + mock_container_with_data: str, + mock__get_self_container_ip: None, volume_content: Path, project_id: ProjectID, swarm_stack_name: str, diff --git a/services/api-server/.env-devel b/services/api-server/.env-devel index 29d4830d47fb..a18401e3a5bd 100644 --- a/services/api-server/.env-devel +++ b/services/api-server/.env-devel @@ -28,9 +28,12 @@ POSTGRES_PASSWORD=test POSTGRES_DB=test POSTGRES_HOST=127.0.0.1 -# Enables debug -SC_BOOT_MODE=debug - +# rabbit +RABBIT_HOST=rabbit +RABBIT_PASSWORD=adminadmin +RABBIT_PORT=5672 +RABBIT_SECURE=false +RABBIT_USER=admin # webserver WEBSERVER_HOST=webserver diff --git a/services/api-server/Dockerfile b/services/api-server/Dockerfile index 99ef5272ec9b..9f9b0da600af 100644 --- a/services/api-server/Dockerfile +++ b/services/api-server/Dockerfile @@ -2,7 +2,7 @@ # Define arguments in the global scope ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build FROM python:${PYTHON_VERSION}-slim-bookworm AS base-arm64 @@ -30,6 +30,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=private \ set -eux && \ apt-get update && \ apt-get install -y --no-install-recommends \ + fd-find \ gosu \ && apt-get clean -y \ && rm -rf /var/lib/apt/lists/* \ @@ -89,10 +90,7 @@ RUN uv venv "${VIRTUAL_ENV}" -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools + WORKDIR /build @@ -109,6 +107,9 @@ WORKDIR /build FROM build AS prod-only-deps ENV SC_BUILD_TARGET=prod-only-deps +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy WORKDIR /build/services/api-server @@ -135,8 +136,6 @@ ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production ENV PYTHONOPTIMIZE=TRUE -# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode -ENV UV_COMPILE_BYTECODE=1 WORKDIR /home/scu diff --git a/services/api-server/Makefile b/services/api-server/Makefile index e923de11db89..555c88f6ec37 100644 --- a/services/api-server/Makefile +++ b/services/api-server/Makefile @@ -28,9 +28,11 @@ reqs: ## compiles pip 
requirements (.in -> .txt) define _create_and_validate_openapi # generating openapi specs file under $< (NOTE: Skips DEV FEATURES since this OAS is the 'offically released'!) - @source .env; \ + set -o allexport; \ + source .env; \ + set +o allexport; \ export API_SERVER_DEV_FEATURES_ENABLED=$1; \ - python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(the_app.openapi(), indent=2) )" > $@ + python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app_factory().openapi(), indent=2) )" > $@ # validates OAS file: $@ docker run --rm \ diff --git a/services/api-server/VERSION b/services/api-server/VERSION index ac39a106c485..54d1a4f2a4a7 100644 --- a/services/api-server/VERSION +++ b/services/api-server/VERSION @@ -1 +1 @@ -0.9.0 +0.13.0 diff --git a/services/api-server/docker/boot.sh b/services/api-server/docker/boot.sh index ea12e3446c95..227be9c56b96 100755 --- a/services/api-server/docker/boot.sh +++ b/services/api-server/docker/boot.sh @@ -19,7 +19,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/api-server - uv pip --quiet sync requirements/dev.txt + uv pip --quiet sync --link-mode=copy requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -28,7 +28,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy if command -v uv >/dev/null 2>&1; then - uv pip install debugpy + uv pip install --link-mode=copy debugpy else pip install debugpy fi @@ -39,20 +39,51 @@ APP_LOG_LEVEL=${API_SERVER_LOGLEVEL:-${LOG_LEVEL:-${LOGLEVEL:-INFO}}} SERVER_LOG_LEVEL=$(echo "${APP_LOG_LEVEL}" | tr '[:upper:]' '[:lower:]') echo "$INFO" "Log-level app/server: $APP_LOG_LEVEL/$SERVER_LOG_LEVEL" -if [ "${SC_BOOT_MODE}" = "debug" ]; then - reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path "*/src/*" ! -path "*.*" -exec echo '--reload-dir {} \' \;) +if [ "${API_SERVER_WORKER_MODE}" = "true" ]; then + if [ "${SC_BOOT_MODE}" = "debug" ]; then + exec watchmedo auto-restart \ + --directory /devel/packages \ + --directory services/api-server \ + --pattern "*.py" \ + --recursive \ + -- \ + celery \ + --app=boot_celery_worker:app \ + --workdir=services/api-server/docker \ + worker --pool=threads \ + --loglevel="${API_SERVER_LOGLEVEL}" \ + --concurrency="${CELERY_CONCURRENCY}" \ + --hostname="${API_SERVER_WORKER_NAME}" \ + --queues="${CELERY_QUEUES:-default}" + else + exec celery \ + --app=boot_celery_worker:app \ + --workdir=services/api-server/docker \ + worker --pool=threads \ + --loglevel="${API_SERVER_LOGLEVEL}" \ + --concurrency="${CELERY_CONCURRENCY}" \ + --hostname="${API_SERVER_WORKER_NAME}" \ + --queues="${CELERY_QUEUES:-default}" + fi +else + if [ "${SC_BOOT_MODE}" = "debug" ]; then + reload_dir_packages=$(fdfind src /devel/packages --exec echo '--reload-dir {} ' | tr '\n' ' ') - exec sh -c " - cd services/api-server/src/simcore_service_api_server && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${API_SERVER_REMOTE_DEBUG_PORT} -m uvicorn main:the_app \ + exec sh -c " + cd services/api-server/src/simcore_service_api_server && \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${API_SERVER_REMOTE_DEBUG_PORT} -m \ + uvicorn \ + --factory main:app_factory \ + --host 0.0.0.0 \ + --reload \ + $reload_dir_packages \ + --reload-dir . \ + --log-level \"${SERVER_LOG_LEVEL}\" + " + else + exec uvicorn \ + --factory simcore_service_api_server.main:app_factory \ --host 0.0.0.0 \ - --reload \ - $reload_dir_packages - --reload-dir . 
\ - --log-level \"${SERVER_LOG_LEVEL}\" - " -else - exec uvicorn simcore_service_api_server.main:the_app \ - --host 0.0.0.0 \ - --log-level "${SERVER_LOG_LEVEL}" + --log-level "${SERVER_LOG_LEVEL}" + fi fi diff --git a/services/api-server/docker/boot_celery_worker.py b/services/api-server/docker/boot_celery_worker.py new file mode 100644 index 000000000000..e0c7e119ced8 --- /dev/null +++ b/services/api-server/docker/boot_celery_worker.py @@ -0,0 +1,13 @@ +from celery.signals import worker_init, worker_shutdown # type: ignore[import-untyped] +from celery_library.signals import ( + on_worker_shutdown, +) +from simcore_service_api_server.celery_worker.worker_main import ( + get_app, + worker_init_wrapper, +) + +app = get_app() + +worker_init.connect(worker_init_wrapper) +worker_shutdown.connect(on_worker_shutdown) diff --git a/services/api-server/docker/entrypoint.sh b/services/api-server/docker/entrypoint.sh index b579236b5623..0124a12961b3 100755 --- a/services/api-server/docker/entrypoint.sh +++ b/services/api-server/docker/entrypoint.sh @@ -20,6 +20,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" USERNAME=scu GROUPNAME=scu @@ -57,10 +58,9 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; - # change user property of files already around + fdfind --owner ":$SC_USER_ID" --exclude proc --exec-batch chgrp --no-dereference "$CONT_GROUPNAME" . '/' echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fdfind --owner "$SC_USER_ID:" --exclude proc --exec-batch chown --no-dereference "$SC_USER_NAME" . 
'/' fi fi diff --git a/services/api-server/docker/healthcheck.py b/services/api-server/docker/healthcheck.py index 808782f32617..66ba806d0dbb 100755 --- a/services/api-server/docker/healthcheck.py +++ b/services/api-server/docker/healthcheck.py @@ -18,18 +18,49 @@ """ import os +import subprocess import sys from urllib.request import urlopen +from simcore_service_api_server.core.settings import ApplicationSettings + SUCCESS, UNHEALTHY = 0, 1 # Disabled if boots with debugger ok = os.environ.get("SC_BOOT_MODE", "").lower() == "debug" +app_settings = ApplicationSettings.create_from_envs() + + +def _is_celery_worker_healthy(): + assert app_settings.API_SERVER_CELERY + broker_url = app_settings.API_SERVER_CELERY.CELERY_RABBIT_BROKER.dsn + + try: + result = subprocess.run( + [ + "celery", + "--broker", + broker_url, + "inspect", + "ping", + "--destination", + "celery@" + os.getenv("API_SERVER_WORKER_NAME", "worker"), + ], + capture_output=True, + text=True, + check=True, + ) + return "pong" in result.stdout + except subprocess.CalledProcessError: + return False + + # Queries host # pylint: disable=consider-using-with ok = ( ok + or (app_settings.API_SERVER_WORKER_MODE and _is_celery_worker_healthy()) or urlopen( "{host}{baseurl}".format( host=sys.argv[1], baseurl=os.environ.get("SIMCORE_NODE_BASEPATH", "") diff --git a/services/api-server/docs/api-server.drawio.svg b/services/api-server/docs/api-server.drawio.svg index 98f7dcfdfc6e..8024640c9508 100644 --- a/services/api-server/docs/api-server.drawio.svg +++ b/services/api-server/docs/api-server.drawio.svg @@ -1,19 +1,43 @@ - + - + - + -
[api-server.drawio.svg: the drawio-embedded SVG/HTML markup does not survive text extraction, so the full diff of this file is reduced here to its recoverable labels. The architecture diagram gains a "SERVICE (CELERY DEPENDENT)" block next to the existing CONTROLLER / SERVICE / REPOSITORY / CLIENTS layers; other labels visible in the diff include rest, rpc, projects, /solvers, /programs, /studies, /functions, SolverService, CatalogService, ProgramsService, JobService, FunctionService, FunctionJobService, FunctionJobTaskClientService, TaskManager, ApiKeysRepository, UsersRepository, AuthSession, WbApiRpcClient, httpx, sa[asyncpg], RabbitMQ RPCClient, simcore_service_catalog, simcore_service_webserver, simcore_service_api_server, and the note "Dependencies go inward".]
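The boot.sh and Makefile hunks earlier in this diff switch the api-server from importing a module-level `the_app` to `uvicorn --factory main:app_factory` (and `app_factory().openapi()` for the OAS dump), i.e. uvicorn now calls a factory that builds the application on demand. A minimal sketch of that pattern, assuming only that the factory returns a configured FastAPI instance; the code below is illustrative and not the actual simcore_service_api_server implementation:

    # sketch_app_factory.py -- illustrative factory pattern, not the real implementation
    from fastapi import FastAPI

    def app_factory() -> FastAPI:
        # Building the app inside a callable lets `uvicorn --factory` (and the
        # Makefile's `app_factory().openapi()` call) construct it at invocation
        # time instead of importing a pre-built module-level object.
        app = FastAPI(title="osparc.io public API", version="0.13.0")

        @app.get("/v0/meta")
        def get_meta() -> dict[str, str]:
            return {"name": "simcore-service-api-server", "version": "0.13.0"}

        return app

Started with e.g. `uvicorn --factory sketch_app_factory:app_factory --host 0.0.0.0`, mirroring the production branch of docker/boot.sh.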
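The openapi.json diff that follows adds long-running task endpoints to the public API (GET /v0/tasks, GET /v0/tasks/{task_id}, POST /v0/tasks/{task_id}:cancel and GET /v0/tasks/{task_id}/result, all marked "New in version 0.10-rc1" and secured with HTTP Basic auth). A rough client-side sketch of consuming them; the base URL and credentials are placeholders, and the exact TaskStatus/TaskResult payload fields are only referenced, not defined, in this part of the diff:

    # sketch_poll_tasks.py -- illustrative client, endpoints taken from the OAS below
    import httpx

    BASE_URL = "https://api.osparc.io"      # placeholder
    AUTH = ("my-key-id", "my-key-secret")   # HTTPBasic credentials (placeholder)

    with httpx.Client(base_url=BASE_URL, auth=AUTH) as client:
        # GET /v0/tasks -> ApiServerEnvelope[list[TaskGet]]
        resp = client.get("/v0/tasks")
        resp.raise_for_status()
        for task in resp.json()["data"]:
            task_id = task["task_id"]
            # GET /v0/tasks/{task_id} -> TaskStatus; poll this until the task finishes
            status = client.get(f"/v0/tasks/{task_id}")
            status.raise_for_status()
            print(task_id, status.json())
            # When done: GET /v0/tasks/{task_id}/result -> TaskResult
            # To abort:  POST /v0/tasks/{task_id}:cancel (returns 204)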
diff --git a/services/api-server/openapi.json b/services/api-server/openapi.json index 5a64c1baacb7..17f6bd059ae3 100644 --- a/services/api-server/openapi.json +++ b/services/api-server/openapi.json @@ -3,7 +3,7 @@ "info": { "title": "osparc.io public API", "description": "osparc-simcore public API specifications", - "version": "0.9.0" + "version": "0.13.0" }, "paths": { "/v0/meta": { @@ -2761,6 +2761,138 @@ } } }, + "/v0/solvers/{solver_key}/releases/{version}/jobs/{job_id}/assets": { + "delete": { + "tags": [ + "solvers" + ], + "summary": "Delete Job Assets", + "description": "Deletes assets associated with an existing solver job. N.B. this renders the solver job un-startable\n\nNew in *version 0.12*", + "operationId": "delete_job_assets", + "security": [ + { + "HTTPBasic": [] + } + ], + "parameters": [ + { + "name": "solver_key", + "in": "path", + "required": true, + "schema": { + "type": "string", + "pattern": "^simcore/services/comp/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$", + "title": "Solver Key" + } + }, + { + "name": "version", + "in": "path", + "required": true, + "schema": { + "type": "string", + "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", + "title": "Version" + } + }, + { + "name": "job_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid", + "title": "Job Id" + } + } + ], + "responses": { + "204": { + "description": "Successful Response" + }, + "402": { + "description": "Payment required", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "404": { + "description": "Job/wallet/pricing details not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "429": { + "description": "Too many requests", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "502": { + "description": "Unexpected error when communicating with backend service", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "503": { + "description": "Service unavailable", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "504": { + "description": "Request to a backend service timed out.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, "/v0/solvers/{solver_key}/releases/{version}/jobs/{job_id}:start": { "post": { "tags": [ @@ -2926,6 +3058,16 @@ } } }, + "409": { + "description": "Job assets missing", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, "422": { "description": "Configuration error", "content": { @@ -3760,6 +3902,16 @@ } } }, + "409": { + "description": "Job assets missing", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, "422": { "description": "Validation Error", "content": { @@ 
-4459,6 +4611,16 @@ "title": "Study Id" } }, + { + "name": "hidden", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "default": false, + "title": "Hidden" + } + }, { "name": "x-simcore-parent-project-uuid", "in": "header", @@ -4494,6 +4656,15 @@ } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_clone_study_v0_studies__study_id__clone_post" + } + } + } + }, "responses": { "201": { "description": "Successful Response", @@ -5286,7 +5457,7 @@ "function_jobs" ], "summary": "List Function Jobs", - "description": "List function jobs\n\nNew in *version 0.8.0*\n\nAdded in *version 0.9.0*: add `created_at` field in the registered function-related objects", + "description": "List function jobs\n\nNew in *version 0.8.0*\n\nAdded in *version 0.9.0*: add `created_at` field in the registered function-related objects\n\nAdded in *version 0.11.0*: add filter by `function_id`, `function_job_ids` and `function_job_collection_id`\n\nAdded in *version 0.13.0*: add include_status bool query parameter to list function jobs with their status", "operationId": "list_function_jobs", "security": [ { @@ -5294,6 +5465,18 @@ } ], "parameters": [ + { + "name": "include_status", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "description": "Include job status in response", + "default": false, + "title": "Include Status" + }, + "description": "Include job status in response" + }, { "name": "limit", "in": "query", @@ -5316,6 +5499,66 @@ "default": 0, "title": "Offset" } + }, + { + "name": "function_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string", + "format": "uuid" + }, + { + "type": "null" + } + ], + "description": "Filter by function ID pattern", + "title": "Function Id" + }, + "description": "Filter by function ID pattern" + }, + { + "name": "function_job_ids", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string", + "format": "uuid" + } + }, + { + "type": "null" + } + ], + "description": "Filter by function job IDs", + "title": "Function Job Ids" + }, + "description": "Filter by function job IDs" + }, + { + "name": "function_job_collection_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string", + "format": "uuid" + }, + { + "type": "null" + } + ], + "description": "Filter by function job collection ID", + "title": "Function Job Collection Id" + }, + "description": "Filter by function job collection ID" } ], "responses": { @@ -5324,7 +5567,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Page_Annotated_Union_RegisteredProjectFunctionJob__RegisteredPythonCodeFunctionJob__RegisteredSolverFunctionJob___FieldInfo_annotation_NoneType__required_True__discriminator__function_class____" + "$ref": "#/components/schemas/PageRegisteredFunctionJobWithorWithoutStatus" } } } @@ -5650,7 +5893,8 @@ "schema": { "anyOf": [ { - "type": "object" + "type": "object", + "additionalProperties": true }, { "type": "null" @@ -5684,14 +5928,14 @@ } } }, - "/v0/function_job_collections": { - "get": { + "/v0/function_jobs/{function_job_id}/log": { + "post": { "tags": [ - "function_job_collections" + "function_jobs" ], - "summary": "List Function Job Collections", - "description": "List function job collections\n\nNew in *version 0.8.0*", - "operationId": "list_function_job_collections", + "summary": "Get Function Job Logs Task", + "description": "Get 
function job logs task\n\nNew in *version 0.11.0*", + "operationId": "get_function_job_logs_task", "security": [ { "HTTPBasic": [] @@ -5699,19 +5943,78 @@ ], "parameters": [ { - "name": "limit", - "in": "query", - "required": false, + "name": "function_job_id", + "in": "path", + "required": true, "schema": { - "type": "integer", - "maximum": 50, - "minimum": 1, - "default": 20, - "title": "Limit" + "type": "string", + "format": "uuid", + "title": "Function Job Id" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TaskGet" + } + } } }, - { - "name": "offset", + "404": { + "description": "Function job not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/v0/function_job_collections": { + "get": { + "tags": [ + "function_job_collections" + ], + "summary": "List Function Job Collections", + "description": "List function job collections\n\nNew in *version 0.8.0*", + "operationId": "list_function_job_collections", + "security": [ + { + "HTTPBasic": [] + } + ], + "parameters": [ + { + "name": "limit", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 50, + "minimum": 1, + "default": 20, + "title": "Limit" + } + }, + { + "name": "offset", "in": "query", "required": false, "schema": { @@ -6003,6 +6306,169 @@ } } }, + "/v0/function_job_collections/{function_job_collection_id}/function_jobs/page": { + "get": { + "tags": [ + "function_job_collections" + ], + "summary": "Function Job Collection List Function Jobs Page", + "description": "Get the function jobs in function job collection\n\nNew in *version 0.11.0*", + "operationId": "function_job_collection_list_function_jobs_page", + "security": [ + { + "HTTPBasic": [] + } + ], + "parameters": [ + { + "name": "function_job_collection_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid", + "title": "Function Job Collection Id" + } + }, + { + "name": "limit", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "maximum": 50, + "minimum": 1, + "default": 20, + "title": "Limit" + } + }, + { + "name": "offset", + "in": "query", + "required": false, + "schema": { + "type": "integer", + "minimum": 0, + "default": 0, + "title": "Offset" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Page_Annotated_Union_RegisteredProjectFunctionJob__RegisteredPythonCodeFunctionJob__RegisteredSolverFunctionJob___FieldInfo_annotation_NoneType__required_True__discriminator__function_class____" + } + } + } + }, + "404": { + "description": "Function job collection not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/v0/function_job_collections/{function_job_collection_id}/function_jobs/list": { + "get": { + "tags": [ + "function_job_collections" + ], + "summary": "Function Job Collection List Function Jobs List", + "description": "Get the function 
jobs in function job collection\n\nNew in *version 0.11.0*", + "operationId": "function_job_collection_list_function_jobs_list", + "security": [ + { + "HTTPBasic": [] + } + ], + "parameters": [ + { + "name": "function_job_collection_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid", + "title": "Function Job Collection Id" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "oneOf": [ + { + "$ref": "#/components/schemas/RegisteredProjectFunctionJob" + }, + { + "$ref": "#/components/schemas/RegisteredPythonCodeFunctionJob" + }, + { + "$ref": "#/components/schemas/RegisteredSolverFunctionJob" + } + ], + "discriminator": { + "propertyName": "function_class", + "mapping": { + "PROJECT": "#/components/schemas/RegisteredProjectFunctionJob", + "PYTHON_CODE": "#/components/schemas/RegisteredPythonCodeFunctionJob", + "SOLVER": "#/components/schemas/RegisteredSolverFunctionJob" + } + } + }, + "title": "Response Function Job Collection List Function Jobs List V0 Function Job Collections Function Job Collection Id Function Jobs List Get" + } + } + } + }, + "404": { + "description": "Function job collection not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, "/v0/function_job_collections/{function_job_collection_id}/status": { "get": { "tags": [ @@ -7510,7 +7976,8 @@ "schema": { "anyOf": [ { - "type": "object" + "type": "object", + "additionalProperties": true }, { "type": "null" @@ -7585,6 +8052,42 @@ "format": "uuid", "title": "Function Id" } + }, + { + "name": "x-simcore-parent-project-uuid", + "in": "header", + "required": true, + "schema": { + "anyOf": [ + { + "type": "string", + "format": "uuid" + }, + { + "const": "null", + "type": "string" + } + ], + "title": "X-Simcore-Parent-Project-Uuid" + } + }, + { + "name": "x-simcore-parent-node-id", + "in": "header", + "required": true, + "schema": { + "anyOf": [ + { + "type": "string", + "format": "uuid" + }, + { + "const": "null", + "type": "string" + } + ], + "title": "X-Simcore-Parent-Node-Id" + } } ], "requestBody": { @@ -7594,7 +8097,8 @@ "schema": { "anyOf": [ { - "type": "object" + "type": "object", + "additionalProperties": true }, { "type": "null" @@ -7681,24 +8185,61 @@ "format": "uuid", "title": "Function Id" } - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "type": "array", - "items": { - "anyOf": [ - { - "type": "object" - }, - { - "type": "null" - } - ] - }, + }, + { + "name": "x-simcore-parent-project-uuid", + "in": "header", + "required": true, + "schema": { + "anyOf": [ + { + "type": "string", + "format": "uuid" + }, + { + "const": "null", + "type": "string" + } + ], + "title": "X-Simcore-Parent-Project-Uuid" + } + }, + { + "name": "x-simcore-parent-node-id", + "in": "header", + "required": true, + "schema": { + "anyOf": [ + { + "type": "string", + "format": "uuid" + }, + { + "const": "null", + "type": "string" + } + ], + "title": "X-Simcore-Parent-Node-Id" + } + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "anyOf": [ + { + "type": "object", + "additionalProperties": true + }, + { + 
"type": "null" + } + ] + }, "maxItems": 50, "title": "Function Inputs List" } @@ -7738,10 +8279,243 @@ } } } + }, + "/v0/tasks": { + "get": { + "tags": [ + "tasks" + ], + "summary": "List Tasks", + "description": "List all tasks\n\nNew in *version 0.10-rc1*", + "operationId": "list_tasks", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ApiServerEnvelope_list_TaskGet__" + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + } + }, + "security": [ + { + "HTTPBasic": [] + } + ] + } + }, + "/v0/tasks/{task_id}": { + "get": { + "tags": [ + "tasks" + ], + "summary": "Get Task Status", + "description": "Get task status\n\nNew in *version 0.10-rc1*", + "operationId": "get_task_status", + "security": [ + { + "HTTPBasic": [] + } + ], + "parameters": [ + { + "name": "task_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid", + "title": "Task Id" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TaskStatus" + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/v0/tasks/{task_id}:cancel": { + "post": { + "tags": [ + "tasks" + ], + "summary": "Cancel Task", + "description": "Cancel task\n\nNew in *version 0.10-rc1*", + "operationId": "cancel_task", + "security": [ + { + "HTTPBasic": [] + } + ], + "parameters": [ + { + "name": "task_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid", + "title": "Task Id" + } + } + ], + "responses": { + "204": { + "description": "Successful Response" + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + } + }, + "/v0/tasks/{task_id}/result": { + "get": { + "tags": [ + "tasks" + ], + "summary": "Get Task Result", + "description": "Get task result\n\nNew in *version 0.10-rc1*", + "operationId": "get_task_result", + "security": [ + { + "HTTPBasic": [] + } + ], + "parameters": [ + { + "name": "task_id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid", + "title": "Task Id" + } + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/TaskResult" + } + } + } + }, + "404": { + "description": "Task result not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "500": { + "description": "Internal server error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/HTTPValidationError" + } + } + } + } + } + } } }, "components": { "schemas": { + "ApiServerEnvelope_list_TaskGet__": { + "properties": { + "data": { + "items": { + "$ref": "#/components/schemas/TaskGet" + }, + "type": "array", + "title": "Data" + } + }, + "type": "object", + "required": [ + "data" + ], + "title": "ApiServerEnvelope[list[TaskGet]]" + }, "Body_abort_multipart_upload_v0_files__file_id__abort_post": { "properties": { "client_file": { @@ -7762,6 +8536,36 @@ ], "title": "Body_abort_multipart_upload_v0_files__file_id__abort_post" }, + "Body_clone_study_v0_studies__study_id__clone_post": { + "properties": { + "title": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Title", + "empty": true + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Description", + "empty": true + } + }, + "type": "object", + "title": "Body_clone_study_v0_studies__study_id__clone_post" + }, "Body_complete_multipart_upload_v0_files__file_id__complete_post": { "properties": { "client_file": { @@ -7856,6 +8660,18 @@ "items": {}, "type": "array", "title": "Errors" + }, + "support_id": { + "anyOf": [ + { + "type": "string", + "pattern": "OEC:([a-fA-F0-9]{12})-(\\d{13,14})" + }, + { + "type": "null" + } + ], + "title": "Support Id" } }, "type": "object", @@ -8116,6 +8932,7 @@ "JSONFunctionInputSchema": { "properties": { "schema_content": { + "additionalProperties": true, "type": "object", "title": "JSON Schema", "description": "JSON Schema", @@ -8134,6 +8951,7 @@ "JSONFunctionOutputSchema": { "properties": { "schema_content": { + "additionalProperties": true, "type": "object", "title": "JSON Schema", "description": "JSON Schema", @@ -9086,15 +9904,299 @@ }, "type": "object", "required": [ - "items" + "items" + ], + "title": "OnePage[SolverPort]" + }, + "OnePage_StudyPort_": { + "properties": { + "items": { + "items": { + "$ref": "#/components/schemas/StudyPort" + }, + "type": "array", + "title": "Items" + }, + "total": { + "anyOf": [ + { + "type": "integer", + "minimum": 0 + }, + { + "type": "null" + } + ], + "title": "Total" + } + }, + "type": "object", + "required": [ + "items" + ], + "title": "OnePage[StudyPort]" + }, + "PageRegisteredFunctionJobWithorWithoutStatus": { + "properties": { + "items": { + "items": { + "anyOf": [ + { + "oneOf": [ + { + "$ref": "#/components/schemas/RegisteredProjectFunctionJobWithStatus" + }, + { + "$ref": "#/components/schemas/RegisteredPythonCodeFunctionJobWithStatus" + }, + { + "$ref": "#/components/schemas/RegisteredSolverFunctionJobWithStatus" + } + ], + "discriminator": { + "propertyName": "function_class", + "mapping": { + "PROJECT": "#/components/schemas/RegisteredProjectFunctionJobWithStatus", + "PYTHON_CODE": "#/components/schemas/RegisteredPythonCodeFunctionJobWithStatus", + "SOLVER": "#/components/schemas/RegisteredSolverFunctionJobWithStatus" + } + } + }, + { + "oneOf": [ + { + "$ref": "#/components/schemas/RegisteredProjectFunctionJob" + }, + { + "$ref": "#/components/schemas/RegisteredPythonCodeFunctionJob" + }, + { + "$ref": "#/components/schemas/RegisteredSolverFunctionJob" + } + ], + "discriminator": { + "propertyName": "function_class", + "mapping": { + "PROJECT": "#/components/schemas/RegisteredProjectFunctionJob", + "PYTHON_CODE": "#/components/schemas/RegisteredPythonCodeFunctionJob", + "SOLVER": "#/components/schemas/RegisteredSolverFunctionJob" + } + } + } + ] + }, + "type": "array", + "title": "Items" + }, + "total": { + "anyOf": [ + { + "type": 
"integer", + "minimum": 0 + }, + { + "type": "null" + } + ], + "title": "Total" + }, + "limit": { + "anyOf": [ + { + "type": "integer", + "minimum": 1 + }, + { + "type": "null" + } + ], + "title": "Limit" + }, + "offset": { + "anyOf": [ + { + "type": "integer", + "minimum": 0 + }, + { + "type": "null" + } + ], + "title": "Offset" + }, + "links": { + "$ref": "#/components/schemas/Links" + } + }, + "type": "object", + "required": [ + "items", + "total", + "limit", + "offset", + "links" + ], + "title": "PageRegisteredFunctionJobWithorWithoutStatus" + }, + "Page_Annotated_Union_RegisteredProjectFunctionJob__RegisteredPythonCodeFunctionJob__RegisteredSolverFunctionJob___FieldInfo_annotation_NoneType__required_True__discriminator__function_class____": { + "properties": { + "items": { + "items": { + "oneOf": [ + { + "$ref": "#/components/schemas/RegisteredProjectFunctionJob" + }, + { + "$ref": "#/components/schemas/RegisteredPythonCodeFunctionJob" + }, + { + "$ref": "#/components/schemas/RegisteredSolverFunctionJob" + } + ], + "discriminator": { + "propertyName": "function_class", + "mapping": { + "PROJECT": "#/components/schemas/RegisteredProjectFunctionJob", + "PYTHON_CODE": "#/components/schemas/RegisteredPythonCodeFunctionJob", + "SOLVER": "#/components/schemas/RegisteredSolverFunctionJob" + } + } + }, + "type": "array", + "title": "Items" + }, + "total": { + "anyOf": [ + { + "type": "integer", + "minimum": 0 + }, + { + "type": "null" + } + ], + "title": "Total" + }, + "limit": { + "anyOf": [ + { + "type": "integer", + "minimum": 1 + }, + { + "type": "null" + } + ], + "title": "Limit" + }, + "offset": { + "anyOf": [ + { + "type": "integer", + "minimum": 0 + }, + { + "type": "null" + } + ], + "title": "Offset" + }, + "links": { + "$ref": "#/components/schemas/Links" + } + }, + "type": "object", + "required": [ + "items", + "total", + "limit", + "offset", + "links" + ], + "title": "Page[Annotated[Union[RegisteredProjectFunctionJob, RegisteredPythonCodeFunctionJob, RegisteredSolverFunctionJob], FieldInfo(annotation=NoneType, required=True, discriminator='function_class')]]" + }, + "Page_Annotated_Union_RegisteredProjectFunction__RegisteredPythonCodeFunction__RegisteredSolverFunction___FieldInfo_annotation_NoneType__required_True__discriminator__function_class____": { + "properties": { + "items": { + "items": { + "oneOf": [ + { + "$ref": "#/components/schemas/RegisteredProjectFunction" + }, + { + "$ref": "#/components/schemas/RegisteredPythonCodeFunction" + }, + { + "$ref": "#/components/schemas/RegisteredSolverFunction" + } + ], + "discriminator": { + "propertyName": "function_class", + "mapping": { + "PROJECT": "#/components/schemas/RegisteredProjectFunction", + "PYTHON_CODE": "#/components/schemas/RegisteredPythonCodeFunction", + "SOLVER": "#/components/schemas/RegisteredSolverFunction" + } + } + }, + "type": "array", + "title": "Items" + }, + "total": { + "anyOf": [ + { + "type": "integer", + "minimum": 0 + }, + { + "type": "null" + } + ], + "title": "Total" + }, + "limit": { + "anyOf": [ + { + "type": "integer", + "minimum": 1 + }, + { + "type": "null" + } + ], + "title": "Limit" + }, + "offset": { + "anyOf": [ + { + "type": "integer", + "minimum": 0 + }, + { + "type": "null" + } + ], + "title": "Offset" + }, + "links": { + "$ref": "#/components/schemas/Links" + } + }, + "type": "object", + "required": [ + "items", + "total", + "limit", + "offset", + "links" ], - "title": "OnePage[SolverPort]" + "title": "Page[Annotated[Union[RegisteredProjectFunction, RegisteredPythonCodeFunction, 
RegisteredSolverFunction], FieldInfo(annotation=NoneType, required=True, discriminator='function_class')]]" }, - "OnePage_StudyPort_": { + "Page_File_": { "properties": { "items": { "items": { - "$ref": "#/components/schemas/StudyPort" + "$ref": "#/components/schemas/File" }, "type": "array", "title": "Items" @@ -9110,37 +10212,50 @@ } ], "title": "Total" + }, + "limit": { + "anyOf": [ + { + "type": "integer", + "minimum": 1 + }, + { + "type": "null" + } + ], + "title": "Limit" + }, + "offset": { + "anyOf": [ + { + "type": "integer", + "minimum": 0 + }, + { + "type": "null" + } + ], + "title": "Offset" + }, + "links": { + "$ref": "#/components/schemas/Links" } }, "type": "object", "required": [ - "items" + "items", + "total", + "limit", + "offset", + "links" ], - "title": "OnePage[StudyPort]" + "title": "Page[File]" }, - "Page_Annotated_Union_RegisteredProjectFunctionJob__RegisteredPythonCodeFunctionJob__RegisteredSolverFunctionJob___FieldInfo_annotation_NoneType__required_True__discriminator__function_class____": { + "Page_Job_": { "properties": { "items": { "items": { - "oneOf": [ - { - "$ref": "#/components/schemas/RegisteredProjectFunctionJob" - }, - { - "$ref": "#/components/schemas/RegisteredPythonCodeFunctionJob" - }, - { - "$ref": "#/components/schemas/RegisteredSolverFunctionJob" - } - ], - "discriminator": { - "propertyName": "function_class", - "mapping": { - "PROJECT": "#/components/schemas/RegisteredProjectFunctionJob", - "PYTHON_CODE": "#/components/schemas/RegisteredPythonCodeFunctionJob", - "SOLVER": "#/components/schemas/RegisteredSolverFunctionJob" - } - } + "$ref": "#/components/schemas/Job" }, "type": "array", "title": "Items" @@ -9193,31 +10308,13 @@ "offset", "links" ], - "title": "Page[Annotated[Union[RegisteredProjectFunctionJob, RegisteredPythonCodeFunctionJob, RegisteredSolverFunctionJob], FieldInfo(annotation=NoneType, required=True, discriminator='function_class')]]" + "title": "Page[Job]" }, - "Page_Annotated_Union_RegisteredProjectFunction__RegisteredPythonCodeFunction__RegisteredSolverFunction___FieldInfo_annotation_NoneType__required_True__discriminator__function_class____": { + "Page_LicensedItemGet_": { "properties": { "items": { "items": { - "oneOf": [ - { - "$ref": "#/components/schemas/RegisteredProjectFunction" - }, - { - "$ref": "#/components/schemas/RegisteredPythonCodeFunction" - }, - { - "$ref": "#/components/schemas/RegisteredSolverFunction" - } - ], - "discriminator": { - "propertyName": "function_class", - "mapping": { - "PROJECT": "#/components/schemas/RegisteredProjectFunction", - "PYTHON_CODE": "#/components/schemas/RegisteredPythonCodeFunction", - "SOLVER": "#/components/schemas/RegisteredSolverFunction" - } - } + "$ref": "#/components/schemas/LicensedItemGet" }, "type": "array", "title": "Items" @@ -9270,13 +10367,13 @@ "offset", "links" ], - "title": "Page[Annotated[Union[RegisteredProjectFunction, RegisteredPythonCodeFunction, RegisteredSolverFunction], FieldInfo(annotation=NoneType, required=True, discriminator='function_class')]]" + "title": "Page[LicensedItemGet]" }, - "Page_File_": { + "Page_RegisteredFunctionJobCollection_": { "properties": { "items": { "items": { - "$ref": "#/components/schemas/File" + "$ref": "#/components/schemas/RegisteredFunctionJobCollection" }, "type": "array", "title": "Items" @@ -9329,13 +10426,13 @@ "offset", "links" ], - "title": "Page[File]" + "title": "Page[RegisteredFunctionJobCollection]" }, - "Page_Job_": { + "Page_Study_": { "properties": { "items": { "items": { - "$ref": "#/components/schemas/Job" + 
"$ref": "#/components/schemas/Study" }, "type": "array", "title": "Items" @@ -9388,431 +10485,575 @@ "offset", "links" ], - "title": "Page[Job]" + "title": "Page[Study]" }, - "Page_LicensedItemGet_": { + "PricingPlanClassification": { + "type": "string", + "enum": [ + "TIER", + "LICENSE" + ], + "title": "PricingPlanClassification" + }, + "PricingUnitGetLegacy": { "properties": { - "items": { - "items": { - "$ref": "#/components/schemas/LicensedItemGet" - }, - "type": "array", - "title": "Items" + "pricingUnitId": { + "type": "integer", + "exclusiveMinimum": true, + "title": "Pricingunitid", + "minimum": 0 }, - "total": { + "unitName": { + "type": "string", + "title": "Unitname" + }, + "unitExtraInfo": { + "$ref": "#/components/schemas/UnitExtraInfoTier" + }, + "currentCostPerUnit": { + "type": "number", + "minimum": 0.0, + "title": "Currentcostperunit" + }, + "default": { + "type": "boolean", + "title": "Default" + } + }, + "type": "object", + "required": [ + "pricingUnitId", + "unitName", + "unitExtraInfo", + "currentCostPerUnit", + "default" + ], + "title": "PricingUnitGetLegacy" + }, + "Profile": { + "properties": { + "first_name": { "anyOf": [ { - "type": "integer", - "minimum": 0 + "type": "string", + "maxLength": 255 }, { "type": "null" } ], - "title": "Total" + "title": "First Name" + }, + "last_name": { + "anyOf": [ + { + "type": "string", + "maxLength": 255 + }, + { + "type": "null" + } + ], + "title": "Last Name" + }, + "id": { + "type": "integer", + "exclusiveMinimum": true, + "title": "Id", + "minimum": 0 + }, + "login": { + "type": "string", + "format": "email", + "title": "Login" + }, + "role": { + "$ref": "#/components/schemas/UserRoleEnum" + }, + "groups": { + "anyOf": [ + { + "$ref": "#/components/schemas/Groups" + }, + { + "type": "null" + } + ] + }, + "gravatar_id": { + "anyOf": [ + { + "type": "string", + "maxLength": 40 + }, + { + "type": "null" + } + ], + "title": "Gravatar Id", + "description": "md5 hash value of email to retrieve an avatar image from https://www.gravatar.com" + } + }, + "type": "object", + "required": [ + "id", + "login", + "role" + ], + "title": "Profile", + "example": { + "first_name": "James", + "gravatar_id": "9a8930a5b20d7048e37740bac5c1ca4f", + "groups": { + "all": { + "description": "all users", + "gid": "1", + "label": "Everyone" + }, + "me": { + "description": "primary group", + "gid": "123", + "label": "maxy" + }, + "organizations": [] }, - "limit": { + "id": "20", + "last_name": "Maxwell", + "login": "james-maxwell@itis.swiss", + "role": "USER" + } + }, + "ProfileUpdate": { + "properties": { + "first_name": { "anyOf": [ { - "type": "integer", - "minimum": 1 + "type": "string", + "maxLength": 255 }, { "type": "null" } ], - "title": "Limit" + "title": "First Name" }, - "offset": { + "last_name": { "anyOf": [ { - "type": "integer", - "minimum": 0 + "type": "string", + "maxLength": 255 }, { "type": "null" } ], - "title": "Offset" - }, - "links": { - "$ref": "#/components/schemas/Links" + "title": "Last Name" } }, "type": "object", - "required": [ - "items", - "total", - "limit", - "offset", - "links" - ], - "title": "Page[LicensedItemGet]" + "title": "ProfileUpdate" }, - "Page_RegisteredFunctionJobCollection_": { + "Program": { "properties": { - "items": { - "items": { - "$ref": "#/components/schemas/RegisteredFunctionJobCollection" - }, - "type": "array", - "title": "Items" + "id": { + "type": "string", + "title": "Id", + "description": "Resource identifier" }, - "total": { + "version": { + "type": "string", + "pattern": 
"^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", + "title": "Version", + "description": "Semantic version number of the resource" + }, + "title": { + "type": "string", + "maxLength": 100, + "title": "Title", + "description": "Human readable name" + }, + "description": { "anyOf": [ { - "type": "integer", - "minimum": 0 + "type": "string", + "maxLength": 1000 }, { "type": "null" } ], - "title": "Total" + "title": "Description", + "description": "Description of the resource" }, - "limit": { + "url": { "anyOf": [ { - "type": "integer", - "minimum": 1 + "type": "string", + "maxLength": 2083, + "minLength": 1, + "format": "uri" }, { "type": "null" } ], - "title": "Limit" + "title": "Url", + "description": "Link to get this resource" }, - "offset": { + "version_display": { "anyOf": [ { - "type": "integer", - "minimum": 0 + "type": "string" }, { "type": "null" } ], - "title": "Offset" - }, - "links": { - "$ref": "#/components/schemas/Links" + "title": "Version Display" } }, "type": "object", "required": [ - "items", - "total", - "limit", - "offset", - "links" + "id", + "version", + "title", + "url", + "version_display" ], - "title": "Page[RegisteredFunctionJobCollection]" + "title": "Program", + "description": "A released program with a specific version", + "example": { + "description": "Simulation framework", + "id": "simcore/services/dynamic/sim4life", + "maintainer": "info@itis.swiss", + "title": "Sim4life", + "url": "https://api.osparc.io/v0/solvers/simcore%2Fservices%2Fdynamic%2Fsim4life/releases/8.0.0", + "version": "8.0.0", + "version_display": "8.0.0" + } }, - "Page_Study_": { + "ProjectFunction": { "properties": { - "items": { - "items": { - "$ref": "#/components/schemas/Study" - }, - "type": "array", - "title": "Items" + "function_class": { + "type": "string", + "const": "PROJECT", + "title": "Function Class", + "default": "PROJECT" }, - "total": { - "anyOf": [ - { - "type": "integer", - "minimum": 0 - }, + "title": { + "type": "string", + "title": "Title", + "default": "" + }, + "description": { + "type": "string", + "title": "Description", + "default": "" + }, + "input_schema": { + "oneOf": [ { - "type": "null" + "$ref": "#/components/schemas/JSONFunctionInputSchema" } ], - "title": "Total" + "title": "Input Schema", + "discriminator": { + "propertyName": "schema_class", + "mapping": { + "application/schema+json": "#/components/schemas/JSONFunctionInputSchema" + } + } }, - "limit": { - "anyOf": [ - { - "type": "integer", - "minimum": 1 - }, + "output_schema": { + "oneOf": [ { - "type": "null" + "$ref": "#/components/schemas/JSONFunctionOutputSchema" } ], - "title": "Limit" + "title": "Output Schema", + "discriminator": { + "propertyName": "schema_class", + "mapping": { + "application/schema+json": "#/components/schemas/JSONFunctionOutputSchema" + } + } }, - "offset": { + "default_inputs": { "anyOf": [ { - "type": "integer", - "minimum": 0 + "additionalProperties": true, + "type": "object" }, { "type": "null" } ], - "title": "Offset" + "title": "Default Inputs" }, - "links": { - "$ref": "#/components/schemas/Links" + "project_id": { + "type": "string", + "format": "uuid", + "title": "Project Id" } }, "type": "object", "required": [ - "items", - "total", - "limit", - "offset", - "links" - ], - "title": "Page[Study]" - }, - "PricingPlanClassification": { - "type": "string", - "enum": [ - "TIER", - "LICENSE" + "input_schema", + "output_schema", + "default_inputs", + 
"project_id" ], - "title": "PricingPlanClassification" + "title": "ProjectFunction" }, - "PricingUnitGetLegacy": { + "ProjectFunctionJob": { "properties": { - "pricingUnitId": { - "type": "integer", - "exclusiveMinimum": true, - "title": "Pricingunitid", - "minimum": 0 - }, - "unitName": { + "title": { "type": "string", - "title": "Unitname" - }, - "unitExtraInfo": { - "$ref": "#/components/schemas/UnitExtraInfoTier" + "title": "Title", + "default": "" }, - "currentCostPerUnit": { - "type": "number", - "minimum": 0.0, - "title": "Currentcostperunit" + "description": { + "type": "string", + "title": "Description", + "default": "" }, - "default": { - "type": "boolean", - "title": "Default" - } - }, - "type": "object", - "required": [ - "pricingUnitId", - "unitName", - "unitExtraInfo", - "currentCostPerUnit", - "default" - ], - "title": "PricingUnitGetLegacy" - }, - "Profile": { - "properties": { - "first_name": { + "function_uid": { + "type": "string", + "format": "uuid", + "title": "Function Uid" + }, + "inputs": { "anyOf": [ { - "type": "string", - "maxLength": 255 + "additionalProperties": true, + "type": "object" }, { "type": "null" } ], - "title": "First Name" + "title": "Inputs" }, - "last_name": { + "outputs": { "anyOf": [ { - "type": "string", - "maxLength": 255 + "additionalProperties": true, + "type": "object" }, { "type": "null" } ], - "title": "Last Name" - }, - "id": { - "type": "integer", - "exclusiveMinimum": true, - "title": "Id", - "minimum": 0 + "title": "Outputs" }, - "login": { + "function_class": { "type": "string", - "format": "email", - "title": "Login" - }, - "role": { - "$ref": "#/components/schemas/UserRoleEnum" + "const": "PROJECT", + "title": "Function Class", + "default": "PROJECT" }, - "groups": { + "project_job_id": { "anyOf": [ { - "$ref": "#/components/schemas/Groups" + "type": "string", + "format": "uuid" }, { "type": "null" } - ] + ], + "title": "Project Job Id" }, - "gravatar_id": { + "job_creation_task_id": { "anyOf": [ { - "type": "string", - "maxLength": 40 + "type": "string" }, { "type": "null" } ], - "title": "Gravatar Id", - "description": "md5 hash value of email to retrieve an avatar image from https://www.gravatar.com" + "title": "Job Creation Task Id" } }, "type": "object", "required": [ - "id", - "login", - "role" + "function_uid", + "inputs", + "outputs", + "project_job_id", + "job_creation_task_id" ], - "title": "Profile", - "example": { - "first_name": "James", - "gravatar_id": "9a8930a5b20d7048e37740bac5c1ca4f", - "groups": { - "all": { - "description": "all users", - "gid": "1", - "label": "Everyone" - }, - "me": { - "description": "primary group", - "gid": "123", - "label": "maxy" - }, - "organizations": [] - }, - "id": "20", - "last_name": "Maxwell", - "login": "james-maxwell@itis.swiss", - "role": "USER" - } + "title": "ProjectFunctionJob" }, - "ProfileUpdate": { + "PythonCodeFunction": { "properties": { - "first_name": { - "anyOf": [ + "function_class": { + "type": "string", + "const": "PYTHON_CODE", + "title": "Function Class", + "default": "PYTHON_CODE" + }, + "title": { + "type": "string", + "title": "Title", + "default": "" + }, + "description": { + "type": "string", + "title": "Description", + "default": "" + }, + "input_schema": { + "oneOf": [ { - "type": "string", - "maxLength": 255 - }, + "$ref": "#/components/schemas/JSONFunctionInputSchema" + } + ], + "title": "Input Schema", + "discriminator": { + "propertyName": "schema_class", + "mapping": { + "application/schema+json": "#/components/schemas/JSONFunctionInputSchema" + } + } 
+ }, + "output_schema": { + "oneOf": [ { - "type": "null" + "$ref": "#/components/schemas/JSONFunctionOutputSchema" } ], - "title": "First Name" + "title": "Output Schema", + "discriminator": { + "propertyName": "schema_class", + "mapping": { + "application/schema+json": "#/components/schemas/JSONFunctionOutputSchema" + } + } }, - "last_name": { + "default_inputs": { "anyOf": [ { - "type": "string", - "maxLength": 255 + "additionalProperties": true, + "type": "object" }, { "type": "null" } ], - "title": "Last Name" + "title": "Default Inputs" + }, + "code_url": { + "type": "string", + "title": "Code Url" } }, "type": "object", - "title": "ProfileUpdate" + "required": [ + "input_schema", + "output_schema", + "default_inputs", + "code_url" + ], + "title": "PythonCodeFunction" }, - "Program": { + "PythonCodeFunctionJob": { "properties": { - "id": { - "type": "string", - "title": "Id", - "description": "Resource identifier" - }, - "version": { - "type": "string", - "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", - "title": "Version", - "description": "Semantic version number of the resource" - }, "title": { "type": "string", - "maxLength": 100, "title": "Title", - "description": "Human readable name" + "default": "" }, "description": { - "anyOf": [ - { - "type": "string", - "maxLength": 1000 - }, - { - "type": "null" - } - ], + "type": "string", "title": "Description", - "description": "Description of the resource" + "default": "" }, - "url": { + "function_uid": { + "type": "string", + "format": "uuid", + "title": "Function Uid" + }, + "inputs": { "anyOf": [ { - "type": "string", - "maxLength": 2083, - "minLength": 1, - "format": "uri" + "additionalProperties": true, + "type": "object" }, { "type": "null" } ], - "title": "Url", - "description": "Link to get this resource" + "title": "Inputs" }, - "version_display": { + "outputs": { "anyOf": [ { - "type": "string" + "additionalProperties": true, + "type": "object" }, { "type": "null" } ], - "title": "Version Display" + "title": "Outputs" + }, + "function_class": { + "type": "string", + "const": "PYTHON_CODE", + "title": "Function Class", + "default": "PYTHON_CODE" } }, "type": "object", "required": [ - "id", - "version", - "title", - "url", - "version_display" + "function_uid", + "inputs", + "outputs" ], - "title": "Program", - "description": "A released program with a specific version", - "example": { - "description": "Simulation framework", - "id": "simcore/services/dynamic/sim4life", - "maintainer": "info@itis.swiss", - "title": "Sim4life", - "url": "https://api.osparc.io/v0/solvers/simcore%2Fservices%2Fdynamic%2Fsim4life/releases/8.0.0", - "version": "8.0.0", - "version_display": "8.0.0" - } + "title": "PythonCodeFunctionJob" + }, + "RegisteredFunctionJobCollection": { + "properties": { + "title": { + "type": "string", + "title": "Title", + "default": "" + }, + "description": { + "type": "string", + "title": "Description", + "default": "" + }, + "job_ids": { + "items": { + "type": "string", + "format": "uuid" + }, + "type": "array", + "title": "Job Ids", + "default": [] + }, + "uid": { + "type": "string", + "format": "uuid", + "title": "Uid" + }, + "created_at": { + "type": "string", + "format": "date-time", + "title": "Created At" + } + }, + "type": "object", + "required": [ + "uid", + "created_at" + ], + "title": "RegisteredFunctionJobCollection" }, - "ProjectFunction": { + "RegisteredProjectFunction": { 
"properties": { "function_class": { "type": "string", @@ -9861,6 +11102,7 @@ "default_inputs": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -9869,6 +11111,21 @@ ], "title": "Default Inputs" }, + "uid": { + "type": "string", + "format": "uuid", + "title": "Uid" + }, + "created_at": { + "type": "string", + "format": "date-time", + "title": "Created At" + }, + "modified_at": { + "type": "string", + "format": "date-time", + "title": "Modified At" + }, "project_id": { "type": "string", "format": "uuid", @@ -9880,11 +11137,14 @@ "input_schema", "output_schema", "default_inputs", + "uid", + "created_at", + "modified_at", "project_id" ], - "title": "ProjectFunction" + "title": "RegisteredProjectFunction" }, - "ProjectFunctionJob": { + "RegisteredProjectFunctionJob": { "properties": { "title": { "type": "string", @@ -9904,6 +11164,7 @@ "inputs": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -9915,6 +11176,7 @@ "outputs": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -9929,93 +11191,53 @@ "title": "Function Class", "default": "PROJECT" }, - "project_job_id": { + "uid": { "type": "string", "format": "uuid", - "title": "Project Job Id" - } - }, - "type": "object", - "required": [ - "function_uid", - "inputs", - "outputs", - "project_job_id" - ], - "title": "ProjectFunctionJob" - }, - "PythonCodeFunction": { - "properties": { - "function_class": { - "type": "string", - "const": "PYTHON_CODE", - "title": "Function Class", - "default": "PYTHON_CODE" - }, - "title": { - "type": "string", - "title": "Title", - "default": "" + "title": "Uid" }, - "description": { + "created_at": { "type": "string", - "title": "Description", - "default": "" + "format": "date-time", + "title": "Created At" }, - "input_schema": { - "oneOf": [ + "project_job_id": { + "anyOf": [ { - "$ref": "#/components/schemas/JSONFunctionInputSchema" - } - ], - "title": "Input Schema", - "discriminator": { - "propertyName": "schema_class", - "mapping": { - "application/schema+json": "#/components/schemas/JSONFunctionInputSchema" - } - } - }, - "output_schema": { - "oneOf": [ + "type": "string", + "format": "uuid" + }, { - "$ref": "#/components/schemas/JSONFunctionOutputSchema" + "type": "null" } ], - "title": "Output Schema", - "discriminator": { - "propertyName": "schema_class", - "mapping": { - "application/schema+json": "#/components/schemas/JSONFunctionOutputSchema" - } - } + "title": "Project Job Id" }, - "default_inputs": { + "job_creation_task_id": { "anyOf": [ { - "type": "object" + "type": "string" }, { "type": "null" } ], - "title": "Default Inputs" - }, - "code_url": { - "type": "string", - "title": "Code Url" + "title": "Job Creation Task Id" } }, "type": "object", "required": [ - "input_schema", - "output_schema", - "default_inputs", - "code_url" + "function_uid", + "inputs", + "outputs", + "uid", + "created_at", + "project_job_id", + "job_creation_task_id" ], - "title": "PythonCodeFunction" + "title": "RegisteredProjectFunctionJob" }, - "PythonCodeFunctionJob": { + "RegisteredProjectFunctionJobWithStatus": { "properties": { "title": { "type": "string", @@ -10035,6 +11257,7 @@ "inputs": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -10046,6 +11269,7 @@ "outputs": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -10056,39 +11280,9 @@ }, "function_class": { "type": "string", - "const": "PYTHON_CODE", + "const": "PROJECT", "title": "Function Class", - "default": "PYTHON_CODE" - } - }, - "type": "object", - 
"required": [ - "function_uid", - "inputs", - "outputs" - ], - "title": "PythonCodeFunctionJob" - }, - "RegisteredFunctionJobCollection": { - "properties": { - "title": { - "type": "string", - "title": "Title", - "default": "" - }, - "description": { - "type": "string", - "title": "Description", - "default": "" - }, - "job_ids": { - "items": { - "type": "string", - "format": "uuid" - }, - "type": "array", - "title": "Job Ids", - "default": [] + "default": "PROJECT" }, "uid": { "type": "string", @@ -10099,22 +11293,54 @@ "type": "string", "format": "date-time", "title": "Created At" + }, + "status": { + "$ref": "#/components/schemas/FunctionJobStatus" + }, + "project_job_id": { + "anyOf": [ + { + "type": "string", + "format": "uuid" + }, + { + "type": "null" + } + ], + "title": "Project Job Id" + }, + "job_creation_task_id": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Job Creation Task Id" } }, "type": "object", "required": [ + "function_uid", + "inputs", + "outputs", "uid", - "created_at" + "created_at", + "status", + "project_job_id", + "job_creation_task_id" ], - "title": "RegisteredFunctionJobCollection" + "title": "RegisteredProjectFunctionJobWithStatus" }, - "RegisteredProjectFunction": { + "RegisteredPythonCodeFunction": { "properties": { "function_class": { "type": "string", - "const": "PROJECT", + "const": "PYTHON_CODE", "title": "Function Class", - "default": "PROJECT" + "default": "PYTHON_CODE" }, "title": { "type": "string", @@ -10157,6 +11383,7 @@ "default_inputs": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -10175,10 +11402,14 @@ "format": "date-time", "title": "Created At" }, - "project_id": { + "modified_at": { "type": "string", - "format": "uuid", - "title": "Project Id" + "format": "date-time", + "title": "Modified At" + }, + "code_url": { + "type": "string", + "title": "Code Url" } }, "type": "object", @@ -10188,11 +11419,80 @@ "default_inputs", "uid", "created_at", - "project_id" + "modified_at", + "code_url" ], - "title": "RegisteredProjectFunction" + "title": "RegisteredPythonCodeFunction" + }, + "RegisteredPythonCodeFunctionJob": { + "properties": { + "title": { + "type": "string", + "title": "Title", + "default": "" + }, + "description": { + "type": "string", + "title": "Description", + "default": "" + }, + "function_uid": { + "type": "string", + "format": "uuid", + "title": "Function Uid" + }, + "inputs": { + "anyOf": [ + { + "additionalProperties": true, + "type": "object" + }, + { + "type": "null" + } + ], + "title": "Inputs" + }, + "outputs": { + "anyOf": [ + { + "additionalProperties": true, + "type": "object" + }, + { + "type": "null" + } + ], + "title": "Outputs" + }, + "function_class": { + "type": "string", + "const": "PYTHON_CODE", + "title": "Function Class", + "default": "PYTHON_CODE" + }, + "uid": { + "type": "string", + "format": "uuid", + "title": "Uid" + }, + "created_at": { + "type": "string", + "format": "date-time", + "title": "Created At" + } + }, + "type": "object", + "required": [ + "function_uid", + "inputs", + "outputs", + "uid", + "created_at" + ], + "title": "RegisteredPythonCodeFunctionJob" }, - "RegisteredProjectFunctionJob": { + "RegisteredPythonCodeFunctionJobWithStatus": { "properties": { "title": { "type": "string", @@ -10212,6 +11512,7 @@ "inputs": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -10223,6 +11524,7 @@ "outputs": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -10233,9 +11535,9 @@ }, "function_class": { 
"type": "string", - "const": "PROJECT", + "const": "PYTHON_CODE", "title": "Function Class", - "default": "PROJECT" + "default": "PYTHON_CODE" }, "uid": { "type": "string", @@ -10247,10 +11549,8 @@ "format": "date-time", "title": "Created At" }, - "project_job_id": { - "type": "string", - "format": "uuid", - "title": "Project Job Id" + "status": { + "$ref": "#/components/schemas/FunctionJobStatus" } }, "type": "object", @@ -10260,17 +11560,17 @@ "outputs", "uid", "created_at", - "project_job_id" + "status" ], - "title": "RegisteredProjectFunctionJob" + "title": "RegisteredPythonCodeFunctionJobWithStatus" }, - "RegisteredPythonCodeFunction": { + "RegisteredSolverFunction": { "properties": { "function_class": { "type": "string", - "const": "PYTHON_CODE", + "const": "SOLVER", "title": "Function Class", - "default": "PYTHON_CODE" + "default": "SOLVER" }, "title": { "type": "string", @@ -10313,6 +11613,7 @@ "default_inputs": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -10331,9 +11632,20 @@ "format": "date-time", "title": "Created At" }, - "code_url": { + "modified_at": { "type": "string", - "title": "Code Url" + "format": "date-time", + "title": "Modified At" + }, + "solver_key": { + "type": "string", + "pattern": "^simcore/services/((comp|dynamic|frontend))/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$", + "title": "Solver Key" + }, + "solver_version": { + "type": "string", + "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", + "title": "Solver Version" } }, "type": "object", @@ -10343,11 +11655,13 @@ "default_inputs", "uid", "created_at", - "code_url" + "modified_at", + "solver_key", + "solver_version" ], - "title": "RegisteredPythonCodeFunction" + "title": "RegisteredSolverFunction" }, - "RegisteredPythonCodeFunctionJob": { + "RegisteredSolverFunctionJob": { "properties": { "title": { "type": "string", @@ -10367,6 +11681,7 @@ "inputs": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -10378,6 +11693,7 @@ "outputs": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -10388,9 +11704,9 @@ }, "function_class": { "type": "string", - "const": "PYTHON_CODE", + "const": "SOLVER", "title": "Function Class", - "default": "PYTHON_CODE" + "default": "SOLVER" }, "uid": { "type": "string", @@ -10401,109 +11717,44 @@ "type": "string", "format": "date-time", "title": "Created At" - } - }, - "type": "object", - "required": [ - "function_uid", - "inputs", - "outputs", - "uid", - "created_at" - ], - "title": "RegisteredPythonCodeFunctionJob" - }, - "RegisteredSolverFunction": { - "properties": { - "function_class": { - "type": "string", - "const": "SOLVER", - "title": "Function Class", - "default": "SOLVER" - }, - "title": { - "type": "string", - "title": "Title", - "default": "" - }, - "description": { - "type": "string", - "title": "Description", - "default": "" }, - "input_schema": { - "oneOf": [ + "solver_job_id": { + "anyOf": [ { - "$ref": "#/components/schemas/JSONFunctionInputSchema" - } - ], - "title": "Input Schema", - "discriminator": { - "propertyName": "schema_class", - "mapping": { - "application/schema+json": "#/components/schemas/JSONFunctionInputSchema" - } - } - }, - "output_schema": { - "oneOf": [ + "type": "string", + "format": "uuid" + }, { - "$ref": "#/components/schemas/JSONFunctionOutputSchema" + "type": "null" } ], - "title": "Output Schema", - "discriminator": { - "propertyName": 
"schema_class", - "mapping": { - "application/schema+json": "#/components/schemas/JSONFunctionOutputSchema" - } - } + "title": "Solver Job Id" }, - "default_inputs": { + "job_creation_task_id": { "anyOf": [ { - "type": "object" + "type": "string" }, { "type": "null" } ], - "title": "Default Inputs" - }, - "uid": { - "type": "string", - "format": "uuid", - "title": "Uid" - }, - "created_at": { - "type": "string", - "format": "date-time", - "title": "Created At" - }, - "solver_key": { - "type": "string", - "pattern": "^simcore/services/((comp|dynamic|frontend))/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$", - "title": "Solver Key" - }, - "solver_version": { - "type": "string", - "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", - "title": "Solver Version" + "title": "Job Creation Task Id" } }, "type": "object", "required": [ - "input_schema", - "output_schema", - "default_inputs", + "function_uid", + "inputs", + "outputs", "uid", "created_at", - "solver_key", - "solver_version" + "solver_job_id", + "job_creation_task_id" ], - "title": "RegisteredSolverFunction" + "title": "RegisteredSolverFunctionJob" }, - "RegisteredSolverFunctionJob": { + "RegisteredSolverFunctionJobWithStatus": { "properties": { "title": { "type": "string", @@ -10523,6 +11774,7 @@ "inputs": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -10534,6 +11786,7 @@ "outputs": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -10558,10 +11811,31 @@ "format": "date-time", "title": "Created At" }, + "status": { + "$ref": "#/components/schemas/FunctionJobStatus" + }, "solver_job_id": { - "type": "string", - "format": "uuid", + "anyOf": [ + { + "type": "string", + "format": "uuid" + }, + { + "type": "null" + } + ], "title": "Solver Job Id" + }, + "job_creation_task_id": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Job Creation Task Id" } }, "type": "object", @@ -10571,26 +11845,28 @@ "outputs", "uid", "created_at", - "solver_job_id" + "status", + "solver_job_id", + "job_creation_task_id" ], - "title": "RegisteredSolverFunctionJob" + "title": "RegisteredSolverFunctionJobWithStatus" }, "RunningState": { "type": "string", "enum": [ "UNKNOWN", - "PUBLISHED", "NOT_STARTED", + "PUBLISHED", "PENDING", + "WAITING_FOR_CLUSTER", "WAITING_FOR_RESOURCES", "STARTED", "SUCCESS", "FAILED", - "ABORTED", - "WAITING_FOR_CLUSTER" + "ABORTED" ], "title": "RunningState", - "description": "State of execution of a project's computational workflow\n\nSEE StateType for task state" + "description": "State of execution of a project's computational workflow\n\nSEE StateType for task state\n\n# Computational backend states explained:\n- UNKNOWN - The backend doesn't know about the task anymore, it has disappeared from the system or it was never created (eg. when we are asking for the task)\n- NOT_STARTED - Default state when the task is created\n- PUBLISHED - The task has been submitted to the computational backend (click on \"Run\" button in the UI)\n- PENDING - Task has been transferred to the Dask scheduler and is waiting for a worker to pick it up (director-v2 --> Dask scheduler)\n - But! it is also transition state (ex. 
PENDING -> WAITING_FOR_CLUSTER -> PENDING -> WAITING_FOR_RESOURCES -> PENDING -> STARTED)\n- WAITING_FOR_CLUSTER - No cluster (Dask scheduler) is available to run the task; waiting for one to become available\n- WAITING_FOR_RESOURCES - No worker (Dask worker) is available to run the task; waiting for one to become available\n- STARTED - A worker has picked up the task and is executing it\n- SUCCESS - Task finished successfully\n- FAILED - Task finished with an error\n- ABORTED - Task was aborted before completion" }, "ServicePricingPlanGetLegacy": { "properties": { @@ -10774,6 +12050,7 @@ "default_inputs": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -10823,6 +12100,7 @@ "inputs": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -10834,6 +12112,7 @@ "outputs": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -10849,9 +12128,27 @@ "default": "SOLVER" }, "solver_job_id": { - "type": "string", - "format": "uuid", + "anyOf": [ + { + "type": "string", + "format": "uuid" + }, + { + "type": "null" + } + ], "title": "Solver Job Id" + }, + "job_creation_task_id": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Job Creation Task Id" } }, "type": "object", @@ -10859,7 +12156,8 @@ "function_uid", "inputs", "outputs", - "solver_job_id" + "solver_job_id", + "job_creation_task_id" ], "title": "SolverFunctionJob" }, @@ -10882,6 +12180,7 @@ "content_schema": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -10965,6 +12264,7 @@ "content_schema": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -10993,6 +12293,126 @@ "kind": "input" } }, + "TaskGet": { + "properties": { + "task_id": { + "type": "string", + "title": "Task Id" + }, + "task_name": { + "type": "string", + "title": "Task Name" + }, + "status_href": { + "type": "string", + "title": "Status Href" + }, + "result_href": { + "type": "string", + "title": "Result Href" + }, + "abort_href": { + "type": "string", + "title": "Abort Href" + } + }, + "type": "object", + "required": [ + "task_id", + "status_href", + "result_href", + "abort_href" + ], + "title": "TaskGet" + }, + "TaskProgress": { + "properties": { + "task_id": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Task Id" + }, + "message": { + "type": "string", + "title": "Message", + "default": "" + }, + "percent": { + "type": "number", + "maximum": 1.0, + "minimum": 0.0, + "title": "Percent", + "default": 0.0 + } + }, + "type": "object", + "title": "TaskProgress", + "description": "Helps the user to keep track of the progress. 
Progress is expected to be\ndefined as a float bound between 0.0 and 1.0" + }, + "TaskResult": { + "properties": { + "result": { + "anyOf": [ + {}, + { + "type": "null" + } + ], + "title": "Result" + }, + "error": { + "anyOf": [ + {}, + { + "type": "null" + } + ], + "title": "Error" + } + }, + "type": "object", + "required": [ + "result", + "error" + ], + "title": "TaskResult" + }, + "TaskStatus": { + "properties": { + "task_progress": { + "$ref": "#/components/schemas/TaskProgress" + }, + "done": { + "type": "boolean", + "title": "Done" + }, + "started": { + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], + "title": "Started" + } + }, + "type": "object", + "required": [ + "task_progress", + "done", + "started" + ], + "title": "TaskStatus" + }, "UnitExtraInfoTier": { "properties": { "CPU": { @@ -11128,7 +12548,6 @@ }, "workspace_path": { "type": "string", - "pattern": "^workspace/.*", "format": "path", "title": "Workspace Path", "description": "The file's relative path within the job's workspace directory. E.g. 'workspace/myfile.txt'" diff --git a/services/api-server/requirements/_base.in b/services/api-server/requirements/_base.in index 031fabf9e4e9..58825ffc491c 100644 --- a/services/api-server/requirements/_base.in +++ b/services/api-server/requirements/_base.in @@ -7,6 +7,7 @@ --constraint ./constraints.txt # intra-repo required dependencies +--requirement ../../../packages/celery-library/requirements/_base.in --requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/postgres-database/requirements/_base.in @@ -22,7 +23,7 @@ fastapi-pagination orjson packaging parse -pydantic[dotenv] +pydantic pyyaml tenacity typer diff --git a/services/api-server/requirements/_base.txt b/services/api-server/requirements/_base.txt index 21a820a91956..2c91b49add5e 100644 --- a/services/api-server/requirements/_base.txt +++ b/services/api-server/requirements/_base.txt @@ -1,30 +1,47 @@ aio-pika==9.5.3 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in aiocache==0.12.3 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in aiodebug==2.3.0 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in aiodocker==0.24.0 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in aiofiles==24.1.0 # via + # -r 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in # -r requirements/_base.in aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 - # via +aiohttp==3.12.12 + # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -63,6 +80,8 @@ alembic==1.14.0 # via # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in +amqp==5.3.1 + # via kombu annotated-types==0.7.0 # via pydantic anyio==4.7.0 @@ -74,12 +93,17 @@ anyio==4.7.0 # watchfiles arrow==1.3.0 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi asyncpg==0.30.0 @@ -88,8 +112,24 @@ attrs==24.2.0 # via # aiohttp # jsonschema +billiard==4.2.1 + # via celery +celery==5.5.3 + # via -r requirements/../../../packages/celery-library/requirements/_base.in certifi==2024.8.30 # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -121,17 +161,40 @@ certifi==2024.8.30 # httpcore # httpx # requests + # sentry-sdk cffi==1.17.1 # via cryptography charset-normalizer==3.4.0 # via requests -click==8.1.7 +click==8.2.1 # via + # celery + # click-didyoumean + # click-plugins + # click-repl # rich-toolkit # typer # uvicorn +click-didyoumean==0.3.1 + # via celery +click-plugins==1.1.1.2 + # via celery +click-repl==0.3.0 + # via celery cryptography==44.0.0 # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -161,12 +224,6 @@ cryptography==44.0.0 # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in 
-deprecated==1.2.15 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions dnspython==2.7.0 # via email-validator email-validator==2.2.0 @@ -177,18 +234,23 @@ exceptiongroup==1.2.2 # via aio-pika fast-depends==2.4.12 # via faststream -fastapi==0.115.12 +fastapi==0.116.1 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi-lifespan-manager -fastapi-cli==0.0.6 +fastapi-cli==0.0.8 # via fastapi +fastapi-cloud-cli==0.1.5 + # via fastapi-cli fastapi-lifespan-manager==0.1.4 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -fastapi-pagination==0.12.32 - # via -r requirements/_base.in +fastapi-pagination==0.12.34 + # via + # -c requirements/./constraints.txt + # -r requirements/_base.in faststream==0.5.33 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in flexcache==0.3 @@ -199,7 +261,7 @@ frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.66.0 +googleapis-common-protos==1.70.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -207,7 +269,7 @@ greenlet==3.1.1 # via sqlalchemy grpcio==1.68.1 # via opentelemetry-exporter-otlp-proto-grpc -h11==0.14.0 +h11==0.16.0 # via # httpcore # uvicorn @@ -215,12 +277,24 @@ h2==4.2.0 # via httpx hpack==4.1.0 # via h2 -httpcore==1.0.7 +httpcore==1.0.9 # via httpx httptools==0.6.4 # via uvicorn httpx==0.27.2 # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -250,7 +324,9 @@ httpx==0.27.2 # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # -r requirements/../../../packages/simcore-sdk/requirements/_base.in # fastapi + # fastapi-cloud-cli hyperframe==6.1.0 # via h2 idna==3.10 @@ -264,6 +340,18 @@ importlib-metadata==8.5.0 # via opentelemetry-api jinja2==3.1.6 # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -293,15 +381,39 @@ jinja2==3.1.6 # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # fastapi +jsonref==1.1.0 + # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema==3.2.0 # via # -c requirements/./constraints.txt + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in -mako==1.3.7 - # via +kombu==5.5.4 + # via celery +mako==1.3.10 + # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -343,8 +455,9 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.28.2 +opentelemetry-api==1.34.1 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -360,19 +473,20 @@ opentelemetry-api==1.28.2 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.28.2 +opentelemetry-exporter-otlp==1.34.1 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.28.2 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.28.2 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.28.2 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.49b2 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi @@ -382,44 +496,50 @@ opentelemetry-instrumentation==0.49b2 # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.49b2 +opentelemetry-instrumentation-aio-pika==0.55b1 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-asgi==0.49b2 +opentelemetry-instrumentation-asgi==0.55b1 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-asyncpg==0.49b2 +opentelemetry-instrumentation-asyncpg==0.55b1 # via - # -r requirements/../../../packages/postgres-database/requirements/_base.in - # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-instrumentation-fastapi==0.49b2 + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-fastapi==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.49b2 +opentelemetry-instrumentation-httpx==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-logging==0.49b2 +opentelemetry-instrumentation-logging==0.55b1 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.49b2 +opentelemetry-instrumentation-redis==0.55b1 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.49b2 +opentelemetry-instrumentation-requests==0.55b1 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.28.2 +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.28.2 +opentelemetry-sdk==1.34.1 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.49b2 +opentelemetry-semantic-conventions==0.55b1 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi @@ -429,7 +549,7 @@ opentelemetry-semantic-conventions==0.49b2 # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.49b2 +opentelemetry-util-http==0.55b1 # via # opentelemetry-instrumentation-asgi # 
opentelemetry-instrumentation-fastapi @@ -437,6 +557,18 @@ opentelemetry-util-http==0.49b2 # opentelemetry-instrumentation-requests orjson==3.10.12 # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -465,6 +597,14 @@ orjson==3.10.12 # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in @@ -488,6 +628,7 @@ packaging==24.2 # via # -r requirements/../../../packages/simcore-sdk/requirements/_base.in # -r requirements/_base.in + # kombu # opentelemetry-instrumentation pamqp==3.3.0 # via aiormq @@ -499,16 +640,19 @@ platformdirs==4.3.6 # via pint prometheus-client==0.21.1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +prompt-toolkit==3.0.51 + # via click-repl propcache==0.2.1 # via # aiohttp # yarl -protobuf==5.29.1 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto psutil==6.1.0 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in psycopg2-binary==2.9.10 @@ -517,8 +661,20 @@ pycparser==2.22 # via cffi pycryptodome==3.21.0 # via stream-zip -pydantic==2.10.3 - # via +pydantic==2.11.7 + # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -547,6 +703,17 @@ pydantic==2.10.3 # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/_base.in # -r 
requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in @@ -577,13 +744,22 @@ pydantic==2.10.3 # -r requirements/_base.in # fast-depends # fastapi + # fastapi-cloud-cli # fastapi-pagination # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.1 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.0 - # via +pydantic-extra-types==2.10.5 + # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in @@ -602,8 +778,20 @@ pydantic-extra-types==2.10.0 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -pydantic-settings==2.6.1 - # via +pydantic-settings==2.7.0 + # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -632,6 +820,10 @@ pydantic-settings==2.6.1 # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -644,12 +836,15 @@ pygments==2.18.0 # via rich pyinstrument==5.0.0 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in pyrsistent==0.20.0 # via jsonschema python-dateutil==2.9.0.post0 - # via arrow + # via 
+ # arrow + # celery python-dotenv==1.0.1 # via # pydantic-settings @@ -658,6 +853,18 @@ python-multipart==0.0.19 # via fastapi pyyaml==6.0.2 # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -686,12 +893,25 @@ pyyaml==6.0.2 # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_base.in # uvicorn redis==5.2.1 # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -720,21 +940,31 @@ redis==5.2.1 # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -requests==2.32.3 + # kombu +requests==2.32.4 # via opentelemetry-exporter-otlp-proto-http -rich==13.9.4 +rich==14.1.0 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/_base.in # -r 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in # rich-toolkit # typer -rich-toolkit==0.12.0 - # via fastapi-cli -setuptools==75.6.0 +rich-toolkit==0.15.0 + # via + # fastapi-cli + # fastapi-cloud-cli +rignore==0.6.4 + # via fastapi-cloud-cli +sentry-sdk==2.35.0 + # via fastapi-cloud-cli +setuptools==80.9.0 # via jsonschema shellingham==1.5.4 # via typer @@ -745,9 +975,22 @@ six==1.17.0 sniffio==1.3.1 # via # anyio + # asgi-lifespan # httpx sqlalchemy==1.4.54 # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -780,8 +1023,20 @@ sqlalchemy==1.4.54 # -r 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in # alembic -starlette==0.41.3 - # via +starlette==0.47.2 + # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -813,34 +1068,41 @@ starlette==0.41.3 # fastapi stream-zip==0.0.83 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in tenacity==9.0.0 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r 
requirements/../../../packages/simcore-sdk/requirements/_base.in # -r requirements/_base.in toolz==1.0.0 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.1 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in -typer==0.15.1 +typer==0.16.1 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fastapi-cli + # fastapi-cloud-cli types-python-dateutil==2.9.0.20241206 # via arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # aiodebug # alembic @@ -850,15 +1112,37 @@ typing-extensions==4.12.2 # faststream # flexcache # flexparser + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pint # pydantic # pydantic-core # pydantic-extra-types # rich-toolkit + # starlette # typer -urllib3==2.2.3 - # via + # typing-inspection +typing-inspection==0.4.1 + # via pydantic +tzdata==2025.2 + # via kombu +urllib3==2.5.0 + # via + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -888,25 +1172,34 @@ urllib3==2.2.3 # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # requests + # sentry-sdk uvicorn==0.34.2 # via # fastapi # fastapi-cli + # fastapi-cloud-cli uvloop==0.21.0 # via uvicorn +vine==5.1.0 + # via + # amqp + # celery + # kombu watchfiles==1.0.0 # via uvicorn +wcwidth==0.2.13 + # via prompt-toolkit websockets==14.1 # via uvicorn wrapt==1.17.0 # via - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis yarl==1.18.3 # via + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in diff --git a/services/api-server/requirements/_test.in b/services/api-server/requirements/_test.in index 805e1f7a7afe..067ce3a73052 100644 --- a/services/api-server/requirements/_test.in +++ b/services/api-server/requirements/_test.in @@ -17,12 +17,14 @@ asgi_lifespan click docker faker +fakeredis jsonref moto[server] # mock out tests based on AWS-S3 pact-python pyinstrument pytest pytest-asyncio +pytest-celery pytest-cov pytest-docker pytest-mock diff --git a/services/api-server/requirements/_test.txt b/services/api-server/requirements/_test.txt index 6fc93990cdb6..f0e66f382de4 100644 --- a/services/api-server/requirements/_test.txt +++ b/services/api-server/requirements/_test.txt @@ -2,7 +2,7 @@ aiohappyeyeballs==2.6.1 # via # -c requirements/_base.txt # aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -17,6 +17,10 @@ alembic==1.14.0 # via # -c requirements/_base.txt # -r requirements/_test.in +amqp==5.3.1 + # via + # -c requirements/_base.txt + # kombu annotated-types==0.7.0 # via # -c 
requirements/_base.txt @@ -27,7 +31,9 @@ anyio==4.7.0 # httpx # starlette asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in attrs==24.2.0 # via # -c requirements/_base.txt @@ -43,6 +49,10 @@ aws-sam-translator==1.55.0 # cfn-lint aws-xray-sdk==2.14.0 # via moto +billiard==4.2.1 + # via + # -c requirements/_base.txt + # celery boto3==1.38.1 # via # aws-sam-translator @@ -55,6 +65,10 @@ botocore==1.38.1 # s3transfer botocore-stubs==1.37.4 # via types-boto3 +celery==5.5.3 + # via + # -c requirements/_base.txt + # pytest-celery certifi==2024.8.30 # via # -c requirements/../../../requirements/constraints.txt @@ -75,13 +89,29 @@ charset-normalizer==3.4.0 # via # -c requirements/_base.txt # requests -click==8.1.7 +click==8.2.1 # via # -c requirements/_base.txt # -r requirements/_test.in + # celery + # click-didyoumean + # click-plugins + # click-repl # flask # pact-python # uvicorn +click-didyoumean==0.3.1 + # via + # -c requirements/_base.txt + # celery +click-plugins==1.1.1.2 + # via + # -c requirements/_base.txt + # celery +click-repl==0.3.0 + # via + # -c requirements/_base.txt + # celery coverage==7.6.12 # via pytest-cov cryptography==44.0.0 @@ -91,10 +121,14 @@ cryptography==44.0.0 # moto # python-jose # sshpubkeys +debugpy==1.8.16 + # via pytest-celery docker==7.1.0 # via # -r requirements/_test.in # moto + # pytest-celery + # pytest-docker-tools ecdsa==0.19.0 # via # moto @@ -102,7 +136,9 @@ ecdsa==0.19.0 # sshpubkeys faker==36.1.1 # via -r requirements/_test.in -fastapi==0.115.12 +fakeredis==2.31.0 + # via -r requirements/_test.in +fastapi==0.116.1 # via # -c requirements/_base.txt # pact-python @@ -110,7 +146,7 @@ flask==2.1.3 # via # flask-cors # moto -flask-cors==5.0.1 +flask-cors==6.0.1 # via moto frozenlist==1.5.0 # via @@ -123,12 +159,12 @@ greenlet==3.1.1 # via # -c requirements/_base.txt # sqlalchemy -h11==0.14.0 +h11==0.16.0 # via # -c requirements/_base.txt # httpcore # uvicorn -httpcore==1.0.7 +httpcore==1.0.9 # via # -c requirements/_base.txt # httpx @@ -170,7 +206,9 @@ jsonpickle==4.0.2 jsonpointer==3.0.0 # via jsonpatch jsonref==1.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in jsonschema==3.2.0 # via # -c requirements/_base.txt @@ -181,7 +219,11 @@ jsonschema==3.2.0 # openapi-spec-validator junit-xml==1.9 # via cfn-lint -mako==1.3.7 +kombu==5.5.4 + # via + # -c requirements/_base.txt + # celery +mako==1.3.10 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -201,9 +243,9 @@ multidict==6.1.0 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.15.0 +mypy==1.16.1 # via sqlalchemy -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via mypy networkx==2.8.8 # via cfn-lint @@ -215,15 +257,24 @@ packaging==24.2 # via # -c requirements/_base.txt # aioresponses + # kombu # pytest pact-python==2.3.1 # via -r requirements/_test.in +pathspec==0.12.1 + # via mypy pbr==6.1.1 # via # jschema-to-python # sarif-om pluggy==1.5.0 - # via pytest + # via + # pytest + # pytest-cov +prompt-toolkit==3.0.51 + # via + # -c requirements/_base.txt + # click-repl propcache==0.2.1 # via # -c requirements/_base.txt @@ -233,6 +284,7 @@ psutil==6.1.0 # via # -c requirements/_base.txt # pact-python + # pytest-celery pyasn1==0.4.8 # via # python-jose @@ -241,15 +293,19 @@ pycparser==2.22 # via # -c requirements/_base.txt # cffi -pydantic==2.10.3 +pydantic==2.11.7 # via # -c requirements/../../../requirements/constraints.txt # -c 
requirements/_base.txt # fastapi -pydantic-core==2.27.1 +pydantic-core==2.33.2 # via # -c requirements/_base.txt # pydantic +pygments==2.18.0 + # via + # -c requirements/_base.txt + # pytest pyinstrument==5.0.0 # via # -c requirements/_base.txt @@ -260,20 +316,25 @@ pyrsistent==0.20.0 # via # -c requirements/_base.txt # jsonschema -pytest==8.3.5 +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio # pytest-cov # pytest-docker + # pytest-docker-tools # pytest-mock -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 + # via -r requirements/_test.in +pytest-celery==1.1.3 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in -pytest-docker==3.2.0 +pytest-docker==3.2.3 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-docker-tools==3.1.9 + # via pytest-celery +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -281,6 +342,7 @@ python-dateutil==2.9.0.post0 # via # -c requirements/_base.txt # botocore + # celery # moto python-jose==3.4.0 # via moto @@ -295,11 +357,16 @@ pyyaml==6.0.2 # moto # openapi-spec-validator # responses +redis==5.2.1 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # fakeredis referencing==0.35.1 # via # -c requirements/../../../requirements/constraints.txt # types-jsonschema -requests==2.32.3 +requests==2.32.4 # via # -c requirements/_base.txt # docker @@ -320,13 +387,14 @@ s3transfer==0.12.0 # via boto3 sarif-om==1.0.4 # via cfn-lint -setuptools==75.6.0 +setuptools==80.9.0 # via # -c requirements/_base.txt # jsonschema # moto # openapi-spec-validator # pbr + # pytest-celery six==1.17.0 # via # -c requirements/_base.txt @@ -341,6 +409,8 @@ sniffio==1.3.1 # anyio # asgi-lifespan # httpx +sortedcontainers==2.4.0 + # via fakeredis sqlalchemy==1.4.54 # via # -c requirements/../../../requirements/constraints.txt @@ -351,11 +421,15 @@ sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy sshpubkeys==3.3.1 # via moto -starlette==0.41.3 +starlette==0.47.2 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # fastapi +tenacity==9.0.0 + # via + # -c requirements/_base.txt + # pytest-celery types-aiofiles==24.1.0.20241221 # via -r requirements/_test.in types-awscrt==0.23.10 @@ -366,7 +440,7 @@ types-jsonschema==4.23.0.20241208 # via -r requirements/_test.in types-s3transfer==0.11.3 # via types-boto3 -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # alembic @@ -376,10 +450,19 @@ typing-extensions==4.12.2 # pydantic # pydantic-core # sqlalchemy2-stubs + # starlette # types-boto3 -tzdata==2025.1 - # via faker -urllib3==2.2.3 + # typing-inspection +typing-inspection==0.4.1 + # via + # -c requirements/_base.txt + # pydantic +tzdata==2025.2 + # via + # -c requirements/_base.txt + # faker + # kombu +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -391,6 +474,16 @@ uvicorn==0.34.2 # via # -c requirements/_base.txt # pact-python +vine==5.1.0 + # via + # -c requirements/_base.txt + # amqp + # celery + # kombu +wcwidth==0.2.13 + # via + # -c requirements/_base.txt + # prompt-toolkit werkzeug==2.1.2 # via # flask diff --git a/services/api-server/requirements/_tools.txt b/services/api-server/requirements/_tools.txt index e86a0164c220..46d3488fc275 100644 --- a/services/api-server/requirements/_tools.txt +++ b/services/api-server/requirements/_tools.txt @@ -10,7 +10,7 @@ cfgv==3.4.0 # via pre-commit 
change-case==0.5.2 # via -r requirements/_tools.in -click==8.1.7 +click==8.2.1 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -41,11 +41,11 @@ markupsafe==3.0.2 # jinja2 mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # -c requirements/_test.txt # black @@ -59,7 +59,10 @@ packaging==24.2 # black # build pathspec==0.12.1 - # via black + # via + # -c requirements/_test.txt + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -87,14 +90,14 @@ pyyaml==6.0.2 # watchdog ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.6.0 +setuptools==80.9.0 # via # -c requirements/_base.txt # -c requirements/_test.txt # pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/services/api-server/requirements/ci.txt b/services/api-server/requirements/ci.txt index cc1799cee075..9d4fff8972ab 100644 --- a/services/api-server/requirements/ci.txt +++ b/services/api-server/requirements/ci.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +simcore-celery-library @ ../../packages/celery-library/ simcore-common-library @ ../../packages/common-library simcore-models-library @ ../../packages/models-library simcore-postgres-database @ ../../packages/postgres-database/ diff --git a/services/api-server/requirements/constraints.txt b/services/api-server/requirements/constraints.txt index 22d919bdae39..d1cf3b3257c5 100644 --- a/services/api-server/requirements/constraints.txt +++ b/services/api-server/requirements/constraints.txt @@ -34,3 +34,7 @@ aws-sam-translator<1.56.0 # # aws-sam-translator<1.55.0 (from -c ./constraints.txt (line 32)) # # aws-sam-translator>=1.57.0 (from cfn-lint==0.72.10->-c ./constraints.txt (line 33)) cfn-lint<0.72.1 + +# the osparc client currently requires fastapi-pagination<0.13.0 as it requires links fields in the response (from version 0.13.0 they are all optional) +# https://github.com/ITISFoundation/osparc-simcore/issues/8254 +fastapi-pagination<0.13.0 diff --git a/services/api-server/requirements/dev.txt b/services/api-server/requirements/dev.txt index 5afc552d7533..85f3f1c428e0 100644 --- a/services/api-server/requirements/dev.txt +++ b/services/api-server/requirements/dev.txt @@ -12,6 +12,7 @@ --requirement _tools.txt # installs this repo's packages +--editable ../../packages/celery-library/ --editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/postgres-database diff --git a/services/api-server/requirements/prod.txt b/services/api-server/requirements/prod.txt index 9d4d747507e9..9df71af1a631 100644 --- a/services/api-server/requirements/prod.txt +++ b/services/api-server/requirements/prod.txt @@ -10,6 +10,7 @@ --requirement _base.txt # installs this repo's packages +simcore-celery-library @ ../../packages/celery-library/ simcore-models-library @ ../../packages/models-library simcore-common-library @ ../../packages/common-library/ simcore-postgres-database @ ../../packages/postgres-database/ diff --git a/services/api-server/setup.cfg b/services/api-server/setup.cfg index 21f72b9aecc1..30f46cc2b6fe 100644 --- a/services/api-server/setup.cfg +++ b/services/api-server/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.9.0 +current_version = 0.13.0 commit = True message = 
services/api-server version: {current_version} → {new_version} tag = False @@ -11,12 +11,12 @@ commit_args = --no-verify asyncio_mode = auto asyncio_default_fixture_loop_scope = function addopts = --strict-markers -markers = +markers = slow: marks tests as slow (deselect with '-m "not slow"') acceptance_test: "marks tests as 'acceptance tests' i.e. does the system do what the user expects? Typically those are workflows." testit: "marks test to run during development" [mypy] -plugins = +plugins = pydantic.mypy sqlalchemy.ext.mypy.plugin diff --git a/services/api-server/src/simcore_service_api_server/_constants.py b/services/api-server/src/simcore_service_api_server/_constants.py index 7bfbfd43907e..8d2ffb058564 100644 --- a/services/api-server/src/simcore_service_api_server/_constants.py +++ b/services/api-server/src/simcore_service_api_server/_constants.py @@ -1,9 +1,17 @@ from typing import Final -MSG_BACKEND_SERVICE_UNAVAILABLE: Final[ - str -] = "backend service is disabled or unreachable" +from common_library.user_messages import user_message -MSG_INTERNAL_ERROR_USER_FRIENDLY_TEMPLATE: Final[ - str -] = "Oops! Something went wrong, but we've noted it down and we'll sort it out ASAP. Thanks for your patience!" +MSG_BACKEND_SERVICE_UNAVAILABLE: Final[str] = user_message( + "The service is currently unavailable. Please try again later.", _version=1 +) + +MSG_INTERNAL_ERROR_USER_FRIENDLY_TEMPLATE: Final[str] = user_message( + "Something went wrong on our end. We've been notified and will resolve this issue as soon as possible. Thank you for your patience.", + _version=2, +) + +MSG_CLIENT_ERROR_USER_FRIENDLY_TEMPLATE: Final[str] = user_message( + "Something went wrong with your request.", + _version=1, +) diff --git a/services/api-server/src/simcore_service_api_server/_meta.py b/services/api-server/src/simcore_service_api_server/_meta.py index 33a0480a25ce..c92dbc70fdf0 100644 --- a/services/api-server/src/simcore_service_api_server/_meta.py +++ b/services/api-server/src/simcore_service_api_server/_meta.py @@ -1,7 +1,4 @@ -""" Application's metadata - -""" - +"""Application's metadata""" from typing import Final @@ -17,10 +14,9 @@ VERSION: Final[Version] = info.version API_VERSION: Final[VersionStr] = info.__version__ API_VTAG: Final[str] = info.api_prefix_path_tag -APP_NAME: Final[str] = PROJECT_NAME +APP_NAME: Final[str] = info.app_name SUMMARY: Final[str] = info.get_summary() - # # https://patorjk.com/software/taag/#p=display&f=JS%20Stick%20Letters&t=API-server%0A # diff --git a/services/api-server/src/simcore_service_api_server/_service_function_jobs.py b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py new file mode 100644 index 000000000000..e71ef46de027 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/_service_function_jobs.py @@ -0,0 +1,329 @@ +from dataclasses import dataclass +from typing import overload + +import jsonschema +from common_library.exclude import as_dict_exclude_none +from models_library.functions import ( + FunctionClass, + FunctionID, + FunctionInputs, + FunctionJobCollectionID, + FunctionJobID, + FunctionSchemaClass, + ProjectFunctionJob, + RegisteredFunction, + RegisteredFunctionJob, + RegisteredFunctionJobPatch, + RegisteredProjectFunctionJobPatch, + RegisteredSolverFunctionJobPatch, + SolverFunctionJob, + SolverJobID, + TaskID, +) +from models_library.functions_errors import ( + FunctionInputsValidationError, + UnsupportedFunctionClassError, +) +from models_library.products import ProductName +from 
models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID +from models_library.rest_pagination import PageMetaInfoLimitOffset, PageOffsetInt +from models_library.rpc_pagination import PageLimitInt +from models_library.users import UserID +from pydantic import ValidationError +from simcore_service_api_server._service_functions import FunctionService +from simcore_service_api_server.services_rpc.storage import StorageService + +from ._service_jobs import JobService +from .models.api_resources import JobLinks +from .models.domain.functions import PreRegisteredFunctionJobData +from .models.schemas.jobs import JobInputs, JobPricingSpecification +from .services_http.webserver import AuthSession +from .services_rpc.wb_api_server import WbApiRpcClient + + +def join_inputs( + default_inputs: FunctionInputs | None, + function_inputs: FunctionInputs | None, +) -> FunctionInputs: + if default_inputs is None: + return function_inputs + + if function_inputs is None: + return default_inputs + + # last dict will override defaults + return {**default_inputs, **function_inputs} + + +@dataclass(frozen=True, kw_only=True) +class FunctionJobService: + user_id: UserID + product_name: ProductName + _web_rpc_client: WbApiRpcClient + _storage_client: StorageService + _job_service: JobService + _function_service: FunctionService + _webserver_api: AuthSession + + async def list_function_jobs( + self, + *, + filter_by_function_id: FunctionID | None = None, + filter_by_function_job_ids: list[FunctionJobID] | None = None, + filter_by_function_job_collection_id: FunctionJobCollectionID | None = None, + pagination_offset: PageOffsetInt | None = None, + pagination_limit: PageLimitInt | None = None, + ) -> tuple[list[RegisteredFunctionJob], PageMetaInfoLimitOffset]: + """Lists all function jobs for a user with pagination""" + + pagination_kwargs = as_dict_exclude_none( + pagination_offset=pagination_offset, pagination_limit=pagination_limit + ) + + return await self._web_rpc_client.list_function_jobs( + user_id=self.user_id, + product_name=self.product_name, + filter_by_function_id=filter_by_function_id, + filter_by_function_job_ids=filter_by_function_job_ids, + filter_by_function_job_collection_id=filter_by_function_job_collection_id, + **pagination_kwargs, + ) + + async def validate_function_inputs( + self, *, function_id: FunctionID, inputs: FunctionInputs + ) -> tuple[bool, str]: + function = await self._web_rpc_client.get_function( + function_id=function_id, + user_id=self.user_id, + product_name=self.product_name, + ) + + if ( + function.input_schema is None + or function.input_schema.schema_content is None + ): + return True, "No input schema defined for this function" + + if function.input_schema.schema_class == FunctionSchemaClass.json_schema: + try: + jsonschema.validate( + instance=inputs, schema=function.input_schema.schema_content + ) + except ValidationError as err: + return False, str(err) + return True, "Inputs are valid" + + return ( + False, + f"Unsupported function schema class {function.input_schema.schema_class}", + ) + + async def create_function_job_inputs( # pylint: disable=no-self-use + self, + *, + function: RegisteredFunction, + function_inputs: FunctionInputs, + ) -> JobInputs: + joined_inputs = join_inputs( + function.default_inputs, + function_inputs, + ) + return JobInputs( + values=joined_inputs or {}, + ) + + async def pre_register_function_job( + self, + *, + function: RegisteredFunction, + job_inputs: JobInputs, + ) -> PreRegisteredFunctionJobData: + + 
if function.input_schema is not None: + is_valid, validation_str = await self.validate_function_inputs( + function_id=function.uid, + inputs=job_inputs.values, + ) + if not is_valid: + raise FunctionInputsValidationError(error=validation_str) + + if function.function_class == FunctionClass.PROJECT: + job = await self._web_rpc_client.register_function_job( + function_job=ProjectFunctionJob( + function_uid=function.uid, + title=f"Function job of function {function.uid}", + description=function.description, + inputs=job_inputs.values, + outputs=None, + project_job_id=None, + job_creation_task_id=None, + ), + user_id=self.user_id, + product_name=self.product_name, + ) + + elif function.function_class == FunctionClass.SOLVER: + job = await self._web_rpc_client.register_function_job( + function_job=SolverFunctionJob( + function_uid=function.uid, + title=f"Function job of function {function.uid}", + description=function.description, + inputs=job_inputs.values, + outputs=None, + solver_job_id=None, + job_creation_task_id=None, + ), + user_id=self.user_id, + product_name=self.product_name, + ) + else: + raise UnsupportedFunctionClassError( + function_class=function.function_class, + ) + + return PreRegisteredFunctionJobData( + function_job_id=job.uid, + job_inputs=job_inputs, + ) + + @overload + async def patch_registered_function_job( + self, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, + function_class: FunctionClass, + job_creation_task_id: TaskID | None, + ) -> RegisteredFunctionJob: ... + + @overload + async def patch_registered_function_job( + self, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, + function_class: FunctionClass, + job_creation_task_id: TaskID | None, + project_job_id: ProjectID | None, + ) -> RegisteredFunctionJob: ... + + @overload + async def patch_registered_function_job( + self, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, + function_class: FunctionClass, + job_creation_task_id: TaskID | None, + solver_job_id: SolverJobID | None, + ) -> RegisteredFunctionJob: ... 
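
The input handling in FunctionJobService above (join_inputs layering caller-provided inputs over the function's default_inputs, then validate_function_inputs checking the merged values against the function's JSON schema before the job is pre-registered) reduces to a small merge-then-validate step. The sketch below is illustrative only: the helper names are made up, the only assumed dependency is the jsonschema package already pinned in the requirements above, and the error type caught is jsonschema.exceptions.ValidationError, which is what jsonschema.validate raises on invalid input.

    import jsonschema
    from jsonschema.exceptions import ValidationError

    def merge_inputs(defaults: dict | None, inputs: dict | None) -> dict:
        # caller-provided values override the function's default inputs
        return {**(defaults or {}), **(inputs or {})}

    def check_inputs(inputs: dict, input_schema: dict | None) -> tuple[bool, str]:
        # no schema registered for the function -> nothing to validate
        if input_schema is None:
            return True, "No input schema defined for this function"
        try:
            jsonschema.validate(instance=inputs, schema=input_schema)
        except ValidationError as err:
            return False, str(err)
        return True, "Inputs are valid"

    # usage: defaults supply 'x', the caller overrides 'y'; the merged dict satisfies the schema
    schema = {
        "type": "object",
        "properties": {"x": {"type": "number"}, "y": {"type": "number"}},
        "required": ["x", "y"],
    }
    merged = merge_inputs({"x": 1.0, "y": 0.0}, {"y": 2.5})
    assert check_inputs(merged, schema) == (True, "Inputs are valid")

Validating before pre-registering keeps invalid jobs out of the function-job cache, so only inputs that pass the schema check ever reach register_function_job.
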
+ + async def patch_registered_function_job( + self, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, + function_class: FunctionClass, + job_creation_task_id: TaskID | None, + project_job_id: ProjectID | None = None, + solver_job_id: SolverJobID | None = None, + ) -> RegisteredFunctionJob: + # Only allow one of project_job_id or solver_job_id depending on function_class + patch: RegisteredFunctionJobPatch + if function_class == FunctionClass.PROJECT: + patch = RegisteredProjectFunctionJobPatch( + title=None, + description=None, + inputs=None, + outputs=None, + job_creation_task_id=job_creation_task_id, + project_job_id=project_job_id, + ) + elif function_class == FunctionClass.SOLVER: + patch = RegisteredSolverFunctionJobPatch( + title=None, + description=None, + inputs=None, + outputs=None, + job_creation_task_id=job_creation_task_id, + solver_job_id=solver_job_id, + ) + else: + raise UnsupportedFunctionClassError( + function_class=function_class, + ) + return await self._web_rpc_client.patch_registered_function_job( + user_id=user_id, + product_name=product_name, + function_job_id=function_job_id, + registered_function_job_patch=patch, + ) + + async def run_function( + self, + *, + function: RegisteredFunction, + pre_registered_function_job_data: PreRegisteredFunctionJobData, + pricing_spec: JobPricingSpecification | None, + job_links: JobLinks, + x_simcore_parent_project_uuid: NodeID | None, + x_simcore_parent_node_id: NodeID | None, + ) -> RegisteredFunctionJob: + """N.B. this function does not check access rights. Use get_cached_function_job for that""" + + if function.function_class == FunctionClass.PROJECT: + study_job = await self._job_service.create_studies_job( + study_id=function.project_id, + job_inputs=pre_registered_function_job_data.job_inputs, + hidden=True, + job_links=job_links, + x_simcore_parent_project_uuid=x_simcore_parent_project_uuid, + x_simcore_parent_node_id=x_simcore_parent_node_id, + ) + await self._job_service.start_study_job( + study_id=function.project_id, + job_id=study_job.id, + pricing_spec=pricing_spec, + ) + return await self.patch_registered_function_job( + user_id=self.user_id, + product_name=self.product_name, + function_job_id=pre_registered_function_job_data.function_job_id, + function_class=FunctionClass.PROJECT, + job_creation_task_id=None, + project_job_id=study_job.id, + ) + + if function.function_class == FunctionClass.SOLVER: + solver_job = await self._job_service.create_solver_job( + solver_key=function.solver_key, + version=function.solver_version, + inputs=pre_registered_function_job_data.job_inputs, + job_links=job_links, + hidden=True, + x_simcore_parent_project_uuid=x_simcore_parent_project_uuid, + x_simcore_parent_node_id=x_simcore_parent_node_id, + ) + await self._job_service.start_solver_job( + solver_key=function.solver_key, + version=function.solver_version, + job_id=solver_job.id, + pricing_spec=pricing_spec, + ) + return await self.patch_registered_function_job( + user_id=self.user_id, + product_name=self.product_name, + function_job_id=pre_registered_function_job_data.function_job_id, + function_class=FunctionClass.SOLVER, + job_creation_task_id=None, + solver_job_id=solver_job.id, + ) + + raise UnsupportedFunctionClassError( + function_class=function.function_class, + ) diff --git a/services/api-server/src/simcore_service_api_server/_service_function_jobs_task_client.py b/services/api-server/src/simcore_service_api_server/_service_function_jobs_task_client.py new file mode 100644 index 
000000000000..7b35041aeb67 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/_service_function_jobs_task_client.py @@ -0,0 +1,409 @@ +# pylint: disable=too-many-instance-attributes +import contextlib +import logging +from dataclasses import dataclass + +from celery_library.errors import TaskNotFoundError +from common_library.exclude import as_dict_exclude_none +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs +from models_library.functions import ( + FunctionClass, + FunctionID, + FunctionInputs, + FunctionJobCollectionID, + FunctionJobID, + FunctionJobStatus, + FunctionOutputs, + RegisteredFunction, + RegisteredFunctionJob, + RegisteredFunctionJobWithStatus, + TaskID, +) +from models_library.functions_errors import ( + FunctionExecuteAccessDeniedError, + FunctionsExecuteApiAccessDeniedError, + UnsupportedFunctionClassError, + UnsupportedFunctionFunctionJobClassCombinationError, +) +from models_library.products import ProductName +from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID +from models_library.projects_state import RunningState +from models_library.rest_pagination import PageMetaInfoLimitOffset, PageOffsetInt +from models_library.rpc_pagination import PageLimitInt +from models_library.users import UserID +from servicelib.celery.models import ExecutionMetadata, TasksQueue, TaskUUID +from servicelib.celery.task_manager import TaskManager +from simcore_service_api_server.models.schemas.functions import ( + FunctionJobCreationTaskStatus, +) +from sqlalchemy.ext.asyncio import AsyncEngine + +from ._service_function_jobs import FunctionJobService +from ._service_functions import FunctionService +from ._service_jobs import JobService +from .api.dependencies.authentication import Identity +from .exceptions.function_errors import ( + FunctionJobCacheNotFoundError, +) +from .models.api_resources import JobLinks +from .models.domain.celery_models import ApiServerOwnerMetadata +from .models.schemas.functions import FunctionJobCreationTaskStatus +from .models.schemas.jobs import JobInputs, JobPricingSpecification +from .services_http.webserver import AuthSession +from .services_rpc.storage import StorageService +from .services_rpc.wb_api_server import WbApiRpcClient + +_logger = logging.getLogger(__name__) + + +def join_inputs( + default_inputs: FunctionInputs | None, + function_inputs: FunctionInputs | None, +) -> FunctionInputs: + if default_inputs is None: + return function_inputs + + if function_inputs is None: + return default_inputs + + # last dict will override defaults + return {**default_inputs, **function_inputs} + + +async def _celery_task_status( + job_creation_task_id: TaskID | None, + task_manager: TaskManager, + user_id: UserID, + product_name: ProductName, +) -> FunctionJobCreationTaskStatus: + if job_creation_task_id is None: + return FunctionJobCreationTaskStatus.NOT_YET_SCHEDULED + owner_metadata = ApiServerOwnerMetadata( + user_id=user_id, + product_name=product_name, + ) + try: + task_status = await task_manager.get_task_status( + task_uuid=TaskUUID(job_creation_task_id), owner_metadata=owner_metadata + ) + return FunctionJobCreationTaskStatus[task_status.task_state] + except TaskNotFoundError as err: + user_msg = f"Job creation task not found for task_uuid={TaskUUID(job_creation_task_id)}" + _logger.exception( + **create_troubleshooting_log_kwargs( + user_msg, + error=err, + error_context={ + "task_uuid": TaskUUID(job_creation_task_id), + "owner_metadata": 
owner_metadata, + "user_id": user_id, + "product_name": product_name, + }, + tip="This probably means the celery task failed, because the task should have created the project_id.", + ) + ) + return FunctionJobCreationTaskStatus.ERROR + + +@dataclass(frozen=True, kw_only=True) +class FunctionJobTaskClientService: + user_id: UserID + product_name: ProductName + _web_rpc_client: WbApiRpcClient + _storage_client: StorageService + _job_service: JobService + _function_service: FunctionService + _function_job_service: FunctionJobService + _webserver_api: AuthSession + _celery_task_manager: TaskManager + _async_pg_engine: AsyncEngine + + async def list_function_jobs_with_status( + self, + *, + filter_by_function_id: FunctionID | None = None, + filter_by_function_job_ids: list[FunctionJobID] | None = None, + filter_by_function_job_collection_id: FunctionJobCollectionID | None = None, + pagination_offset: PageOffsetInt | None = None, + pagination_limit: PageLimitInt | None = None, + ) -> tuple[ + list[RegisteredFunctionJobWithStatus], + PageMetaInfoLimitOffset, + ]: + """Lists all function jobs for a user with pagination""" + + pagination_kwargs = as_dict_exclude_none( + pagination_offset=pagination_offset, pagination_limit=pagination_limit + ) + + ( + function_jobs_list_ws, + meta, + ) = await self._web_rpc_client.list_function_jobs_with_status( + user_id=self.user_id, + product_name=self.product_name, + filter_by_function_id=filter_by_function_id, + filter_by_function_job_ids=filter_by_function_job_ids, + filter_by_function_job_collection_id=filter_by_function_job_collection_id, + **pagination_kwargs, + ) + + for function_job_wso in function_jobs_list_ws: + if function_job_wso.outputs is None or ( + function_job_wso.status.status + not in ( + RunningState.SUCCESS, + RunningState.FAILED, + ) + ): + function_job_wso.status = await self.inspect_function_job( + function=await self._function_service.get_function( + function_id=function_job_wso.function_uid, + ), + function_job=function_job_wso, + ) + + if function_job_wso.status.status == RunningState.SUCCESS: + function_job_wso.outputs = await self.function_job_outputs( + function_job=function_job_wso, + function=await self._function_service.get_function( + function_id=function_job_wso.function_uid, + ), + stored_job_outputs=None, + ) + return function_jobs_list_ws, meta + + async def inspect_function_job( + self, + function: RegisteredFunction, + function_job: RegisteredFunctionJob, + ) -> FunctionJobStatus: + """Raises FunctionJobProjectNotRegisteredError if no project is associated with job""" + stored_job_status = await self._web_rpc_client.get_function_job_status( + function_job_id=function_job.uid, + user_id=self.user_id, + product_name=self.product_name, + ) + + if stored_job_status.status in (RunningState.SUCCESS, RunningState.FAILED): + return stored_job_status + + status: str + if ( + function.function_class == FunctionClass.PROJECT + and function_job.function_class == FunctionClass.PROJECT + ): + if function_job.project_job_id is None: + status = await _celery_task_status( + job_creation_task_id=function_job.job_creation_task_id, + task_manager=self._celery_task_manager, + user_id=self.user_id, + product_name=self.product_name, + ) + else: + job_status = await self._job_service.inspect_study_job( + job_id=function_job.project_job_id, + ) + status = job_status.state + elif (function.function_class == FunctionClass.SOLVER) and ( + function_job.function_class == FunctionClass.SOLVER + ): + if function_job.solver_job_id is None: + status = 
await _celery_task_status( + job_creation_task_id=function_job.job_creation_task_id, + task_manager=self._celery_task_manager, + user_id=self.user_id, + product_name=self.product_name, + ) + else: + job_status = await self._job_service.inspect_solver_job( + solver_key=function.solver_key, + version=function.solver_version, + job_id=function_job.solver_job_id, + ) + status = job_status.state + else: + raise UnsupportedFunctionFunctionJobClassCombinationError( + function_class=function.function_class, + function_job_class=function_job.function_class, + ) + + new_job_status = FunctionJobStatus(status=status) + + return await self._web_rpc_client.update_function_job_status( + function_job_id=function_job.uid, + user_id=self.user_id, + product_name=self.product_name, + job_status=new_job_status, + check_write_permissions=False, + ) + + async def get_cached_function_job( + self, + *, + function: RegisteredFunction, + job_inputs: JobInputs, + ) -> RegisteredFunctionJob: + """ + N.B. this function checks access rights + + raises FunctionsExecuteApiAccessDeniedError if user cannot execute functions + raises FunctionJobCacheNotFoundError if no cached job is found + + """ + + user_api_access_rights = ( + await self._web_rpc_client.get_functions_user_api_access_rights( + user_id=self.user_id, product_name=self.product_name + ) + ) + if not user_api_access_rights.execute_functions: + raise FunctionsExecuteApiAccessDeniedError( + user_id=self.user_id, + function_id=function.uid, + ) + + user_permissions = await self._web_rpc_client.get_function_user_permissions( + function_id=function.uid, + user_id=self.user_id, + product_name=self.product_name, + ) + if not user_permissions.execute: + raise FunctionExecuteAccessDeniedError( + user_id=self.user_id, + function_id=function.uid, + ) + + if cached_function_jobs := await self._web_rpc_client.find_cached_function_jobs( + function_id=function.uid, + inputs=job_inputs.values, + user_id=self.user_id, + product_name=self.product_name, + ): + for cached_function_job in cached_function_jobs: + job_status = await self.inspect_function_job( + function=function, + function_job=cached_function_job, + ) + if job_status.status == RunningState.SUCCESS: + return cached_function_job + + raise FunctionJobCacheNotFoundError + + async def function_job_outputs( + self, + *, + function: RegisteredFunction, + function_job: RegisteredFunctionJob, + stored_job_outputs: FunctionOutputs | None, + ) -> FunctionOutputs: + if stored_job_outputs is not None: + return stored_job_outputs + + job_status = await self.inspect_function_job( + function=function, function_job=function_job + ) + + if job_status.status != RunningState.SUCCESS: + return None + + if ( + function.function_class == FunctionClass.PROJECT + and function_job.function_class == FunctionClass.PROJECT + ): + if function_job.project_job_id is None: + return None + new_outputs = dict( + ( + await self._job_service.get_study_job_outputs( + study_id=function.project_id, + job_id=function_job.project_job_id, + ) + ).results + ) + elif ( + function.function_class == FunctionClass.SOLVER + and function_job.function_class == FunctionClass.SOLVER + ): + if function_job.solver_job_id is None: + return None + new_outputs = dict( + ( + await self._job_service.get_solver_job_outputs( + solver_key=function.solver_key, + version=function.solver_version, + job_id=function_job.solver_job_id, + async_pg_engine=self._async_pg_engine, + ) + ).results + ) + else: + raise UnsupportedFunctionClassError(function_class=function.function_class) + + 
return await self._web_rpc_client.update_function_job_outputs( + function_job_id=function_job.uid, + user_id=self.user_id, + product_name=self.product_name, + outputs=new_outputs, + check_write_permissions=False, + ) + + async def create_function_job_creation_task( + self, + *, + function: RegisteredFunction, + function_inputs: FunctionInputs, + user_identity: Identity, + pricing_spec: JobPricingSpecification | None, + job_links: JobLinks, + parent_project_uuid: ProjectID | None = None, + parent_node_id: NodeID | None = None, + ) -> RegisteredFunctionJob: + job_inputs = await self._function_job_service.create_function_job_inputs( + function=function, function_inputs=function_inputs + ) + + # check if results are cached + with contextlib.suppress(FunctionJobCacheNotFoundError): + return await self.get_cached_function_job( + function=function, + job_inputs=job_inputs, + ) + + pre_registered_function_job_data = ( + await self._function_job_service.pre_register_function_job( + function=function, + job_inputs=job_inputs, + ) + ) + + # run function in celery task + + owner_metadata = ApiServerOwnerMetadata( + user_id=user_identity.user_id, product_name=user_identity.product_name + ) + + task_uuid = await self._celery_task_manager.submit_task( + ExecutionMetadata( + name="run_function", + ephemeral=False, + queue=TasksQueue.API_WORKER_QUEUE, + ), + owner_metadata=owner_metadata, + user_identity=user_identity, + function=function, + pre_registered_function_job_data=pre_registered_function_job_data, + pricing_spec=pricing_spec, + job_links=job_links, + x_simcore_parent_project_uuid=parent_project_uuid, + x_simcore_parent_node_id=parent_node_id, + ) + + return await self._function_job_service.patch_registered_function_job( + user_id=user_identity.user_id, + product_name=user_identity.product_name, + function_job_id=pre_registered_function_job_data.function_job_id, + function_class=function.function_class, + job_creation_task_id=TaskID(task_uuid), + ) diff --git a/services/api-server/src/simcore_service_api_server/_service_functions.py b/services/api-server/src/simcore_service_api_server/_service_functions.py new file mode 100644 index 000000000000..708ba0149be9 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/_service_functions.py @@ -0,0 +1,78 @@ +# pylint: disable=no-self-use + +from collections.abc import Callable +from dataclasses import dataclass + +from common_library.exclude import as_dict_exclude_none +from models_library.functions import FunctionClass, FunctionID, RegisteredFunction +from models_library.functions_errors import UnsupportedFunctionClassError +from models_library.products import ProductName +from models_library.rest_pagination import ( + MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE, + PageMetaInfoLimitOffset, + PageOffsetInt, +) +from models_library.rpc_pagination import PageLimitInt +from models_library.users import UserID + +from .models.api_resources import JobLinks +from .services_http.solver_job_models_converters import ( + get_solver_job_rest_interface_links, +) +from .services_http.study_job_models_converters import ( + get_study_job_rest_interface_links, +) +from .services_rpc.wb_api_server import WbApiRpcClient + +DEFAULT_PAGINATION_LIMIT = MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE - 1 + + +@dataclass(frozen=True, kw_only=True) +class FunctionService: + user_id: UserID + product_name: ProductName + _web_rpc_client: WbApiRpcClient + + async def list_functions( + self, + *, + pagination_offset: PageOffsetInt | None = None, + pagination_limit: PageLimitInt | None = 
None, + ) -> tuple[list[RegisteredFunction], PageMetaInfoLimitOffset]: + """Lists all functions for a user with pagination""" + + pagination_kwargs = as_dict_exclude_none( + pagination_offset=pagination_offset, pagination_limit=pagination_limit + ) + + return await self._web_rpc_client.list_functions( + user_id=self.user_id, + product_name=self.product_name, + **pagination_kwargs, + ) + + async def get_function_job_links( + self, function: RegisteredFunction, url_for: Callable + ) -> JobLinks: + if function.function_class == FunctionClass.SOLVER: + return get_solver_job_rest_interface_links( + url_for=url_for, + solver_key=function.solver_key, + version=function.solver_version, + ) + if function.function_class == FunctionClass.PROJECT: + return get_study_job_rest_interface_links( + url_for=url_for, + study_id=function.project_id, + ) + raise UnsupportedFunctionClassError( + function_class=function.function_class, + ) + + async def get_function(self, function_id: FunctionID) -> RegisteredFunction: + """Fetch a function by its ID""" + return await self._web_rpc_client.get_function( + user_id=self.user_id, + product_name=self.product_name, + function_id=function_id, + ) diff --git a/services/api-server/src/simcore_service_api_server/_service_jobs.py b/services/api-server/src/simcore_service_api_server/_service_jobs.py index faac214897f8..c01eac1fa23a 100644 --- a/services/api-server/src/simcore_service_api_server/_service_jobs.py +++ b/services/api-server/src/simcore_service_api_server/_service_jobs.py @@ -1,44 +1,109 @@ +# pylint: disable=too-many-instance-attributes + import logging -from collections.abc import Callable from dataclasses import dataclass +from pathlib import Path +from uuid import UUID from common_library.exclude import as_dict_exclude_none -from models_library.api_schemas_webserver.projects import ProjectCreateNew, ProjectGet +from fastapi import status +from fastapi.exceptions import HTTPException +from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobGet +from models_library.api_schemas_webserver.projects import ( + ProjectCreateNew, + ProjectGet, + ProjectPatch, +) +from models_library.api_schemas_webserver.projects_nodes import NodeOutputs +from models_library.function_services_catalog.services import file_picker from models_library.products import ProductName from models_library.projects import ProjectID -from models_library.projects_nodes_io import NodeID -from models_library.rest_pagination import ( - PageMetaInfoLimitOffset, - PageOffsetInt, -) +from models_library.projects_nodes import InputID, InputTypes +from models_library.projects_nodes_io import BaseFileLink, NodeID +from models_library.rest_pagination import PageMetaInfoLimitOffset, PageOffsetInt +from models_library.rpc.webserver.projects import ProjectJobRpcGet from models_library.rpc_pagination import PageLimitInt from models_library.users import UserID -from pydantic import HttpUrl +from models_library.wallets import ZERO_CREDITS from servicelib.logging_utils import log_context -from simcore_service_api_server.models.basic_types import NameValueTuple +from sqlalchemy.ext.asyncio import AsyncEngine -from .models.schemas.jobs import Job, JobInputs +from ._service_solvers import SolverService +from .exceptions.backend_errors import JobAssetsMissingError +from .exceptions.custom_errors import ( + InsufficientCreditsError, + MissingWalletError, + SolverServiceListJobsFiltersError, +) +from .models.api_resources import JobLinks, RelativeResourceName, compose_resource_name +from 
.models.basic_types import NameValueTuple, VersionStr +from .models.domain.files import File as DomainFile +from .models.schemas.files import File as SchemaFile +from .models.schemas.jobs import ( + ArgumentTypes, + Job, + JobID, + JobInputs, + JobOutputs, + JobPricingSpecification, + JobStatus, +) from .models.schemas.programs import Program -from .models.schemas.solvers import Solver +from .models.schemas.solvers import Solver, SolverKeyId +from .models.schemas.studies import Study, StudyID +from .services_http.director_v2 import DirectorV2Api +from .services_http.jobs import start_project from .services_http.solver_job_models_converters import ( create_job_from_project, create_job_inputs_from_node_inputs, + create_jobstatus_from_task, create_new_project_for_job, ) +from .services_http.solver_job_outputs import ResultsTypes, get_solver_output_results +from .services_http.storage import StorageApi, to_file_api_model +from .services_http.study_job_models_converters import ( + create_job_from_study, + create_job_outputs_from_project_outputs, + get_project_and_file_inputs_from_job_inputs, +) from .services_http.webserver import AuthSession +from .services_rpc.director_v2 import DirectorV2Service +from .services_rpc.storage import StorageService from .services_rpc.wb_api_server import WbApiRpcClient _logger = logging.getLogger(__name__) +def compose_solver_job_resource_name(solver_key, solver_version, job_id) -> str: + """Creates a unique resource name for solver's jobs""" + return Job.compose_resource_name( + parent_name=Solver.compose_resource_name(solver_key, solver_version), + job_id=job_id, + ) + + +def compose_study_job_resource_name(study_key, job_id) -> str: + """Creates a unique resource name for study's jobs""" + return Job.compose_resource_name( + parent_name=Study.compose_resource_name(study_key), + job_id=job_id, + ) + + @dataclass(frozen=True, kw_only=True) class JobService: _web_rest_client: AuthSession _web_rpc_client: WbApiRpcClient + _storage_rpc_client: StorageService + _director2_api: DirectorV2Api + _storage_rest_client: StorageApi + _directorv2_rpc_client: DirectorV2Service + _solver_service: SolverService + user_id: UserID product_name: ProductName - async def list_jobs( + async def _list_jobs( self, job_parent_resource_name: str, *, @@ -91,14 +156,72 @@ async def list_jobs( return jobs, projects_page.meta - async def create_job( + async def list_solver_jobs( + self, + *, + pagination_offset: PageOffsetInt | None = None, + pagination_limit: PageLimitInt | None = None, + filter_by_solver_key: SolverKeyId | None = None, + filter_by_solver_version: VersionStr | None = None, + filter_any_custom_metadata: list[NameValueTuple] | None = None, + ) -> tuple[list[Job], PageMetaInfoLimitOffset]: + """Lists all solver jobs for a user with pagination""" + + # 1. Compose job parent resource name prefix + collection_or_resource_ids = [ + "solvers", # solver_id, "releases", solver_version, "jobs", + ] + if filter_by_solver_key: + collection_or_resource_ids.append(filter_by_solver_key) + if filter_by_solver_version: + collection_or_resource_ids.append("releases") + collection_or_resource_ids.append(filter_by_solver_version) + elif filter_by_solver_version: + raise SolverServiceListJobsFiltersError + + job_parent_resource_name = compose_resource_name(*collection_or_resource_ids) + + # 2. 
list jobs under job_parent_resource_name + return await self._list_jobs( + job_parent_resource_name=job_parent_resource_name, + filter_any_custom_metadata=filter_any_custom_metadata, + pagination_offset=pagination_offset, + pagination_limit=pagination_limit, + ) + + async def list_study_jobs( + self, + *, + filter_by_study_id: StudyID | None = None, + pagination_offset: PageOffsetInt | None = None, + pagination_limit: PageLimitInt | None = None, + ) -> tuple[list[Job], PageMetaInfoLimitOffset]: + """Lists all solver jobs for a user with pagination""" + + # 1. Compose job parent resource name prefix + collection_or_resource_ids: list[str] = [ + "study", # study_id, "jobs", + ] + if filter_by_study_id: + collection_or_resource_ids.append(f"{filter_by_study_id}") + + job_parent_resource_name = compose_resource_name(*collection_or_resource_ids) + + # 2. list jobs under job_parent_resource_name + return await self._list_jobs( + job_parent_resource_name=job_parent_resource_name, + pagination_offset=pagination_offset, + pagination_limit=pagination_limit, + ) + + async def create_project_marked_as_job( self, *, solver_or_program: Solver | Program, inputs: JobInputs, parent_project_uuid: ProjectID | None, parent_node_id: NodeID | None, - url_for: Callable[..., HttpUrl], + job_links: JobLinks, hidden: bool, project_name: str | None, description: str | None, @@ -131,6 +254,7 @@ async def create_job( user_id=self.user_id, project_uuid=new_project.uuid, job_parent_resource_name=pre_job.runner_name, + storage_assets_deleted=False, ) assert new_project # nosec @@ -138,7 +262,9 @@ async def create_job( # for consistency, it rebuild job job = create_job_from_project( - solver_or_program=solver_or_program, project=new_project, url_for=url_for + solver_or_program=solver_or_program, + project=new_project, + job_links=job_links, ) assert job.id == pre_job.id # nosec assert job.name == pre_job.name # nosec @@ -147,3 +273,305 @@ async def create_job( job_id=job.id, ) return job, new_project + + async def start_log_export( + self, + job_id: JobID, + ) -> AsyncJobGet: + file_ids = await self._directorv2_rpc_client.get_computation_task_log_file_ids( + project_id=job_id + ) + return await self._storage_rpc_client.start_data_export( + paths_to_export=[ + Path(elm.file_id) for elm in file_ids if elm.file_id is not None + ], + ) + + async def get_job( + self, job_parent_resource_name: RelativeResourceName, job_id: JobID + ) -> ProjectJobRpcGet: + """This method can be used to check that the project exists and has the correct parent resource.""" + return await self._web_rpc_client.get_project_marked_as_job( + product_name=self.product_name, + user_id=self.user_id, + project_id=job_id, + job_parent_resource_name=job_parent_resource_name, + ) + + async def get_solver_job_outputs( + self, + solver_key: SolverKeyId, + version: VersionStr, + job_id: JobID, + async_pg_engine: AsyncEngine, + ) -> JobOutputs: + job_name = compose_solver_job_resource_name(solver_key, version, job_id) + _logger.debug("Get Job '%s' outputs", job_name) + + project_marked_as_job = await self.get_job( + job_id=job_id, + job_parent_resource_name=Solver.compose_resource_name( + key=solver_key, version=version + ), + ) + node_ids = list(project_marked_as_job.workbench.keys()) + assert len(node_ids) == 1 # nosec + + if project_marked_as_job.storage_assets_deleted: + _logger.warning("Storage data for job '%s' has been deleted", job_name) + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, detail="Assets have been deleted" + ) + + product_price 
= await self._web_rest_client.get_product_price() + if product_price.usd_per_credit is not None: + wallet = await self._web_rest_client.get_project_wallet( + project_id=project_marked_as_job.uuid + ) + if wallet is None: + raise MissingWalletError(job_id=project_marked_as_job.uuid) + wallet_with_credits = await self._web_rest_client.get_wallet( + wallet_id=wallet.wallet_id + ) + if wallet_with_credits.available_credits <= ZERO_CREDITS: + raise InsufficientCreditsError( + wallet_name=wallet_with_credits.name, + wallet_credit_amount=wallet_with_credits.available_credits, + ) + + outputs: dict[str, ResultsTypes] = await get_solver_output_results( + user_id=self.user_id, + project_uuid=job_id, + node_uuid=UUID(node_ids[0]), + db_engine=async_pg_engine, + ) + + results: dict[str, ArgumentTypes] = {} + for name, value in outputs.items(): + if isinstance(value, BaseFileLink): + file_id: UUID = DomainFile.create_id(*value.path.split("/")) + + found = await self._storage_rest_client.search_owned_files( + user_id=self.user_id, file_id=file_id, limit=1 + ) + if found: + assert len(found) == 1 # nosec + results[name] = SchemaFile.from_domain_model( + to_file_api_model(found[0]) + ) + else: + api_file = await self._storage_rest_client.create_soft_link( + user_id=self.user_id, + target_s3_path=value.path, + as_file_id=file_id, + ) + results[name] = SchemaFile.from_domain_model(api_file) + else: + results[name] = value + + return JobOutputs(job_id=job_id, results=results) + + async def get_study_job_outputs( + self, + study_id: StudyID, + job_id: JobID, + ) -> JobOutputs: + job_name = compose_study_job_resource_name(study_id, job_id) + _logger.debug("Getting Job Outputs for '%s'", job_name) + + project_outputs = await self._web_rest_client.get_project_outputs( + project_id=job_id + ) + return await create_job_outputs_from_project_outputs( + job_id, project_outputs, self.user_id, self._storage_rest_client + ) + + async def delete_job_assets( + self, job_parent_resource_name: RelativeResourceName, job_id: JobID + ) -> None: + """Marks job project as hidden and deletes S3 assets associated it""" + await self._web_rest_client.patch_project( + project_id=job_id, patch_params=ProjectPatch(hidden=True) + ) + await self._storage_rest_client.delete_project_s3_assets( + user_id=self.user_id, project_id=job_id + ) + await self._web_rpc_client.mark_project_as_job( + product_name=self.product_name, + user_id=self.user_id, + project_uuid=job_id, + job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=True, + ) + + async def create_solver_job( + self, + *, + solver_key: SolverKeyId, + version: VersionStr, + inputs: JobInputs, + hidden: bool, + job_links: JobLinks, + x_simcore_parent_project_uuid: ProjectID | None, + x_simcore_parent_node_id: NodeID | None, + ) -> Job: + + solver = await self._solver_service.get_solver( + solver_key=solver_key, + solver_version=version, + ) + job, _ = await self.create_project_marked_as_job( + project_name=None, + description=None, + solver_or_program=solver, + inputs=inputs, + hidden=hidden, + parent_project_uuid=x_simcore_parent_project_uuid, + parent_node_id=x_simcore_parent_node_id, + job_links=job_links, + ) + + return job + + async def inspect_solver_job( + self, + *, + solver_key: SolverKeyId, + version: VersionStr, + job_id: JobID, + ) -> JobStatus: + assert solver_key # nosec + assert version # nosec + task = await self._director2_api.get_computation( + project_id=job_id, user_id=self.user_id + ) + job_status: JobStatus = create_jobstatus_from_task(task) + 
return job_status + + async def start_solver_job( + self, + *, + solver_key: SolverKeyId, + version: VersionStr, + job_id: JobID, + pricing_spec: JobPricingSpecification | None, + ) -> JobStatus: + """ + Raises ProjectAlreadyStartedError if the project is already started + """ + job_name = compose_solver_job_resource_name(solver_key, version, job_id) + _logger.debug("Start Job '%s'", job_name) + job_parent_resource_name = Solver.compose_resource_name(solver_key, version) + job = await self.get_job( + job_id=job_id, job_parent_resource_name=job_parent_resource_name + ) + if job.storage_assets_deleted: + raise JobAssetsMissingError(job_id=job_id) + await start_project( + pricing_spec=pricing_spec, + job_id=job_id, + expected_job_name=job_name, + webserver_api=self._web_rest_client, + ) + return await self.inspect_solver_job( + solver_key=solver_key, + version=version, + job_id=job_id, + ) + + async def create_studies_job( + self, + *, + study_id: StudyID, + job_inputs: JobInputs, + x_simcore_parent_project_uuid: ProjectID | None, + x_simcore_parent_node_id: NodeID | None, + job_links: JobLinks, + hidden: bool, + ) -> Job: + + project = await self._web_rest_client.clone_project( + project_id=study_id, + hidden=hidden, + parent_project_uuid=x_simcore_parent_project_uuid, + parent_node_id=x_simcore_parent_node_id, + ) + job = create_job_from_study( + study_key=study_id, + project=project, + job_inputs=job_inputs, + job_links=job_links, + ) + + await self._web_rest_client.patch_project( + project_id=job.id, + patch_params=ProjectPatch(name=job.name), + ) + + await self._web_rpc_client.mark_project_as_job( + product_name=self.product_name, + user_id=self.user_id, + project_uuid=job.id, + job_parent_resource_name=job.runner_name, + storage_assets_deleted=False, + ) + + project_inputs = await self._web_rest_client.get_project_inputs( + project_id=project.uuid + ) + + file_param_nodes = {} + for node_id, node in project.workbench.items(): + if ( + node.key == file_picker.META.key + and node.outputs is not None + and len(node.outputs) == 0 + ): + file_param_nodes[node.label] = node_id + + file_inputs: dict[InputID, InputTypes] = {} + + ( + new_project_inputs, + new_project_file_inputs, + ) = get_project_and_file_inputs_from_job_inputs( + project_inputs, file_inputs, job_inputs + ) + + for node_label, file_link in new_project_file_inputs.items(): + await self._web_rest_client.update_node_outputs( + project_id=project.uuid, + node_id=UUID(file_param_nodes[node_label]), + new_node_outputs=NodeOutputs(outputs={"outFile": file_link}), + ) + + if len(new_project_inputs) > 0: + await self._web_rest_client.update_project_inputs( + project_id=project.uuid, new_inputs=new_project_inputs + ) + return job + + async def inspect_study_job(self, *, job_id: JobID) -> JobStatus: + task = await self._director2_api.get_computation( + project_id=job_id, user_id=self.user_id + ) + job_status: JobStatus = create_jobstatus_from_task(task) + return job_status + + async def start_study_job( + self, + *, + job_id: JobID, + study_id: StudyID, + pricing_spec: JobPricingSpecification | None, + ): + job_name = compose_study_job_resource_name(study_id, job_id) + await start_project( + job_id=job_id, + expected_job_name=job_name, + webserver_api=self._web_rest_client, + pricing_spec=pricing_spec, + ) + return await self.inspect_study_job( + job_id=job_id, + ) diff --git a/services/api-server/src/simcore_service_api_server/_service_solvers.py b/services/api-server/src/simcore_service_api_server/_service_solvers.py index 
3c0aac79721a..458f53432e1e 100644 --- a/services/api-server/src/simcore_service_api_server/_service_solvers.py +++ b/services/api-server/src/simcore_service_api_server/_service_solvers.py @@ -10,18 +10,11 @@ from models_library.rpc_pagination import PageLimitInt from models_library.services_enums import ServiceType from models_library.users import UserID -from simcore_service_api_server.models.basic_types import NameValueTuple -from ._service_jobs import JobService from ._service_utils import check_user_product_consistency from .exceptions.backend_errors import ( ProgramOrSolverOrStudyNotFoundError, ) -from .exceptions.custom_errors import ( - SolverServiceListJobsFiltersError, -) -from .models.api_resources import compose_resource_name -from .models.schemas.jobs import Job from .models.schemas.solvers import Solver, SolverKeyId from .services_rpc.catalog import CatalogService @@ -29,7 +22,6 @@ @dataclass(frozen=True, kw_only=True) class SolverService: catalog_service: CatalogService - job_service: JobService user_id: UserID product_name: ProductName @@ -41,13 +33,6 @@ def __post_init__(self): product_name=self.product_name, ) - check_user_product_consistency( - service_cls_name=self.__class__.__name__, - service_provider=self.job_service, - user_id=self.user_id, - product_name=self.product_name, - ) - async def get_solver( self, *, @@ -84,39 +69,6 @@ async def get_latest_release( return Solver.create_from_service(service) - async def list_jobs( - self, - *, - pagination_offset: PageOffsetInt | None = None, - pagination_limit: PageLimitInt | None = None, - filter_by_solver_key: SolverKeyId | None = None, - filter_by_solver_version: VersionStr | None = None, - filter_any_custom_metadata: list[NameValueTuple] | None = None, - ) -> tuple[list[Job], PageMetaInfoLimitOffset]: - """Lists all solver jobs for a user with pagination""" - - # 1. Compose job parent resource name prefix - collection_or_resource_ids = [ - "solvers", # solver_id, "releases", solver_version, "jobs", - ] - if filter_by_solver_key: - collection_or_resource_ids.append(filter_by_solver_key) - if filter_by_solver_version: - collection_or_resource_ids.append("releases") - collection_or_resource_ids.append(filter_by_solver_version) - elif filter_by_solver_version: - raise SolverServiceListJobsFiltersError - - job_parent_resource_name = compose_resource_name(*collection_or_resource_ids) - - # 2. 
list jobs under job_parent_resource_name - return await self.job_service.list_jobs( - job_parent_resource_name=job_parent_resource_name, - filter_any_custom_metadata=filter_any_custom_metadata, - pagination_offset=pagination_offset, - pagination_limit=pagination_limit, - ) - async def solver_release_history( self, *, diff --git a/services/api-server/src/simcore_service_api_server/_service_studies.py b/services/api-server/src/simcore_service_api_server/_service_studies.py deleted file mode 100644 index 733315f33305..000000000000 --- a/services/api-server/src/simcore_service_api_server/_service_studies.py +++ /dev/null @@ -1,58 +0,0 @@ -from dataclasses import dataclass - -from models_library.products import ProductName -from models_library.rest_pagination import ( - MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE, - PageMetaInfoLimitOffset, - PageOffsetInt, -) -from models_library.rpc_pagination import PageLimitInt -from models_library.users import UserID - -from ._service_jobs import JobService -from ._service_utils import check_user_product_consistency -from .models.api_resources import compose_resource_name -from .models.schemas.jobs import Job -from .models.schemas.studies import StudyID - -DEFAULT_PAGINATION_LIMIT = MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE - 1 - - -@dataclass(frozen=True, kw_only=True) -class StudyService: - job_service: JobService - user_id: UserID - product_name: ProductName - - def __post_init__(self): - check_user_product_consistency( - service_cls_name=self.__class__.__name__, - service_provider=self.job_service, - user_id=self.user_id, - product_name=self.product_name, - ) - - async def list_jobs( - self, - *, - filter_by_study_id: StudyID | None = None, - pagination_offset: PageOffsetInt | None = None, - pagination_limit: PageLimitInt | None = None, - ) -> tuple[list[Job], PageMetaInfoLimitOffset]: - """Lists all solver jobs for a user with pagination""" - - # 1. Compose job parent resource name prefix - collection_or_resource_ids: list[str] = [ - "study", # study_id, "jobs", - ] - if filter_by_study_id: - collection_or_resource_ids.append(f"{filter_by_study_id}") - - job_parent_resource_name = compose_resource_name(*collection_or_resource_ids) - - # 2. 
list jobs under job_parent_resource_name - return await self.job_service.list_jobs( - job_parent_resource_name=job_parent_resource_name, - pagination_offset=pagination_offset, - pagination_limit=pagination_limit, - ) diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py b/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py new file mode 100644 index 000000000000..5e6a05a48193 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/api/dependencies/celery.py @@ -0,0 +1,10 @@ +from celery_library.task_manager import CeleryTaskManager +from fastapi import FastAPI +from servicelib.celery.task_manager import TaskManager + + +def get_task_manager(app: FastAPI) -> TaskManager: + assert hasattr(app.state, "task_manager") # nosec + task_manager = app.state.task_manager + assert isinstance(task_manager, CeleryTaskManager) # nosec + return task_manager diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/functions.py b/services/api-server/src/simcore_service_api_server/api/dependencies/functions.py new file mode 100644 index 000000000000..354a0541f488 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/api/dependencies/functions.py @@ -0,0 +1,87 @@ +from typing import Annotated + +from fastapi import Depends +from models_library.functions import ( + FunctionJob, + FunctionJobID, + FunctionJobStatus, + FunctionOutputs, + RegisteredFunction, +) +from models_library.products import ProductName +from models_library.users import UserID +from simcore_service_api_server.api.dependencies.authentication import ( + get_current_user_id, + get_product_name, +) +from simcore_service_api_server.api.dependencies.webserver_rpc import ( + get_wb_api_rpc_client, +) +from simcore_service_api_server.services_rpc.wb_api_server import WbApiRpcClient + + +async def get_stored_job_outputs( + function_job_id: FunctionJobID, + wb_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], + user_id: Annotated[UserID, Depends(get_current_user_id)], + product_name: Annotated[ProductName, Depends(get_product_name)], +) -> FunctionOutputs: + + return await wb_api_rpc.get_function_job_outputs( + function_job_id=function_job_id, user_id=user_id, product_name=product_name + ) + + +async def get_function_job_dependency( + function_job_id: FunctionJobID, + wb_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], + user_id: Annotated[UserID, Depends(get_current_user_id)], + product_name: Annotated[ProductName, Depends(get_product_name)], +) -> FunctionJob: + return await wb_api_rpc.get_function_job( + function_job_id=function_job_id, user_id=user_id, product_name=product_name + ) + + +async def get_function_from_functionjob( + function_job: Annotated[FunctionJob, Depends(get_function_job_dependency)], + wb_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], + user_id: Annotated[UserID, Depends(get_current_user_id)], + product_name: Annotated[ProductName, Depends(get_product_name)], +) -> RegisteredFunction: + return await wb_api_rpc.get_function( + function_id=function_job.function_uid, + user_id=user_id, + product_name=product_name, + ) + + +async def get_function_from_functionjobid( + function_job_id: FunctionJobID, + wb_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], + user_id: Annotated[UserID, Depends(get_current_user_id)], + product_name: Annotated[ProductName, Depends(get_product_name)], +) -> RegisteredFunction: + function_job = await 
get_function_job_dependency( + function_job_id=function_job_id, + wb_api_rpc=wb_api_rpc, + user_id=user_id, + product_name=product_name, + ) + return await get_function_from_functionjob( + function_job=function_job, + wb_api_rpc=wb_api_rpc, + user_id=user_id, + product_name=product_name, + ) + + +async def get_stored_job_status( + function_job_id: FunctionJobID, + wb_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], + user_id: Annotated[UserID, Depends(get_current_user_id)], + product_name: Annotated[ProductName, Depends(get_product_name)], +) -> FunctionJobStatus: + return await wb_api_rpc.get_function_job_status( + function_job_id=function_job_id, user_id=user_id, product_name=product_name + ) diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/models_schemas_function_filters.py b/services/api-server/src/simcore_service_api_server/api/dependencies/models_schemas_function_filters.py index bd0e4e49cbee..1c2a9a791ce7 100644 --- a/services/api-server/src/simcore_service_api_server/api/dependencies/models_schemas_function_filters.py +++ b/services/api-server/src/simcore_service_api_server/api/dependencies/models_schemas_function_filters.py @@ -1,7 +1,16 @@ from typing import Annotated from fastapi import Query -from models_library.functions import FunctionIDString, FunctionJobCollectionsListFilters +from models_library.functions import ( + FunctionID, + FunctionIDString, + FunctionJobCollectionID, + FunctionJobCollectionsListFilters, + FunctionJobID, +) +from simcore_service_api_server.models.schemas.functions import ( + FunctionJobsListFilters, +) from ._utils import get_query_params @@ -20,3 +29,31 @@ def get_function_job_collections_filters( return FunctionJobCollectionsListFilters( has_function_id=has_function_id, ) + + +def get_function_jobs_filters( + # pylint: disable=unsubscriptable-object + function_id: Annotated[ + FunctionID | None, + Query(**get_query_params(FunctionJobsListFilters.model_fields["function_id"])), + ] = None, + function_job_ids: Annotated[ + list[FunctionJobID] | None, + Query( + **get_query_params(FunctionJobsListFilters.model_fields["function_job_ids"]) + ), + ] = None, + function_job_collection_id: Annotated[ + FunctionJobCollectionID | None, + Query( + **get_query_params( + FunctionJobsListFilters.model_fields["function_job_collection_id"] + ) + ), + ] = None, +) -> FunctionJobsListFilters: + return FunctionJobsListFilters( + function_id=function_id, + function_job_ids=function_job_ids, + function_job_collection_id=function_job_collection_id, + ) diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/models_schemas_jobs_filters.py b/services/api-server/src/simcore_service_api_server/api/dependencies/models_schemas_jobs_filters.py index c4683f653d1c..f18198cbba12 100644 --- a/services/api-server/src/simcore_service_api_server/api/dependencies/models_schemas_jobs_filters.py +++ b/services/api-server/src/simcore_service_api_server/api/dependencies/models_schemas_jobs_filters.py @@ -18,7 +18,9 @@ def get_job_metadata_filter( *Format*: `key:pattern` where pattern can contain glob wildcards """ ), - example=["key1:val*", "key2:exactval"], + examples=[ + ["key1:val*", "key2:exactval"], + ], ), ] = None, ) -> JobMetadataFilter | None: diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/rabbitmq.py b/services/api-server/src/simcore_service_api_server/api/dependencies/rabbitmq.py index e90c60861eb9..c08ae53ba743 100644 --- 
a/services/api-server/src/simcore_service_api_server/api/dependencies/rabbitmq.py +++ b/services/api-server/src/simcore_service_api_server/api/dependencies/rabbitmq.py @@ -3,12 +3,12 @@ from fastapi import Depends, FastAPI from pydantic import NonNegativeInt -from servicelib.aiohttp.application_setup import ApplicationSetupError from servicelib.fastapi.dependencies import get_app from servicelib.rabbitmq import RabbitMQClient from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient from tenacity import before_sleep_log, retry, stop_after_delay, wait_fixed +from ...exceptions.custom_errors import ApplicationSetupError from ...services_http.log_streaming import LogDistributor _MAX_WAIT_FOR_LOG_DISTRIBUTOR_SECONDS: Final[int] = 10 @@ -17,7 +17,7 @@ def get_rabbitmq_rpc_client( - app: Annotated[FastAPI, Depends(get_app)] + app: Annotated[FastAPI, Depends(get_app)], ) -> RabbitMQRPCClient: assert app.state.rabbitmq_rpc_client # nosec return cast(RabbitMQRPCClient, app.state.rabbitmq_rpc_client) @@ -42,7 +42,7 @@ def get_log_distributor(app: Annotated[FastAPI, Depends(get_app)]) -> LogDistrib async def wait_till_log_distributor_ready(app) -> None: if not hasattr(app.state, "log_distributor"): msg = f"Api server's log_distributor was not ready within {_MAX_WAIT_FOR_LOG_DISTRIBUTOR_SECONDS=} seconds" - raise ApplicationSetupError(msg) + raise ApplicationSetupError(tip=msg) def get_log_check_timeout(app: Annotated[FastAPI, Depends(get_app)]) -> NonNegativeInt: diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/services.py b/services/api-server/src/simcore_service_api_server/api/dependencies/services.py index 9c5a29a4f618..63835769f456 100644 --- a/services/api-server/src/simcore_service_api_server/api/dependencies/services.py +++ b/services/api-server/src/simcore_service_api_server/api/dependencies/services.py @@ -3,21 +3,32 @@ from collections.abc import Callable from typing import Annotated -from fastapi import Depends, HTTPException, Request, status +from fastapi import Depends, FastAPI, HTTPException, Request, status from models_library.products import ProductName from models_library.users import UserID +from servicelib.fastapi.dependencies import get_app from servicelib.rabbitmq import RabbitMQRPCClient +from sqlalchemy.ext.asyncio import AsyncEngine +from ..._service_function_jobs import FunctionJobService +from ..._service_function_jobs_task_client import FunctionJobTaskClientService +from ..._service_functions import FunctionService from ..._service_jobs import JobService from ..._service_programs import ProgramService from ..._service_solvers import SolverService -from ..._service_studies import StudyService +from ...services_http.director_v2 import DirectorV2Api +from ...services_http.storage import StorageApi +from ...services_http.webserver import AuthSession from ...services_rpc.catalog import CatalogService +from ...services_rpc.director_v2 import DirectorV2Service +from ...services_rpc.storage import StorageService from ...services_rpc.wb_api_server import WbApiRpcClient from ...utils.client_base import BaseServiceClientApi +from ..dependencies.celery import get_task_manager +from ..dependencies.database import get_db_asyncpg_engine from .authentication import get_current_user_id, get_product_name from .rabbitmq import get_rabbitmq_rpc_client -from .webserver_http import AuthSession, get_webserver_session +from .webserver_http import get_webserver_session from .webserver_rpc import get_wb_api_rpc_client @@ -60,11 +71,58 @@ def get_catalog_service( ) 
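# Illustrative sketch: the dependency providers added below in this hunk
# (get_storage_service, get_directorv2_service, get_solver_service, ...) are plain
# FastAPI dependency callables, so a test can swap out any single layer through
# dependency_overrides. The helper name and the fake object passed in are
# placeholders assumed only for this sketch.
from fastapi import FastAPI

from simcore_service_api_server.api.dependencies.services import get_storage_service


def override_storage_service(app: FastAPI, fake_storage_service: object) -> None:
    # Routes resolving get_job_service (and the function-job services built on top
    # of it) will now receive fake_storage_service instead of the RPC-backed StorageService.
    app.dependency_overrides[get_storage_service] = lambda: fake_storage_service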
+def get_storage_service( + rpc_client: Annotated[RabbitMQRPCClient, Depends(get_rabbitmq_rpc_client)], + user_id: Annotated[UserID, Depends(get_current_user_id)], + product_name: Annotated[ProductName, Depends(get_product_name)], +) -> StorageService: + return StorageService( + _rpc_client=rpc_client, + _user_id=user_id, + _product_name=product_name, + ) + + +def get_directorv2_service( + rpc_client: Annotated[RabbitMQRPCClient, Depends(get_rabbitmq_rpc_client)], +) -> DirectorV2Service: + return DirectorV2Service(_rpc_client=rpc_client) + + +def get_solver_service( + catalog_service: Annotated[CatalogService, Depends(get_catalog_service)], + user_id: Annotated[UserID, Depends(get_current_user_id)], + product_name: Annotated[ProductName, Depends(get_product_name)], +) -> SolverService: + """ + "Assembles" the SolverService layer to the underlying service and client interfaces + in the context of the rest controller (i.e. api/dependencies) + """ + return SolverService( + catalog_service=catalog_service, + user_id=user_id, + product_name=product_name, + ) + + +def get_program_service( + catalog_service: Annotated[CatalogService, Depends(get_catalog_service)], +) -> ProgramService: + return ProgramService( + catalog_service=catalog_service, + ) + + def get_job_service( web_rest_api: Annotated[AuthSession, Depends(get_webserver_session)], + director2_api: Annotated[DirectorV2Api, Depends(get_api_client(DirectorV2Api))], + storage_api: Annotated[StorageApi, Depends(get_api_client(StorageApi))], web_rpc_api: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], + storage_service: Annotated[StorageService, Depends(get_storage_service)], + directorv2_service: Annotated[DirectorV2Service, Depends(get_directorv2_service)], user_id: Annotated[UserID, Depends(get_current_user_id)], product_name: Annotated[ProductName, Depends(get_product_name)], + solver_service: Annotated[SolverService, Depends(get_solver_service)], ) -> JobService: """ "Assembles" the JobsService layer to the underlying service and client interfaces @@ -73,44 +131,73 @@ def get_job_service( return JobService( _web_rest_client=web_rest_api, _web_rpc_client=web_rpc_api, + _storage_rpc_client=storage_service, + _directorv2_rpc_client=directorv2_service, + _director2_api=director2_api, + _storage_rest_client=storage_api, + _solver_service=solver_service, user_id=user_id, product_name=product_name, ) -def get_solver_service( - catalog_service: Annotated[CatalogService, Depends(get_catalog_service)], - job_service: Annotated[JobService, Depends(get_job_service)], +def get_function_service( + web_rpc_api: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], user_id: Annotated[UserID, Depends(get_current_user_id)], product_name: Annotated[ProductName, Depends(get_product_name)], -) -> SolverService: - """ - "Assembles" the SolverService layer to the underlying service and client interfaces - in the context of the rest controller (i.e. 
api/dependencies) - """ - return SolverService( - catalog_service=catalog_service, - job_service=job_service, +) -> FunctionService: + return FunctionService( + _web_rpc_client=web_rpc_api, user_id=user_id, product_name=product_name, ) -def get_study_service( +def get_function_job_service( + web_rpc_api: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], job_service: Annotated[JobService, Depends(get_job_service)], + function_service: Annotated[FunctionService, Depends(get_function_service)], user_id: Annotated[UserID, Depends(get_current_user_id)], product_name: Annotated[ProductName, Depends(get_product_name)], -) -> StudyService: - return StudyService( - job_service=job_service, + webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], + storage_service: Annotated[StorageService, Depends(get_storage_service)], +) -> FunctionJobService: + return FunctionJobService( + _web_rpc_client=web_rpc_api, + _job_service=job_service, + _function_service=function_service, + _storage_client=storage_service, + _webserver_api=webserver_api, user_id=user_id, product_name=product_name, ) -def get_program_service( - catalog_service: Annotated[CatalogService, Depends(get_catalog_service)], -) -> ProgramService: - return ProgramService( - catalog_service=catalog_service, +def get_function_job_task_client_service( + app: Annotated[FastAPI, Depends(get_app)], + web_rpc_api: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], + job_service: Annotated[JobService, Depends(get_job_service)], + function_service: Annotated[FunctionService, Depends(get_function_service)], + function_job_service: Annotated[ + FunctionJobService, Depends(get_function_job_service) + ], + user_id: Annotated[UserID, Depends(get_current_user_id)], + product_name: Annotated[ProductName, Depends(get_product_name)], + webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], + storage_service: Annotated[StorageService, Depends(get_storage_service)], + async_pg_engine: Annotated[AsyncEngine, Depends(get_db_asyncpg_engine)], +) -> FunctionJobTaskClientService: + + task_manager = get_task_manager(app) + return FunctionJobTaskClientService( + _web_rpc_client=web_rpc_api, + _job_service=job_service, + _function_service=function_service, + _function_job_service=function_job_service, + _storage_client=storage_service, + _webserver_api=webserver_api, + user_id=user_id, + product_name=product_name, + _celery_task_manager=task_manager, + _async_pg_engine=async_pg_engine, ) diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/tasks.py b/services/api-server/src/simcore_service_api_server/api/dependencies/tasks.py new file mode 100644 index 000000000000..645aba6a8ff4 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/api/dependencies/tasks.py @@ -0,0 +1,13 @@ +from typing import Annotated + +from fastapi import Depends +from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient + +from ...services_rpc.async_jobs import AsyncJobClient +from .rabbitmq import get_rabbitmq_rpc_client + + +def get_async_jobs_client( + rabbitmq_rpc_client: Annotated[RabbitMQRPCClient, Depends(get_rabbitmq_rpc_client)], +) -> AsyncJobClient: + return AsyncJobClient(_rabbitmq_rpc_client=rabbitmq_rpc_client) diff --git a/services/api-server/src/simcore_service_api_server/api/dependencies/webserver_http.py b/services/api-server/src/simcore_service_api_server/api/dependencies/webserver_http.py index d70a64575e2b..df4325dc1ffe 100644 --- 
a/services/api-server/src/simcore_service_api_server/api/dependencies/webserver_http.py +++ b/services/api-server/src/simcore_service_api_server/api/dependencies/webserver_http.py @@ -4,14 +4,16 @@ from common_library.json_serialization import json_dumps from cryptography.fernet import Fernet from fastapi import Depends, FastAPI, HTTPException, status -from fastapi.requests import Request -from servicelib.rest_constants import X_PRODUCT_NAME_HEADER from ..._constants import MSG_BACKEND_SERVICE_UNAVAILABLE from ...core.settings import ApplicationSettings, WebServerSettings from ...services_http.webserver import AuthSession from .application import get_app, get_settings -from .authentication import Identity, get_active_user_email, get_current_identity +from .authentication import ( + Identity, + get_active_user_email, + get_current_identity, +) def _get_settings( @@ -26,19 +28,16 @@ def _get_settings( return settings -def _get_encrypt(request: Request) -> Fernet | None: - e: Fernet | None = getattr(request.app.state, "webserver_fernet", None) - return e - - def get_session_cookie( identity: Annotated[str, Depends(get_active_user_email)], settings: Annotated[WebServerSettings, Depends(_get_settings)], - fernet: Annotated[Fernet | None, Depends(_get_encrypt)], + app: Annotated[FastAPI, Depends(get_app)], ) -> dict: # Based on aiohttp_session and aiohttp_security # SEE services/web/server/tests/unit/with_dbs/test_login.py + fernet: Fernet | None = getattr(app.state, "webserver_fernet", None) + if fernet is None: raise HTTPException( status.HTTP_503_SERVICE_UNAVAILABLE, detail=MSG_BACKEND_SERVICE_UNAVAILABLE @@ -68,14 +67,14 @@ def get_webserver_session( Lifetime of AuthSession wrapper is one request because it needs different session cookies Lifetime of embedded client is attached to the app lifetime """ - product_header: dict[str, str] = {X_PRODUCT_NAME_HEADER: f"{identity.product_name}"} - session = AuthSession.create(app, session_cookies, product_header) + session = AuthSession.create( + app, + session_cookies=session_cookies, + product_name=identity.product_name, + user_id=identity.user_id, + ) assert isinstance(session, AuthSession) # nosec return session -__all__: tuple[str, ...] = ( - "AuthSession", - "get_session_cookie", - "get_webserver_session", -) +__all__: tuple[str, ...] 
= ("AuthSession",) diff --git a/services/api-server/src/simcore_service_api_server/api/root.py b/services/api-server/src/simcore_service_api_server/api/root.py index 8c6dbc6de1a4..b5a295ee7283 100644 --- a/services/api-server/src/simcore_service_api_server/api/root.py +++ b/services/api-server/src/simcore_service_api_server/api/root.py @@ -18,6 +18,7 @@ solvers_jobs_read, studies, studies_jobs, + tasks, users, wallets, ) @@ -65,6 +66,7 @@ def create_router(settings: ApplicationSettings): router.include_router( functions_routes.function_router, tags=["functions"], prefix=_FUNCTIONS_PREFIX ) + router.include_router(tasks.router, tags=["tasks"], prefix="/tasks") # NOTE: multiple-files upload is currently disabled # Web form to upload files at http://localhost:8000/v0/upload-form-view diff --git a/services/api-server/src/simcore_service_api_server/api/routes/files.py b/services/api-server/src/simcore_service_api_server/api/routes/files.py index 28028cf8b255..69f779f13e19 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/files.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/files.py @@ -11,22 +11,18 @@ from fastapi.exceptions import HTTPException from fastapi_pagination.api import create_page from models_library.api_schemas_storage.storage_schemas import ( - ETag, FileUploadCompletionBody, - LinkType, ) from models_library.basic_types import SHA256Str from models_library.projects_nodes_io import NodeID -from pydantic import AnyUrl, ByteSize, PositiveInt, TypeAdapter, ValidationError +from pydantic import PositiveInt, ValidationError from servicelib.fastapi.requests_decorators import cancel_on_disconnect +from servicelib.logging_utils import log_context from simcore_sdk.node_ports_common.constants import SIMCORE_LOCATION from simcore_sdk.node_ports_common.file_io_utils import UploadableFileObject from simcore_sdk.node_ports_common.filemanager import ( UploadedFile, UploadedFolder, - abort_upload, - complete_file_upload, - get_upload_links_from_s3, ) from simcore_sdk.node_ports_common.filemanager import upload_path as storage_upload_path from starlette.datastructures import URL @@ -291,29 +287,26 @@ async def upload_files(files: list[UploadFile] = FileParam(...)): response_model=ClientFileUploadData, responses=_FILE_STATUS_CODES, ) -@cancel_on_disconnect async def get_upload_links( request: Request, client_file: UserFileToProgramJob | UserFile, user_id: Annotated[PositiveInt, Depends(get_current_user_id)], webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], + storage_client: Annotated[StorageApi, Depends(get_api_client(StorageApi))], ): """Get upload links for uploading a file to storage""" assert request # nosec file_meta = await _create_domain_file( webserver_api=webserver_api, file_id=None, client_file=client_file ) - _, upload_links = await get_upload_links_from_s3( - user_id=user_id, - store_name=None, - store_id=SIMCORE_LOCATION, - s3_object=file_meta.storage_file_id, - client_session=None, - link_type=LinkType.PRESIGNED, - file_size=ByteSize(client_file.filesize), - is_directory=False, - sha256_checksum=file_meta.sha256_checksum, - ) + + with log_context( + logger=_logger, level=logging.DEBUG, msg=f"Getting upload links for {file_meta}" + ): + upload_links = await storage_client.get_file_upload_links( + user_id=user_id, file=file_meta, client_file=client_file + ) + completion_url: URL = request.url_for( "complete_multipart_upload", file_id=file_meta.id ) @@ -420,12 +413,7 @@ async def abort_multipart_upload( file = await 
_create_domain_file( webserver_api=webserver_api, file_id=file_id, client_file=client_file ) - abort_link: URL = await storage_client.create_abort_upload_link( - file=file, query={"user_id": str(user_id)} - ) - await abort_upload( - abort_upload_link=TypeAdapter(AnyUrl).validate_python(str(abort_link)) - ) + await storage_client.abort_file_upload(user_id=user_id, file=file) @router.post( @@ -433,7 +421,6 @@ async def abort_multipart_upload( response_model=OutputFile, responses=_FILE_STATUS_CODES, ) -@cancel_on_disconnect async def complete_multipart_upload( request: Request, file_id: UUID, @@ -449,13 +436,8 @@ async def complete_multipart_upload( file = await _create_domain_file( webserver_api=webserver_api, file_id=file_id, client_file=client_file ) - complete_link: URL = await storage_client.create_complete_upload_link( - file=file, query={"user_id": str(user_id)} - ) - - e_tag: ETag | None = await complete_file_upload( - uploaded_parts=uploaded_parts.parts, - upload_completion_link=TypeAdapter(AnyUrl).validate_python(f"{complete_link}"), + e_tag = await storage_client.complete_file_upload( + user_id=user_id, file=file, uploaded_parts=uploaded_parts.parts ) assert e_tag is not None # nosec diff --git a/services/api-server/src/simcore_service_api_server/api/routes/function_job_collections_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/function_job_collections_routes.py index eb50f8bd0efc..a7a667fd3cbe 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/function_job_collections_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/function_job_collections_routes.py @@ -1,8 +1,8 @@ -import asyncio from typing import Annotated, Final from fastapi import APIRouter, Depends, status from fastapi_pagination.api import create_page +from fastapi_pagination.bases import AbstractPage from models_library.api_schemas_webserver.functions import ( FunctionJobCollection, FunctionJobCollectionID, @@ -12,31 +12,41 @@ RegisteredFunctionJobCollection, ) from models_library.products import ProductName -from models_library.users import UserID # Import UserID +from models_library.users import UserID +from servicelib.utils import limited_gather +from ..._service_function_jobs import FunctionJobService +from ..._service_function_jobs_task_client import ( + FunctionJobTaskClientService, +) from ...models.pagination import Page, PaginationParams from ...models.schemas.errors import ErrorGet -from ...services_http.director_v2 import DirectorV2Api from ...services_rpc.wb_api_server import WbApiRpcClient from ..dependencies.authentication import get_current_user_id, get_product_name +from ..dependencies.functions import ( + get_function_from_functionjobid, +) from ..dependencies.models_schemas_function_filters import ( get_function_job_collections_filters, ) -from ..dependencies.services import get_api_client +from ..dependencies.services import ( + get_function_job_service, + get_function_job_task_client_service, +) from ..dependencies.webserver_rpc import get_wb_api_rpc_client from ._constants import ( FMSG_CHANGELOG_ADDED_IN_VERSION, FMSG_CHANGELOG_NEW_IN_VERSION, create_route_description, ) -from .function_jobs_routes import function_job_status, get_function_job +from .function_jobs_routes import get_function_job # pylint: disable=too-many-arguments function_job_collections_router = APIRouter() FIRST_RELEASE_VERSION = "0.8.0" - +JOB_LIST_PAGE_RELEASE_VERSION = "0.11.0" _COMMON_FUNCTION_JOB_COLLECTION_ERROR_RESPONSES: Final[dict] = { 
status.HTTP_404_NOT_FOUND: { @@ -54,7 +64,7 @@ CHANGE_LOGS = {} for endpoint in ENDPOINTS: CHANGE_LOGS[endpoint] = [ - FMSG_CHANGELOG_NEW_IN_VERSION.format("0.8.0"), + FMSG_CHANGELOG_NEW_IN_VERSION.format(FIRST_RELEASE_VERSION), ] if endpoint in [ "list_function_job_collections", @@ -86,7 +96,7 @@ async def list_function_job_collections( ], user_id: Annotated[UserID, Depends(get_current_user_id)], product_name: Annotated[ProductName, Depends(get_product_name)], -): +) -> AbstractPage[RegisteredFunctionJobCollection]: function_job_collection_list, meta = await wb_api_rpc.list_function_job_collections( pagination_offset=page_params.offset, pagination_limit=page_params.limit, @@ -107,7 +117,9 @@ async def list_function_job_collections( responses={**_COMMON_FUNCTION_JOB_COLLECTION_ERROR_RESPONSES}, description=create_route_description( base="Get function job collection", - changelog=[FMSG_CHANGELOG_NEW_IN_VERSION.format(FIRST_RELEASE_VERSION)], + changelog=[ + FMSG_CHANGELOG_NEW_IN_VERSION.format(FIRST_RELEASE_VERSION), + ], ), ) async def get_function_job_collection( @@ -168,31 +180,72 @@ async def delete_function_job_collection( @function_job_collections_router.get( "/{function_job_collection_id:uuid}/function_jobs", - response_model=list[RegisteredFunctionJob], responses={**_COMMON_FUNCTION_JOB_COLLECTION_ERROR_RESPONSES}, description=create_route_description( base="Get the function jobs in function job collection", - changelog=[FMSG_CHANGELOG_NEW_IN_VERSION.format(FIRST_RELEASE_VERSION)], + changelog=[ + FMSG_CHANGELOG_NEW_IN_VERSION.format(FIRST_RELEASE_VERSION), + ], ), ) async def function_job_collection_list_function_jobs( function_job_collection_id: FunctionJobCollectionID, - wb_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], - user_id: Annotated[UserID, Depends(get_current_user_id)], - product_name: Annotated[ProductName, Depends(get_product_name)], + function_job_service: Annotated[ + FunctionJobService, Depends(get_function_job_service) + ], ) -> list[RegisteredFunctionJob]: - function_job_collection = await get_function_job_collection( + return await function_job_collection_list_function_jobs_list( function_job_collection_id=function_job_collection_id, - wb_api_rpc=wb_api_rpc, - user_id=user_id, - product_name=product_name, + function_job_service=function_job_service, ) - return [ - await get_function_job( - job_id, wb_api_rpc=wb_api_rpc, user_id=user_id, product_name=product_name - ) - for job_id in function_job_collection.job_ids - ] + + +@function_job_collections_router.get( + "/{function_job_collection_id:uuid}/function_jobs/page", + responses={**_COMMON_FUNCTION_JOB_COLLECTION_ERROR_RESPONSES}, + response_model=Page[RegisteredFunctionJob], + description=create_route_description( + base="Get the function jobs in function job collection", + changelog=[ + FMSG_CHANGELOG_NEW_IN_VERSION.format(JOB_LIST_PAGE_RELEASE_VERSION), + ], + ), +) +async def function_job_collection_list_function_jobs_page( + function_job_collection_id: FunctionJobCollectionID, + function_job_service: Annotated[ + FunctionJobService, Depends(get_function_job_service) + ], + page_params: Annotated[PaginationParams, Depends()], +) -> AbstractPage[RegisteredFunctionJob]: + function_jobs_list, meta = await function_job_service.list_function_jobs( + filter_by_function_job_collection_id=function_job_collection_id, + pagination_offset=page_params.offset, + pagination_limit=page_params.limit, + ) + return create_page(function_jobs_list, total=meta.total, params=page_params) + + 
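# Illustrative sketch: paging through the jobs of a collection via the new
# ".../function_jobs/page" route above. The base URL, the "/v0/function_job_collections"
# mount point and the key/secret credentials are placeholders assumed for this example;
# the "offset"/"limit" query parameters follow the PaginationParams used by the handler.
import httpx


async def fetch_collection_jobs_page(
    collection_id: str, *, offset: int = 0, limit: int = 20
) -> dict:
    async with httpx.AsyncClient(base_url="https://api.example.com/v0") as client:
        response = await client.get(
            f"/function_job_collections/{collection_id}/function_jobs/page",
            params={"offset": offset, "limit": limit},
            auth=("MY_API_KEY", "MY_API_SECRET"),  # placeholder HTTP basic-auth credentials
        )
        response.raise_for_status()
        # one page of RegisteredFunctionJob items plus pagination metadata
        return response.json()


# usage (in an async context): await fetch_collection_jobs_page("some-collection-uuid")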
+@function_job_collections_router.get( + "/{function_job_collection_id:uuid}/function_jobs/list", + responses={**_COMMON_FUNCTION_JOB_COLLECTION_ERROR_RESPONSES}, + description=create_route_description( + base="Get the function jobs in function job collection", + changelog=[ + FMSG_CHANGELOG_NEW_IN_VERSION.format(JOB_LIST_PAGE_RELEASE_VERSION), + ], + ), +) +async def function_job_collection_list_function_jobs_list( + function_job_collection_id: FunctionJobCollectionID, + function_job_service: Annotated[ + FunctionJobService, Depends(get_function_job_service) + ], +) -> list[RegisteredFunctionJob]: + function_jobs_list, _ = await function_job_service.list_function_jobs( + filter_by_function_job_collection_id=function_job_collection_id, + ) + return function_jobs_list @function_job_collections_router.get( @@ -207,9 +260,11 @@ async def function_job_collection_list_function_jobs( async def function_job_collection_status( function_job_collection_id: FunctionJobCollectionID, wb_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], - director2_api: Annotated[DirectorV2Api, Depends(get_api_client(DirectorV2Api))], user_id: Annotated[UserID, Depends(get_current_user_id)], # Updated type product_name: Annotated[ProductName, Depends(get_product_name)], + function_job_task_client_service: Annotated[ + FunctionJobTaskClientService, Depends(get_function_job_task_client_service) + ], ) -> FunctionJobCollectionStatus: function_job_collection = await get_function_job_collection( function_job_collection_id=function_job_collection_id, @@ -218,16 +273,23 @@ async def function_job_collection_status( product_name=product_name, ) - job_statuses = await asyncio.gather( + job_statuses = await limited_gather( *[ - function_job_status( - job_id, - wb_api_rpc=wb_api_rpc, - director2_api=director2_api, - user_id=user_id, - product_name=product_name, + function_job_task_client_service.inspect_function_job( + function_job=await get_function_job( + function_job_id=function_job_id, + wb_api_rpc=wb_api_rpc, + user_id=user_id, + product_name=product_name, + ), + function=await get_function_from_functionjobid( + function_job_id=function_job_id, + wb_api_rpc=wb_api_rpc, + user_id=user_id, + product_name=product_name, + ), ) - for job_id in function_job_collection.job_ids + for function_job_id in function_job_collection.job_ids ] ) return FunctionJobCollectionStatus( diff --git a/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py index cd461b95fb3a..1977284b1c61 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/function_jobs_routes.py @@ -1,9 +1,10 @@ +from logging import getLogger from typing import Annotated, Final -from fastapi import APIRouter, Depends, status +from fastapi import APIRouter, Depends, FastAPI, HTTPException, Query, status from fastapi_pagination.api import create_page +from models_library.api_schemas_long_running_tasks.tasks import TaskGet from models_library.api_schemas_webserver.functions import ( - Function, FunctionClass, FunctionJob, FunctionJobID, @@ -11,36 +12,55 @@ FunctionOutputs, RegisteredFunctionJob, ) +from models_library.functions import RegisteredFunction from models_library.functions_errors import ( UnsupportedFunctionClassError, - UnsupportedFunctionFunctionJobClassCombinationError, ) from models_library.products import ProductName from 
models_library.users import UserID -from sqlalchemy.ext.asyncio import AsyncEngine +from servicelib.fastapi.dependencies import get_app -from ...models.pagination import Page, PaginationParams +from ..._service_function_jobs import FunctionJobService +from ..._service_function_jobs_task_client import ( + FunctionJobTaskClientService, +) +from ..._service_functions import FunctionService +from ..._service_jobs import JobService +from ...models.domain.functions import PageRegisteredFunctionJobWithorWithoutStatus +from ...models.pagination import PaginationParams from ...models.schemas.errors import ErrorGet -from ...services_http.director_v2 import DirectorV2Api -from ...services_http.storage import StorageApi -from ...services_http.webserver import AuthSession +from ...models.schemas.functions import FunctionJobsListFilters from ...services_rpc.wb_api_server import WbApiRpcClient from ..dependencies.authentication import get_current_user_id, get_product_name -from ..dependencies.database import get_db_asyncpg_engine -from ..dependencies.services import get_api_client -from ..dependencies.webserver_http import get_webserver_session +from ..dependencies.functions import ( + get_function_from_functionjob, + get_function_job_dependency, + get_stored_job_outputs, +) +from ..dependencies.models_schemas_function_filters import get_function_jobs_filters +from ..dependencies.services import ( + get_function_job_service, + get_function_job_task_client_service, + get_function_service, + get_job_service, +) from ..dependencies.webserver_rpc import get_wb_api_rpc_client -from . import solvers_jobs, solvers_jobs_read, studies_jobs from ._constants import ( FMSG_CHANGELOG_ADDED_IN_VERSION, FMSG_CHANGELOG_NEW_IN_VERSION, create_route_description, ) +_logger = getLogger(__name__) + # pylint: disable=too-many-arguments # pylint: disable=cyclic-import +JOB_LIST_FILTER_PAGE_RELEASE_VERSION = "0.11.0" +JOB_LOG_RELEASE_VERSION = "0.11.0" +WITH_STATUS_RELEASE_VERSION = "0.13.0" + function_job_router = APIRouter() _COMMON_FUNCTION_JOB_ERROR_RESPONSES: Final[dict] = { @@ -70,26 +90,65 @@ "add `created_at` field in the registered function-related objects", ) ) + if endpoint == "list_function_jobs": + CHANGE_LOGS[endpoint].append( + FMSG_CHANGELOG_ADDED_IN_VERSION.format( + JOB_LIST_FILTER_PAGE_RELEASE_VERSION, + "add filter by `function_id`, `function_job_ids` and `function_job_collection_id`", + ) + ) + + if endpoint in ["list_function_jobs"]: + CHANGE_LOGS[endpoint].append( + FMSG_CHANGELOG_ADDED_IN_VERSION.format( + WITH_STATUS_RELEASE_VERSION, + "add include_status bool query parameter to list function jobs with their status", + ) + ) @function_job_router.get( "", - response_model=Page[RegisteredFunctionJob], + response_model=PageRegisteredFunctionJobWithorWithoutStatus, description=create_route_description( base="List function jobs", changelog=CHANGE_LOGS["list_function_jobs"] ), ) async def list_function_jobs( - wb_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], page_params: Annotated[PaginationParams, Depends()], - user_id: Annotated[UserID, Depends(get_current_user_id)], - product_name: Annotated[ProductName, Depends(get_product_name)], + function_job_task_client_service: Annotated[ + FunctionJobTaskClientService, Depends(get_function_job_task_client_service) + ], + function_job_service: Annotated[ + FunctionJobService, Depends(get_function_job_service) + ], + filters: Annotated[FunctionJobsListFilters, Depends(get_function_jobs_filters)], + include_status: Annotated[ # noqa: FBT002 + 
bool, Query(description="Include job status in response") + ] = False, ): - function_jobs_list, meta = await wb_api_rpc.list_function_jobs( + if include_status: + function_jobs_list_ws, meta = ( + await function_job_task_client_service.list_function_jobs_with_status( + pagination_offset=page_params.offset, + pagination_limit=page_params.limit, + filter_by_function_job_ids=filters.function_job_ids, + filter_by_function_job_collection_id=filters.function_job_collection_id, + filter_by_function_id=filters.function_id, + ) + ) + return create_page( + function_jobs_list_ws, + total=meta.total, + params=page_params, + ) + + function_jobs_list, meta = await function_job_service.list_function_jobs( pagination_offset=page_params.offset, pagination_limit=page_params.limit, - user_id=user_id, - product_name=product_name, + filter_by_function_job_ids=filters.function_job_ids, + filter_by_function_job_collection_id=filters.function_job_collection_id, + filter_by_function_id=filters.function_id, ) return create_page( @@ -168,56 +227,26 @@ async def delete_function_job( ), ) async def function_job_status( - function_job_id: FunctionJobID, - director2_api: Annotated[DirectorV2Api, Depends(get_api_client(DirectorV2Api))], - user_id: Annotated[UserID, Depends(get_current_user_id)], - product_name: Annotated[ProductName, Depends(get_product_name)], - wb_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], + function_job: Annotated[ + RegisteredFunctionJob, Depends(get_function_job_dependency) + ], + function: Annotated[RegisteredFunction, Depends(get_function_from_functionjob)], + function_job_task_client_service: Annotated[ + FunctionJobTaskClientService, Depends(get_function_job_task_client_service) + ], ) -> FunctionJobStatus: - - function, function_job = await get_function_from_functionjobid( - wb_api_rpc=wb_api_rpc, - function_job_id=function_job_id, - user_id=user_id, - product_name=product_name, - ) - - if ( - function.function_class == FunctionClass.PROJECT - and function_job.function_class == FunctionClass.PROJECT - ): - job_status = await studies_jobs.inspect_study_job( - study_id=function.project_id, - job_id=function_job.project_job_id, - user_id=user_id, - director2_api=director2_api, - ) - return FunctionJobStatus(status=job_status.state) - - if (function.function_class == FunctionClass.SOLVER) and ( - function_job.function_class == FunctionClass.SOLVER - ): - job_status = await solvers_jobs.inspect_job( - solver_key=function.solver_key, - version=function.solver_version, - job_id=function_job.solver_job_id, - user_id=user_id, - director2_api=director2_api, - ) - return FunctionJobStatus(status=job_status.state) - - raise UnsupportedFunctionFunctionJobClassCombinationError( - function_class=function.function_class, - function_job_class=function_job.function_class, + return await function_job_task_client_service.inspect_function_job( + function=function, function_job=function_job ) async def get_function_from_functionjobid( wb_api_rpc: WbApiRpcClient, function_job_id: FunctionJobID, + function_service: Annotated[FunctionService, Depends(get_function_service)], user_id: Annotated[UserID, Depends(get_current_user_id)], product_name: Annotated[ProductName, Depends(get_product_name)], -) -> tuple[Function, FunctionJob]: +) -> tuple[RegisteredFunction, RegisteredFunctionJob]: function_job = await get_function_job( wb_api_rpc=wb_api_rpc, function_job_id=function_job_id, @@ -225,14 +254,9 @@ async def get_function_from_functionjobid( product_name=product_name, ) - from .functions_routes 
import get_function - return ( - await get_function( - wb_api_rpc=wb_api_rpc, + await function_service.get_function( function_id=function_job.function_uid, - user_id=user_id, - product_name=product_name, ), function_job, ) @@ -248,52 +272,90 @@ async def get_function_from_functionjobid( ), ) async def function_job_outputs( + function_job: Annotated[ + RegisteredFunctionJob, Depends(get_function_job_dependency) + ], + function_job_task_client_service: Annotated[ + FunctionJobTaskClientService, Depends(get_function_job_task_client_service) + ], + function: Annotated[RegisteredFunction, Depends(get_function_from_functionjob)], + stored_job_outputs: Annotated[FunctionOutputs, Depends(get_stored_job_outputs)], +) -> FunctionOutputs: + return await function_job_task_client_service.function_job_outputs( + function_job=function_job, + function=function, + stored_job_outputs=stored_job_outputs, + ) + + +@function_job_router.post( + "/{function_job_id:uuid}/log", + response_model=TaskGet, + responses={**_COMMON_FUNCTION_JOB_ERROR_RESPONSES}, + description=create_route_description( + base="Get function job logs task", + changelog=[ + FMSG_CHANGELOG_NEW_IN_VERSION.format(JOB_LOG_RELEASE_VERSION), + ], + ), +) +async def get_function_job_logs_task( function_job_id: FunctionJobID, - webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], + app: Annotated[FastAPI, Depends(get_app)], + wb_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], + job_service: Annotated[JobService, Depends(get_job_service)], + function_service: Annotated[FunctionService, Depends(get_function_service)], user_id: Annotated[UserID, Depends(get_current_user_id)], product_name: Annotated[ProductName, Depends(get_product_name)], - storage_client: Annotated[StorageApi, Depends(get_api_client(StorageApi))], - wb_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], - async_pg_engine: Annotated[AsyncEngine, Depends(get_db_asyncpg_engine)], -) -> FunctionOutputs: +): function, function_job = await get_function_from_functionjobid( wb_api_rpc=wb_api_rpc, function_job_id=function_job_id, + function_service=function_service, user_id=user_id, product_name=product_name, ) + app_router = app.router if ( function.function_class == FunctionClass.PROJECT and function_job.function_class == FunctionClass.PROJECT ): - return dict( - ( - await studies_jobs.get_study_job_outputs( - study_id=function.project_id, - job_id=function_job.project_job_id, - user_id=user_id, - webserver_api=webserver_api, - storage_client=storage_client, - ) - ).results + if function_job.project_job_id is None: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Could not find project job", + ) + async_job_get = await job_service.start_log_export( + job_id=function_job.project_job_id, + ) + _task_id = f"{async_job_get.job_id}" + return TaskGet( + task_id=_task_id, + task_name=async_job_get.job_name, + status_href=app_router.url_path_for("get_task_status", task_id=_task_id), + abort_href=app_router.url_path_for("cancel_task", task_id=_task_id), + result_href=app_router.url_path_for("get_task_result", task_id=_task_id), ) if ( function.function_class == FunctionClass.SOLVER and function_job.function_class == FunctionClass.SOLVER ): - return dict( - ( - await solvers_jobs_read.get_job_outputs( - solver_key=function.solver_key, - version=function.solver_version, - job_id=function_job.solver_job_id, - user_id=user_id, - webserver_api=webserver_api, - storage_client=storage_client, - async_pg_engine=async_pg_engine, 
- ) - ).results + if function_job.solver_job_id is None: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Could not find solver job", + ) + async_job_get = await job_service.start_log_export( + job_id=function_job.solver_job_id, + ) + _task_id = f"{async_job_get.job_id}" + return TaskGet( + task_id=_task_id, + task_name=async_job_get.job_name, + status_href=app_router.url_path_for("get_task_status", task_id=_task_id), + abort_href=app_router.url_path_for("cancel_task", task_id=_task_id), + result_href=app_router.url_path_for("get_task_result", task_id=_task_id), ) raise UnsupportedFunctionClassError(function_class=function.function_class) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py index 4551c313c164..4cb2a58d87e9 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/functions_routes.py @@ -1,54 +1,53 @@ +# pylint: disable=too-many-positional-arguments from collections.abc import Callable -from typing import Annotated, Final +from typing import Annotated, Final, Literal -import jsonschema -from fastapi import APIRouter, Depends, Request, status +from fastapi import APIRouter, Depends, Header, Request, status from fastapi_pagination.api import create_page -from jsonschema import ValidationError +from fastapi_pagination.bases import AbstractPage from models_library.api_schemas_api_server.functions import ( Function, - FunctionClass, FunctionID, FunctionInputs, FunctionInputSchema, FunctionInputsList, - FunctionJobCollection, FunctionOutputSchema, - FunctionSchemaClass, - ProjectFunctionJob, RegisteredFunction, RegisteredFunctionJob, RegisteredFunctionJobCollection, - SolverFunctionJob, -) -from models_library.functions_errors import ( - FunctionInputsValidationError, - UnsupportedFunctionClassError, ) +from models_library.functions import FunctionJobCollection, FunctionJobID from models_library.products import ProductName -from models_library.projects_state import RunningState +from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID from models_library.users import UserID from servicelib.fastapi.dependencies import get_reverse_url_mapper -from simcore_service_api_server._service_jobs import JobService +from servicelib.utils import limited_gather -from ..._service_solvers import SolverService +from ..._service_function_jobs import FunctionJobService +from ..._service_function_jobs_task_client import FunctionJobTaskClientService +from ..._service_functions import FunctionService from ...models.pagination import Page, PaginationParams from ...models.schemas.errors import ErrorGet -from ...models.schemas.jobs import JobInputs -from ...services_http.director_v2 import DirectorV2Api -from ...services_http.webserver import AuthSession +from ...models.schemas.jobs import JobPricingSpecification from ...services_rpc.wb_api_server import WbApiRpcClient -from ..dependencies.authentication import get_current_user_id, get_product_name -from ..dependencies.services import get_api_client, get_job_service, get_solver_service -from ..dependencies.webserver_http import get_webserver_session +from ..dependencies.authentication import ( + Identity, + get_current_identity, + get_current_user_id, + get_product_name, +) +from ..dependencies.services import ( + get_function_job_service, + 
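Note: the `TaskGet` envelope (task_id, task_name, and the status/abort/result hrefs resolved with `app_router.url_path_for`) is assembled by hand in both branches of `get_function_job_logs_task` above, and again by the routes in the new `tasks.py` module later in this diff. A small helper could factor that out; the sketch below is hypothetical (not part of this PR) — the route names `"get_task_status"`, `"cancel_task"`, `"get_task_result"` and the `TaskGet` fields are taken from the diff itself, everything else is an assumption.

```python
# Hypothetical helper (not in this PR): build the TaskGet envelope returned by
# the long-running-task routes instead of repeating the url_path_for calls.
from fastapi.routing import APIRouter
from models_library.api_schemas_long_running_tasks.tasks import TaskGet


def to_task_get(app_router: APIRouter, *, task_id: str, task_name: str) -> TaskGet:
    # route names match the ones registered in tasks.py
    return TaskGet(
        task_id=task_id,
        task_name=task_name,
        status_href=app_router.url_path_for("get_task_status", task_id=task_id),
        abort_href=app_router.url_path_for("cancel_task", task_id=task_id),
        result_href=app_router.url_path_for("get_task_result", task_id=task_id),
    )
```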
get_function_job_task_client_service, + get_function_service, +) from ..dependencies.webserver_rpc import get_wb_api_rpc_client -from . import solvers_jobs, studies_jobs from ._constants import ( FMSG_CHANGELOG_ADDED_IN_VERSION, FMSG_CHANGELOG_NEW_IN_VERSION, create_route_description, ) -from .function_jobs_routes import register_function_job # pylint: disable=too-many-arguments # pylint: disable=cyclic-import @@ -131,12 +130,10 @@ async def register_function( ) async def get_function( function_id: FunctionID, - wb_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], - user_id: Annotated[UserID, Depends(get_current_user_id)], - product_name: Annotated[ProductName, Depends(get_product_name)], + function_service: Annotated[FunctionService, Depends(get_function_service)], ) -> RegisteredFunction: - return await wb_api_rpc.get_function( - function_id=function_id, user_id=user_id, product_name=product_name + return await function_service.get_function( + function_id=function_id, ) @@ -149,16 +146,12 @@ async def get_function( ), ) async def list_functions( - wb_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], + function_service: Annotated[FunctionService, Depends(get_function_service)], page_params: Annotated[PaginationParams, Depends()], - user_id: Annotated[UserID, Depends(get_current_user_id)], - product_name: Annotated[ProductName, Depends(get_product_name)], -): - functions_list, meta = await wb_api_rpc.list_functions( +) -> AbstractPage[RegisteredFunction]: + functions_list, meta = await function_service.list_functions( pagination_offset=page_params.offset, pagination_limit=page_params.limit, - user_id=user_id, - product_name=product_name, ) return create_page( @@ -178,17 +171,15 @@ async def list_functions( ) async def list_function_jobs_for_functionid( function_id: FunctionID, - wb_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], + function_job_service: Annotated[ + FunctionJobService, Depends(get_function_job_service) + ], page_params: Annotated[PaginationParams, Depends()], - user_id: Annotated[UserID, Depends(get_current_user_id)], - product_name: Annotated[ProductName, Depends(get_product_name)], -): - function_jobs_list, meta = await wb_api_rpc.list_function_jobs( +) -> AbstractPage[RegisteredFunctionJob]: + function_jobs_list, meta = await function_job_service.list_function_jobs( pagination_offset=page_params.offset, pagination_limit=page_params.limit, filter_by_function_id=function_id, - user_id=user_id, - product_name=product_name, ) return create_page( @@ -251,20 +242,6 @@ async def update_function_description( return returned_function -def _join_inputs( - default_inputs: FunctionInputs | None, - function_inputs: FunctionInputs | None, -) -> FunctionInputs: - if default_inputs is None: - return function_inputs - - if function_inputs is None: - return default_inputs - - # last dict will override defaults - return {**default_inputs, **function_inputs} - - @function_router.get( "/{function_id:uuid}/input_schema", response_model=FunctionInputSchema, @@ -322,29 +299,13 @@ async def get_function_outputschema( async def validate_function_inputs( function_id: FunctionID, inputs: FunctionInputs, - wb_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], - user_id: Annotated[UserID, Depends(get_current_user_id)], - product_name: Annotated[ProductName, Depends(get_product_name)], + function_job_service: Annotated[ + FunctionJobService, Depends(get_function_job_service) + ], ) -> tuple[bool, str]: - function = await 
wb_api_rpc.get_function( - function_id=function_id, user_id=user_id, product_name=product_name - ) - - if function.input_schema is None or function.input_schema.schema_content is None: - return True, "No input schema defined for this function" - - if function.input_schema.schema_class == FunctionSchemaClass.json_schema: - try: - jsonschema.validate( - instance=inputs, schema=function.input_schema.schema_content - ) - except ValidationError as err: - return False, str(err) - return True, "Inputs are valid" - - return ( - False, - f"Unsupported function schema class {function.input_schema.schema_class}", + return await function_job_service.validate_function_inputs( + function_id=function_id, + inputs=inputs, ) @@ -357,129 +318,41 @@ async def validate_function_inputs( changelog=CHANGE_LOGS["run_function"], ), ) -async def run_function( # noqa: PLR0913 +async def run_function( request: Request, - wb_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], - webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], + user_identity: Annotated[Identity, Depends(get_current_identity)], + to_run_function: Annotated[RegisteredFunction, Depends(get_function)], url_for: Annotated[Callable, Depends(get_reverse_url_mapper)], - director2_api: Annotated[DirectorV2Api, Depends(get_api_client(DirectorV2Api))], - function_id: FunctionID, function_inputs: FunctionInputs, - user_id: Annotated[UserID, Depends(get_current_user_id)], - product_name: Annotated[str, Depends(get_product_name)], - solver_service: Annotated[SolverService, Depends(get_solver_service)], - job_service: Annotated[JobService, Depends(get_job_service)], + function_service: Annotated[FunctionService, Depends(get_function_service)], + function_job_task_client_service: Annotated[ + FunctionJobTaskClientService, Depends(get_function_job_task_client_service) + ], + x_simcore_parent_project_uuid: Annotated[ProjectID | Literal["null"], Header()], + x_simcore_parent_node_id: Annotated[NodeID | Literal["null"], Header()], ) -> RegisteredFunctionJob: - - from .function_jobs_routes import function_job_status - - to_run_function = await wb_api_rpc.get_function( - function_id=function_id, user_id=user_id, product_name=product_name + # preprocess inputs + parent_project_uuid = ( + x_simcore_parent_project_uuid + if isinstance(x_simcore_parent_project_uuid, ProjectID) + else None ) - - joined_inputs = _join_inputs( - to_run_function.default_inputs, - function_inputs, + parent_node_id = ( + x_simcore_parent_node_id + if isinstance(x_simcore_parent_node_id, NodeID) + else None ) - - if to_run_function.input_schema is not None: - is_valid, validation_str = await validate_function_inputs( - function_id=to_run_function.uid, - inputs=joined_inputs, - wb_api_rpc=wb_api_rpc, - user_id=user_id, - product_name=product_name, - ) - if not is_valid: - raise FunctionInputsValidationError(error=validation_str) - - if cached_function_jobs := await wb_api_rpc.find_cached_function_jobs( - function_id=to_run_function.uid, - inputs=joined_inputs, - user_id=user_id, - product_name=product_name, - ): - for cached_function_job in cached_function_jobs: - job_status = await function_job_status( - wb_api_rpc=wb_api_rpc, - director2_api=director2_api, - function_job_id=cached_function_job.uid, - user_id=user_id, - product_name=product_name, - ) - if job_status.status == RunningState.SUCCESS: - return cached_function_job - - if to_run_function.function_class == FunctionClass.PROJECT: - study_job = await studies_jobs.create_study_job( - 
study_id=to_run_function.project_id, - job_inputs=JobInputs(values=joined_inputs or {}), - webserver_api=webserver_api, - wb_api_rpc=wb_api_rpc, - url_for=url_for, - x_simcore_parent_project_uuid=None, - x_simcore_parent_node_id=None, - user_id=user_id, - product_name=product_name, - ) - await studies_jobs.start_study_job( - request=request, - study_id=to_run_function.project_id, - job_id=study_job.id, - user_id=user_id, - webserver_api=webserver_api, - director2_api=director2_api, - ) - return await register_function_job( - wb_api_rpc=wb_api_rpc, - function_job=ProjectFunctionJob( - function_uid=to_run_function.uid, - title=f"Function job of function {to_run_function.uid}", - description=to_run_function.description, - inputs=joined_inputs, - outputs=None, - project_job_id=study_job.id, - ), - user_id=user_id, - product_name=product_name, - ) - - if to_run_function.function_class == FunctionClass.SOLVER: - solver_job = await solvers_jobs.create_solver_job( - solver_key=to_run_function.solver_key, - version=to_run_function.solver_version, - inputs=JobInputs(values=joined_inputs or {}), - solver_service=solver_service, - job_service=job_service, - url_for=url_for, - x_simcore_parent_project_uuid=None, - x_simcore_parent_node_id=None, - ) - await solvers_jobs.start_job( - request=request, - solver_key=to_run_function.solver_key, - version=to_run_function.solver_version, - job_id=solver_job.id, - user_id=user_id, - webserver_api=webserver_api, - director2_api=director2_api, - ) - return await register_function_job( - wb_api_rpc=wb_api_rpc, - function_job=SolverFunctionJob( - function_uid=to_run_function.uid, - title=f"Function job of function {to_run_function.uid}", - description=to_run_function.description, - inputs=joined_inputs, - outputs=None, - solver_job_id=solver_job.id, - ), - user_id=user_id, - product_name=product_name, - ) - - raise UnsupportedFunctionClassError( - function_class=to_run_function.function_class, + pricing_spec = JobPricingSpecification.create_from_headers(request.headers) + job_links = await function_service.get_function_job_links(to_run_function, url_for) + + return await function_job_task_client_service.create_function_job_creation_task( + function=to_run_function, + function_inputs=function_inputs, + user_identity=user_identity, + pricing_spec=pricing_spec, + job_links=job_links, + parent_project_uuid=parent_project_uuid, + parent_node_id=parent_node_id, ) @@ -520,48 +393,70 @@ async def delete_function( changelog=CHANGE_LOGS["map_function"], ), ) -async def map_function( # noqa: PLR0913 - function_id: FunctionID, - function_inputs_list: FunctionInputsList, +async def map_function( request: Request, - wb_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], - webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], + user_identity: Annotated[Identity, Depends(get_current_identity)], + to_run_function: Annotated[RegisteredFunction, Depends(get_function)], + function_inputs_list: FunctionInputsList, url_for: Annotated[Callable, Depends(get_reverse_url_mapper)], - director2_api: Annotated[DirectorV2Api, Depends(get_api_client(DirectorV2Api))], - user_id: Annotated[UserID, Depends(get_current_user_id)], - product_name: Annotated[str, Depends(get_product_name)], - solver_service: Annotated[SolverService, Depends(get_solver_service)], - job_service: Annotated[JobService, Depends(get_job_service)], + function_job_task_client_service: Annotated[ + FunctionJobTaskClientService, Depends(get_function_job_task_client_service) + ], + 
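Note: `run_function` (and `map_function` below) repeats the same `isinstance` check to map the `"null"` sentinel accepted in the `X-Simcore-Parent-Project-Uuid` / `X-Simcore-Parent-Node-Id` headers to `None`. A tiny helper could centralize that; the sketch below is hypothetical (not part of the PR) and assumes `ProjectID`/`NodeID` are UUID types.

```python
# Hypothetical helper, not in this PR: normalize the "null" sentinel accepted
# in the X-Simcore-Parent-* headers to None.
from typing import Literal, TypeVar
from uuid import UUID

ParentIdT = TypeVar("ParentIdT", bound=UUID)


def parent_id_or_none(value: ParentIdT | Literal["null"]) -> ParentIdT | None:
    return value if isinstance(value, UUID) else None


# usage inside run_function / map_function (sketch):
# parent_project_uuid = parent_id_or_none(x_simcore_parent_project_uuid)
# parent_node_id = parent_id_or_none(x_simcore_parent_node_id)
```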
function_service: Annotated[FunctionService, Depends(get_function_service)], + web_api_rpc_client: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], + x_simcore_parent_project_uuid: Annotated[ProjectID | Literal["null"], Header()], + x_simcore_parent_node_id: Annotated[NodeID | Literal["null"], Header()], ) -> RegisteredFunctionJobCollection: - function_jobs = [] - function_jobs = [ - await run_function( - wb_api_rpc=wb_api_rpc, - function_id=function_id, - function_inputs=function_inputs, - product_name=product_name, - user_id=user_id, - webserver_api=webserver_api, - url_for=url_for, - director2_api=director2_api, - request=request, - solver_service=solver_service, - job_service=job_service, + parent_project_uuid = ( + x_simcore_parent_project_uuid + if isinstance(x_simcore_parent_project_uuid, ProjectID) + else None + ) + parent_node_id = ( + x_simcore_parent_node_id + if isinstance(x_simcore_parent_node_id, NodeID) + else None + ) + pricing_spec = JobPricingSpecification.create_from_headers(request.headers) + job_links = await function_service.get_function_job_links(to_run_function, url_for) + + async def _run_single_function(function_inputs: FunctionInputs) -> FunctionJobID: + result = ( + await function_job_task_client_service.create_function_job_creation_task( + function=to_run_function, + function_inputs=function_inputs, + user_identity=user_identity, + pricing_spec=pricing_spec, + job_links=job_links, + parent_project_uuid=parent_project_uuid, + parent_node_id=parent_node_id, + ) ) - for function_inputs in function_inputs_list - ] + return result.uid + + # Run all tasks concurrently, allowing them to complete even if some fail + results = await limited_gather( + *[ + _run_single_function(function_inputs) + for function_inputs in function_inputs_list + ], + reraise=False, + limit=1, + ) - function_job_collection_description = f"Function job collection of map of function {function_id} with {len(function_inputs_list)} inputs" - # Import here to avoid circular import - from .function_job_collections_routes import register_function_job_collection + # Check if any tasks raised exceptions and raise the first one found + for result in results: + if isinstance(result, BaseException): + raise result - return await register_function_job_collection( - wb_api_rpc=wb_api_rpc, + # At this point, all results are FunctionJobID since we've checked for exceptions + function_job_collection_description = f"Function job collection of map of function {to_run_function.uid} with {len(function_inputs_list)} inputs" + return await web_api_rpc_client.register_function_job_collection( function_job_collection=FunctionJobCollection( title="Function job collection of function map", description=function_job_collection_description, - job_ids=[function_job.uid for function_job in function_jobs], + job_ids=results, # type: ignore ), - user_id=user_id, - product_name=product_name, + user_id=user_identity.user_id, + product_name=user_identity.product_name, ) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/programs.py b/services/api-server/src/simcore_service_api_server/api/routes/programs.py index 2ccd9d5ed7b3..86910bf754a8 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/programs.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/programs.py @@ -16,6 +16,7 @@ complete_file_upload, get_upload_links_from_s3, ) +from simcore_service_api_server.models.api_resources import JobLinks from ..._service_jobs import JobService from 
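Note: `map_function` now fans the per-input runs out through `servicelib.utils.limited_gather` with `reraise=False`, so failures come back as exception objects in the results list and the first one is re-raised only after every run has finished; with `limit=1` the runs are effectively sequential, since the limit is what bounds concurrency. The snippet below is a self-contained illustration of that gather-then-raise pattern using plain `asyncio` — it is a stand-in for the pattern, not servicelib's actual `limited_gather`.

```python
# Illustration of the bounded-gather / collect-exceptions pattern used by
# map_function. This is NOT servicelib's limited_gather, only a minimal stand-in.
import asyncio
from collections.abc import Awaitable
from typing import Any


async def gather_limited(*aws: Awaitable[Any], limit: int) -> list[Any]:
    semaphore = asyncio.Semaphore(limit)

    async def _bounded(aw: Awaitable[Any]) -> Any:
        async with semaphore:
            return await aw

    # return_exceptions=True mirrors reraise=False: failures become results
    return await asyncio.gather(*(_bounded(aw) for aw in aws), return_exceptions=True)


async def _demo() -> None:
    async def work(i: int) -> int:
        if i == 2:
            raise ValueError(f"input {i} failed")
        return i * 10

    results = await gather_limited(*(work(i) for i in range(4)), limit=1)
    try:
        # re-raise the first failure only after all runs have completed
        for result in results:
            if isinstance(result, BaseException):
                raise result
    except ValueError as err:
        print(f"collected failure re-raised after gather: {err}")


if __name__ == "__main__":
    asyncio.run(_demo())
```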
..._service_programs import ProgramService @@ -153,7 +154,6 @@ async def create_program_job( user_id: Annotated[PositiveInt, Depends(get_current_user_id)], program_service: Annotated[ProgramService, Depends(get_program_service)], job_service: Annotated[JobService, Depends(get_job_service)], - url_for: Annotated[Callable, Depends(get_reverse_url_mapper)], x_simcore_parent_project_uuid: Annotated[ProjectID | None, Header()] = None, x_simcore_parent_node_id: Annotated[NodeID | None, Header()] = None, name: Annotated[ @@ -171,15 +171,20 @@ async def create_program_job( name=program_key, version=version, ) + job_rest_interface_links = JobLinks( + url_template=None, + runner_url_template=None, + outputs_url_template=None, + ) - job, project = await job_service.create_job( + job, project = await job_service.create_project_marked_as_job( project_name=name, description=description, solver_or_program=program, inputs=inputs, parent_project_uuid=x_simcore_parent_project_uuid, parent_node_id=x_simcore_parent_node_id, - url_for=url_for, + job_links=job_rest_interface_links, hidden=False, ) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/solvers.py b/services/api-server/src/simcore_service_api_server/api/routes/solvers.py index f6dee01f5695..d0dcfb2c30b6 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/solvers.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/solvers.py @@ -343,7 +343,7 @@ async def list_solver_ports( ) solver_ports = [SolverPort.model_validate(port.model_dump()) for port in ports] - return OnePage[SolverPort].model_validate(dict(items=solver_ports)) + return OnePage[SolverPort].model_validate({"items": solver_ports}) @router.get( diff --git a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py index 83b5d83f5f03..1bca0e13f826 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs.py @@ -5,15 +5,13 @@ from typing import Annotated, Any from fastapi import APIRouter, Depends, Header, Query, Request, status -from fastapi.encoders import jsonable_encoder from fastapi.responses import JSONResponse from models_library.clusters import ClusterID from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from pydantic.types import PositiveInt -from ..._service_jobs import JobService -from ..._service_solvers import SolverService +from ..._service_jobs import JobService, compose_solver_job_resource_name from ...exceptions.backend_errors import ProjectAlreadyStartedError from ...exceptions.service_errors_utils import DEFAULT_BACKEND_SERVICE_STATUS_CODES from ...models.basic_types import VersionStr @@ -24,17 +22,18 @@ JobInputs, JobMetadata, JobMetadataUpdate, + JobPricingSpecification, JobStatus, ) from ...models.schemas.solvers import Solver, SolverKeyId from ...services_http.director_v2 import DirectorV2Api -from ...services_http.jobs import replace_custom_metadata, start_project, stop_project +from ...services_http.jobs import replace_custom_metadata, stop_project from ...services_http.solver_job_models_converters import ( - create_jobstatus_from_task, + get_solver_job_rest_interface_links, ) from ..dependencies.application import get_reverse_url_mapper from ..dependencies.authentication import get_current_user_id -from ..dependencies.services import get_api_client, 
get_job_service, get_solver_service +from ..dependencies.services import get_api_client, get_job_service from ..dependencies.webserver_http import AuthSession, get_webserver_session from ._constants import ( FMSG_CHANGELOG_ADDED_IN_VERSION, @@ -48,14 +47,6 @@ router = APIRouter() -def compose_job_resource_name(solver_key, solver_version, job_id) -> str: - """Creates a unique resource name for solver's jobs""" - return Job.compose_resource_name( - parent_name=Solver.compose_resource_name(solver_key, solver_version), - job_id=job_id, - ) - - # JOBS --------------- # # - Similar to docker container's API design (container = job and image = solver) @@ -97,7 +88,6 @@ async def create_solver_job( # noqa: PLR0913 solver_key: SolverKeyId, version: VersionStr, inputs: JobInputs, - solver_service: Annotated[SolverService, Depends(get_solver_service)], job_service: Annotated[JobService, Depends(get_job_service)], url_for: Annotated[Callable, Depends(get_reverse_url_mapper)], hidden: Annotated[bool, Query()] = True, @@ -109,24 +99,18 @@ async def create_solver_job( # noqa: PLR0913 NOTE: This operation does **not** start the job """ - # ensures user has access to solver - solver = await solver_service.get_solver( + return await job_service.create_solver_job( solver_key=solver_key, - solver_version=version, - ) - job, _ = await job_service.create_job( - project_name=None, - description=None, - solver_or_program=solver, + version=version, inputs=inputs, - url_for=url_for, hidden=hidden, - parent_project_uuid=x_simcore_parent_project_uuid, - parent_node_id=x_simcore_parent_node_id, + x_simcore_parent_project_uuid=x_simcore_parent_project_uuid, + x_simcore_parent_node_id=x_simcore_parent_node_id, + job_links=get_solver_job_rest_interface_links( + url_for=url_for, solver_key=solver_key, version=version + ), ) - return job - @router.delete( "/{solver_key:path}/releases/{version}/jobs/{job_id:uuid}", @@ -145,12 +129,42 @@ async def delete_job( job_id: JobID, webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], ): - job_name = compose_job_resource_name(solver_key, version, job_id) + job_name = compose_solver_job_resource_name(solver_key, version, job_id) _logger.debug("Deleting Job '%s'", job_name) await webserver_api.delete_project(project_id=job_id) +@router.delete( + "/{solver_key:path}/releases/{version}/jobs/{job_id:uuid}/assets", + status_code=status.HTTP_204_NO_CONTENT, + responses=JOBS_STATUS_CODES, + description=create_route_description( + base="Deletes assets associated with an existing solver job. N.B. 
this renders the solver job un-startable", + changelog=[ + FMSG_CHANGELOG_NEW_IN_VERSION.format("0.12"), + ], + ), +) +async def delete_job_assets( + solver_key: SolverKeyId, + version: VersionStr, + job_id: JobID, + job_service: Annotated[JobService, Depends(get_job_service)], +): + job_parent_resource_name = Solver.compose_resource_name(solver_key, version) + + # check that job exists and is accessible to user + project_job_rpc_get = await job_service.get_job( + job_parent_resource_name=job_parent_resource_name, job_id=job_id + ) + assert project_job_rpc_get.uuid == job_id # nosec + + await job_service.delete_job_assets( + job_parent_resource_name=job_parent_resource_name, job_id=job_id + ) + + @router.post( "/{solver_key:path}/releases/{version}/jobs/{job_id:uuid}:start", status_code=status.HTTP_202_ACCEPTED, @@ -165,6 +179,10 @@ async def delete_job( "description": "Cluster not found", "model": ErrorGet, }, + status.HTTP_409_CONFLICT: { + "description": "Job assets missing", + "model": ErrorGet, + }, status.HTTP_422_UNPROCESSABLE_ENTITY: { "description": "Configuration error", "model": ErrorGet, @@ -190,41 +208,27 @@ async def start_job( solver_key: SolverKeyId, version: VersionStr, job_id: JobID, - user_id: Annotated[PositiveInt, Depends(get_current_user_id)], - director2_api: Annotated[DirectorV2Api, Depends(get_api_client(DirectorV2Api))], - webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], + job_service: Annotated[JobService, Depends(get_job_service)], cluster_id: Annotated[ # pylint: disable=unused-argument # noqa: ARG001 ClusterID | None, Query(deprecated=True) ] = None, ): - job_name = compose_job_resource_name(solver_key, version, job_id) - _logger.debug("Start Job '%s'", job_name) + pricing_spec = JobPricingSpecification.create_from_headers(headers=request.headers) try: - await start_project( - request=request, - job_id=job_id, - expected_job_name=job_name, - webserver_api=webserver_api, - ) - except ProjectAlreadyStartedError: - job_status = await inspect_job( + return await job_service.start_solver_job( solver_key=solver_key, version=version, job_id=job_id, - user_id=user_id, - director2_api=director2_api, + pricing_spec=pricing_spec, + ) + except ProjectAlreadyStartedError: + job_status = await job_service.inspect_solver_job( + solver_key=solver_key, version=version, job_id=job_id ) return JSONResponse( - status_code=status.HTTP_200_OK, content=jsonable_encoder(job_status) + status_code=status.HTTP_200_OK, content=job_status.model_dump(mode="json") ) - return await inspect_job( - solver_key=solver_key, - version=version, - job_id=job_id, - user_id=user_id, - director2_api=director2_api, - ) @router.post( @@ -245,7 +249,7 @@ async def stop_job( user_id: Annotated[PositiveInt, Depends(get_current_user_id)], director2_api: Annotated[DirectorV2Api, Depends(get_api_client(DirectorV2Api))], ): - job_name = compose_job_resource_name(solver_key, version, job_id) + job_name = compose_solver_job_resource_name(solver_key, version, job_id) _logger.debug("Stopping Job '%s'", job_name) return await stop_project( @@ -268,15 +272,14 @@ async def inspect_job( solver_key: SolverKeyId, version: VersionStr, job_id: JobID, - user_id: Annotated[PositiveInt, Depends(get_current_user_id)], - director2_api: Annotated[DirectorV2Api, Depends(get_api_client(DirectorV2Api))], + job_service: Annotated[JobService, Depends(get_job_service)], ) -> JobStatus: - job_name = compose_job_resource_name(solver_key, version, job_id) + job_name = compose_solver_job_resource_name(solver_key, 
version, job_id) _logger.debug("Inspecting Job '%s'", job_name) - task = await director2_api.get_computation(project_id=job_id, user_id=user_id) - job_status: JobStatus = create_jobstatus_from_task(task) - return job_status + return await job_service.inspect_solver_job( + solver_key=solver_key, version=version, job_id=job_id + ) @router.patch( @@ -298,7 +301,7 @@ async def replace_job_custom_metadata( webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], url_for: Annotated[Callable, Depends(get_reverse_url_mapper)], ): - job_name = compose_job_resource_name(solver_key, version, job_id) + job_name = compose_solver_job_resource_name(solver_key, version, job_id) _logger.debug("Custom metadata for '%s'", job_name) return await replace_custom_metadata( diff --git a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs_read.py b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs_read.py index f46ea4d98922..31c06ab52a02 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs_read.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/solvers_jobs_read.py @@ -4,7 +4,7 @@ from collections import deque from collections.abc import Callable from functools import partial -from typing import Annotated, Any, Union +from typing import Annotated, Any from uuid import UUID from fastapi import APIRouter, Depends, Request, status @@ -12,32 +12,22 @@ from fastapi.responses import RedirectResponse from fastapi_pagination.api import create_page from models_library.api_schemas_webserver.projects import ProjectGet -from models_library.projects_nodes_io import BaseFileLink from models_library.users import UserID -from models_library.wallets import ZERO_CREDITS from pydantic import HttpUrl, NonNegativeInt from pydantic.types import PositiveInt from servicelib.logging_utils import log_context from sqlalchemy.ext.asyncio import AsyncEngine from starlette.background import BackgroundTask +from ..._service_jobs import JobService, compose_solver_job_resource_name from ..._service_solvers import SolverService -from ...exceptions.custom_errors import InsufficientCreditsError, MissingWalletError +from ...exceptions.custom_errors import MissingWalletError from ...exceptions.service_errors_utils import DEFAULT_BACKEND_SERVICE_STATUS_CODES from ...models.api_resources import parse_resources_ids from ...models.basic_types import LogStreamingResponse, NameValueTuple, VersionStr -from ...models.domain.files import File as DomainFile from ...models.pagination import Page, PaginationParams from ...models.schemas.errors import ErrorGet -from ...models.schemas.files import File as SchemaFile -from ...models.schemas.jobs import ( - ArgumentTypes, - Job, - JobID, - JobLog, - JobMetadata, - JobOutputs, -) +from ...models.schemas.jobs import Job, JobID, JobLog, JobMetadata, JobOutputs from ...models.schemas.jobs_filters import JobMetadataFilter from ...models.schemas.model_adapter import ( PricingUnitGetLegacy, @@ -50,26 +40,23 @@ raise_if_job_not_associated_with_solver, ) from ...services_http.log_streaming import LogDistributor, LogStreamer -from ...services_http.solver_job_models_converters import create_job_from_project -from ...services_http.solver_job_outputs import ResultsTypes, get_solver_output_results -from ...services_http.storage import StorageApi, to_file_api_model +from ...services_http.solver_job_models_converters import ( + create_job_from_project, + get_solver_job_rest_interface_links, +) from 
..dependencies.application import get_reverse_url_mapper from ..dependencies.authentication import get_current_user_id from ..dependencies.database import get_db_asyncpg_engine from ..dependencies.models_schemas_jobs_filters import get_job_metadata_filter from ..dependencies.rabbitmq import get_log_check_timeout, get_log_distributor -from ..dependencies.services import get_api_client, get_solver_service +from ..dependencies.services import get_api_client, get_job_service, get_solver_service from ..dependencies.webserver_http import AuthSession, get_webserver_session from ._constants import ( FMSG_CHANGELOG_NEW_IN_VERSION, FMSG_CHANGELOG_REMOVED_IN_VERSION_FORMAT, create_route_description, ) -from .solvers_jobs import ( - JOBS_STATUS_CODES, - METADATA_STATUS_CODES, - compose_job_resource_name, -) +from .solvers_jobs import JOBS_STATUS_CODES, METADATA_STATUS_CODES from .wallets import WALLET_STATUS_CODES _logger = logging.getLogger(__name__) @@ -111,7 +98,7 @@ _LOGSTREAM_STATUS_CODES: dict[int | str, dict[str, Any]] = { status.HTTP_200_OK: { "description": "Returns a JobLog or an ErrorGet", - "model": Union[JobLog, ErrorGet], + "model": JobLog | ErrorGet, }, status.HTTP_409_CONFLICT: { "description": "Conflict: Logs are already being streamed", @@ -140,11 +127,11 @@ async def list_all_solvers_jobs( filter_job_metadata_params: Annotated[ JobMetadataFilter | None, Depends(get_job_metadata_filter) ], - solver_service: Annotated[SolverService, Depends(get_solver_service)], + job_service: Annotated[JobService, Depends(get_job_service)], url_for: Annotated[Callable, Depends(get_reverse_url_mapper)], ): - jobs, meta = await solver_service.list_jobs( + jobs, meta = await job_service.list_solver_jobs( filter_any_custom_metadata=( [ NameValueTuple(filter_metadata.name, filter_metadata.pattern) @@ -203,9 +190,14 @@ async def list_jobs( ) jobs: deque[Job] = deque() + job_rest_interface_links = get_solver_job_rest_interface_links( + url_for=url_for, solver_key=solver_key, version=solver.version + ) for prj in projects_page.data: job = create_job_from_project( - solver_or_program=solver, project=prj, url_for=url_for + solver_or_program=solver, + project=prj, + job_links=job_rest_interface_links, ) assert job.id == prj.uuid # nosec assert job.name == prj.name # nosec @@ -247,9 +239,16 @@ async def list_jobs_paginated( projects_page = await webserver_api.get_projects_w_solver_page( solver_name=solver.name, limit=page_params.limit, offset=page_params.offset ) + job_rest_interface_links = get_solver_job_rest_interface_links( + url_for=url_for, solver_key=solver_key, version=version + ) jobs: list[Job] = [ - create_job_from_project(solver_or_program=solver, project=prj, url_for=url_for) + create_job_from_project( + solver_or_program=solver, + project=prj, + job_links=job_rest_interface_links, + ) for prj in projects_page.data ] @@ -275,7 +274,8 @@ async def get_job( ): """Gets job of a given solver""" _logger.debug( - "Getting Job '%s'", compose_job_resource_name(solver_key, version, job_id) + "Getting Job '%s'", + compose_solver_job_resource_name(solver_key, version, job_id), ) solver = await solver_service.get_solver( @@ -284,8 +284,14 @@ async def get_job( ) project: ProjectGet = await webserver_api.get_project(project_id=job_id) + job_rest_interface_links = get_solver_job_rest_interface_links( + url_for=url_for, solver_key=solver_key, version=version + ) + job = create_job_from_project( - solver_or_program=solver, project=project, url_for=url_for + solver_or_program=solver, + project=project, + 
job_links=job_rest_interface_links, ) assert job.id == job_id # nosec return job # nosec @@ -294,66 +300,28 @@ async def get_job( @router.get( "/{solver_key:path}/releases/{version}/jobs/{job_id:uuid}/outputs", response_model=JobOutputs, - responses=_OUTPUTS_STATUS_CODES, + responses=_OUTPUTS_STATUS_CODES + | { + status.HTTP_409_CONFLICT: { + "description": "Job assets missing", + "model": ErrorGet, + }, + }, ) async def get_job_outputs( solver_key: SolverKeyId, version: VersionStr, job_id: JobID, - user_id: Annotated[PositiveInt, Depends(get_current_user_id)], + job_service: Annotated[JobService, Depends(get_job_service)], async_pg_engine: Annotated[AsyncEngine, Depends(get_db_asyncpg_engine)], - webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], - storage_client: Annotated[StorageApi, Depends(get_api_client(StorageApi))], ): - job_name = compose_job_resource_name(solver_key, version, job_id) - _logger.debug("Get Job '%s' outputs", job_name) - - project: ProjectGet = await webserver_api.get_project(project_id=job_id) - node_ids = list(project.workbench.keys()) - assert len(node_ids) == 1 # nosec - - product_price = await webserver_api.get_product_price() - if product_price.usd_per_credit is not None: - wallet = await webserver_api.get_project_wallet(project_id=project.uuid) - if wallet is None: - raise MissingWalletError(job_id=project.uuid) - wallet_with_credits = await webserver_api.get_wallet(wallet_id=wallet.wallet_id) - if wallet_with_credits.available_credits <= ZERO_CREDITS: - raise InsufficientCreditsError( - wallet_name=wallet_with_credits.name, - wallet_credit_amount=wallet_with_credits.available_credits, - ) - - outputs: dict[str, ResultsTypes] = await get_solver_output_results( - user_id=user_id, - project_uuid=job_id, - node_uuid=UUID(node_ids[0]), - db_engine=async_pg_engine, + return await job_service.get_solver_job_outputs( + solver_key=solver_key, + version=version, + job_id=job_id, + async_pg_engine=async_pg_engine, ) - results: dict[str, ArgumentTypes] = {} - for name, value in outputs.items(): - if isinstance(value, BaseFileLink): - file_id: UUID = DomainFile.create_id(*value.path.split("/")) - - found = await storage_client.search_owned_files( - user_id=user_id, file_id=file_id, limit=1 - ) - if found: - assert len(found) == 1 # nosec - results[name] = SchemaFile.from_domain_model( - to_file_api_model(found[0]) - ) - else: - api_file = await storage_client.create_soft_link( - user_id=user_id, target_s3_path=value.path, as_file_id=file_id - ) - results[name] = SchemaFile.from_domain_model(api_file) - else: - results[name] = value - - return JobOutputs(job_id=job_id, results=results) - @router.get( "/{solver_key:path}/releases/{version}/jobs/{job_id:uuid}/outputs/logfile", @@ -375,7 +343,7 @@ async def get_job_output_logfile( user_id: Annotated[PositiveInt, Depends(get_current_user_id)], director2_api: Annotated[DirectorV2Api, Depends(get_api_client(DirectorV2Api))], ): - job_name = compose_job_resource_name(solver_key, version, job_id) + job_name = compose_solver_job_resource_name(solver_key, version, job_id) _logger.debug("Get Job '%s' outputs logfile", job_name) project_id = job_id @@ -431,7 +399,7 @@ async def get_job_custom_metadata( webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], url_for: Annotated[Callable, Depends(get_reverse_url_mapper)], ): - job_name = compose_job_resource_name(solver_key, version, job_id) + job_name = compose_solver_job_resource_name(solver_key, version, job_id) _logger.debug("Custom metadata for 
'%s'", job_name) return await get_custom_metadata( @@ -461,7 +429,7 @@ async def get_job_wallet( job_id: JobID, webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], ) -> WalletGetWithAvailableCreditsLegacy: - job_name = compose_job_resource_name(solver_key, version, job_id) + job_name = compose_solver_job_resource_name(solver_key, version, job_id) _logger.debug("Getting wallet for job '%s'", job_name) if project_wallet := await webserver_api.get_project_wallet(project_id=job_id): @@ -484,7 +452,7 @@ async def get_job_pricing_unit( job_id: JobID, webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], ): - job_name = compose_job_resource_name(solver_key, version, job_id) + job_name = compose_solver_job_resource_name(solver_key, version, job_id) with log_context(_logger, logging.DEBUG, "Get pricing unit"): _logger.debug("job: %s", job_name) project: ProjectGet = await webserver_api.get_project(project_id=job_id) @@ -515,7 +483,7 @@ async def get_log_stream( ): assert request # nosec - job_name = compose_job_resource_name(solver_key, version, job_id) + job_name = compose_solver_job_resource_name(solver_key, version, job_id) with log_context( _logger, logging.DEBUG, f"Streaming logs for {job_name=} and {user_id=}" ): diff --git a/services/api-server/src/simcore_service_api_server/api/routes/studies.py b/services/api-server/src/simcore_service_api_server/api/routes/studies.py index 11a17b295c73..d13f7facaa2e 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/studies.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/studies.py @@ -1,9 +1,10 @@ import logging from typing import Annotated, Final -from fastapi import APIRouter, Depends, Header, status +from fastapi import APIRouter, Body, Depends, Header, Query, status from fastapi_pagination.api import create_page -from models_library.api_schemas_webserver.projects import ProjectGet +from models_library.api_schemas_webserver.projects import ProjectGet, ProjectPatch +from models_library.basic_types import LongTruncatedStr, ShortTruncatedStr from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID @@ -94,13 +95,25 @@ async def clone_study( webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], x_simcore_parent_project_uuid: Annotated[ProjectID | None, Header()] = None, x_simcore_parent_node_id: Annotated[NodeID | None, Header()] = None, + hidden: Annotated[bool, Query()] = False, + title: Annotated[ShortTruncatedStr | None, Body(empty=True)] = None, + description: Annotated[LongTruncatedStr | None, Body(empty=True)] = None, ): project: ProjectGet = await webserver_api.clone_project( project_id=study_id, - hidden=False, + hidden=hidden, parent_project_uuid=x_simcore_parent_project_uuid, parent_node_id=x_simcore_parent_node_id, ) + if title or description: + patch_params = ProjectPatch( + name=title, + description=description, + ) + await webserver_api.patch_project( + project_id=project.uuid, patch_params=patch_params + ) + project = await webserver_api.get_project(project_id=project.uuid) return _create_study_from_project(project) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py b/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py index 11e8a22e4a78..e8ad03e2342c 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py @@ -1,23 +1,18 
@@ import logging from collections.abc import Callable from typing import Annotated -from uuid import UUID from fastapi import APIRouter, Depends, Header, Query, Request, status from fastapi.encoders import jsonable_encoder from fastapi.responses import JSONResponse from fastapi_pagination.api import create_page -from models_library.api_schemas_webserver.projects import ProjectPatch -from models_library.api_schemas_webserver.projects_nodes import NodeOutputs from models_library.clusters import ClusterID -from models_library.function_services_catalog.services import file_picker from models_library.projects import ProjectID -from models_library.projects_nodes import InputID, InputTypes from models_library.projects_nodes_io import NodeID from pydantic import HttpUrl, PositiveInt from servicelib.logging_utils import log_context -from ..._service_studies import StudyService +from ..._service_jobs import JobService, compose_study_job_resource_name from ...exceptions.backend_errors import ProjectAlreadyStartedError from ...models.api_resources import parse_resources_ids from ...models.pagination import Page, PaginationParams @@ -29,30 +24,24 @@ JobMetadata, JobMetadataUpdate, JobOutputs, + JobPricingSpecification, JobStatus, ) -from ...models.schemas.studies import JobLogsMap, Study, StudyID +from ...models.schemas.studies import JobLogsMap, StudyID from ...services_http.director_v2 import DirectorV2Api from ...services_http.jobs import ( get_custom_metadata, replace_custom_metadata, - start_project, stop_project, ) -from ...services_http.solver_job_models_converters import create_jobstatus_from_task -from ...services_http.storage import StorageApi from ...services_http.study_job_models_converters import ( - create_job_from_study, - create_job_outputs_from_project_outputs, - get_project_and_file_inputs_from_job_inputs, + get_study_job_rest_interface_links, ) from ...services_http.webserver import AuthSession -from ...services_rpc.wb_api_server import WbApiRpcClient from ..dependencies.application import get_reverse_url_mapper -from ..dependencies.authentication import get_current_user_id, get_product_name -from ..dependencies.services import get_api_client, get_study_service -from ..dependencies.webserver_http import AuthSession, get_webserver_session -from ..dependencies.webserver_rpc import get_wb_api_rpc_client +from ..dependencies.authentication import get_current_user_id +from ..dependencies.services import get_api_client, get_job_service +from ..dependencies.webserver_http import get_webserver_session from ._constants import ( FMSG_CHANGELOG_CHANGED_IN_VERSION, FMSG_CHANGELOG_NEW_IN_VERSION, @@ -60,23 +49,12 @@ ) from .solvers_jobs import JOBS_STATUS_CODES -# pylint: disable=too-many-arguments - - _logger = logging.getLogger(__name__) router = APIRouter() -def _compose_job_resource_name(study_key, job_id) -> str: - """Creates a unique resource name for solver's jobs""" - return Job.compose_resource_name( - parent_name=Study.compose_resource_name(study_key), - job_id=job_id, - ) - - @router.get( "/{study_id:uuid}/jobs", response_model=Page[Job], @@ -91,13 +69,13 @@ def _compose_job_resource_name(study_key, job_id) -> str: async def list_study_jobs( study_id: StudyID, page_params: Annotated[PaginationParams, Depends()], - study_service: Annotated[StudyService, Depends(get_study_service)], + job_service: Annotated[JobService, Depends(get_job_service)], url_for: Annotated[Callable, Depends(get_reverse_url_mapper)], ): msg = f"list study jobs study_id={study_id!r} with 
pagination={page_params!r}. SEE https://github.com/ITISFoundation/osparc-simcore/issues/4177" _logger.debug(msg) - jobs, meta = await study_service.list_jobs( + jobs, meta = await job_service.list_study_jobs( filter_by_study_id=study_id, pagination_offset=page_params.offset, pagination_limit=page_params.limit, @@ -124,27 +102,26 @@ async def list_study_jobs( async def create_study_job( study_id: StudyID, job_inputs: JobInputs, - webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], - wb_api_rpc: Annotated[WbApiRpcClient, Depends(get_wb_api_rpc_client)], url_for: Annotated[Callable, Depends(get_reverse_url_mapper)], - user_id: Annotated[PositiveInt, Depends(get_current_user_id)], - product_name: Annotated[str, Depends(get_product_name)], + job_service: Annotated[JobService, Depends(get_job_service)], hidden: Annotated[bool, Query()] = True, # noqa: FBT002 - x_simcore_parent_project_uuid: ProjectID | None = Header(default=None), - x_simcore_parent_node_id: NodeID | None = Header(default=None), + x_simcore_parent_project_uuid: Annotated[ProjectID | None, Header()] = None, + x_simcore_parent_node_id: Annotated[NodeID | None, Header()] = None, ) -> Job: """ hidden -- if True (default) hides project from UI """ - project = await webserver_api.clone_project( - project_id=study_id, + + job_links = get_study_job_rest_interface_links(url_for=url_for, study_id=study_id) + job = await job_service.create_studies_job( + study_id=study_id, + job_inputs=job_inputs, + x_simcore_parent_project_uuid=x_simcore_parent_project_uuid, + x_simcore_parent_node_id=x_simcore_parent_node_id, + job_links=job_links, hidden=hidden, - parent_project_uuid=x_simcore_parent_project_uuid, - parent_node_id=x_simcore_parent_node_id, - ) - job = create_job_from_study( - study_key=study_id, project=project, job_inputs=job_inputs ) + assert job.name == compose_study_job_resource_name(study_id, job.id) job.url = url_for( "get_study_job", study_id=study_id, @@ -156,53 +133,6 @@ async def create_study_job( study_id=study_id, job_id=job.id, ) - - await webserver_api.patch_project( - project_id=job.id, - patch_params=ProjectPatch(name=job.name), - ) - - await wb_api_rpc.mark_project_as_job( - product_name=product_name, - user_id=user_id, - project_uuid=job.id, - job_parent_resource_name=job.runner_name, - ) - - project_inputs = await webserver_api.get_project_inputs(project_id=project.uuid) - - file_param_nodes = {} - for node_id, node in project.workbench.items(): - if ( - node.key == file_picker.META.key - and node.outputs is not None - and len(node.outputs) == 0 - ): - file_param_nodes[node.label] = node_id - - file_inputs: dict[InputID, InputTypes] = {} - - ( - new_project_inputs, - new_project_file_inputs, - ) = get_project_and_file_inputs_from_job_inputs( - project_inputs, file_inputs, job_inputs - ) - - for node_label, file_link in new_project_file_inputs.items(): - await webserver_api.update_node_outputs( - project_id=project.uuid, - node_id=UUID(file_param_nodes[node_label]), - new_node_outputs=NodeOutputs(outputs={"outFile": file_link}), - ) - - if len(new_project_inputs) > 0: - await webserver_api.update_project_inputs( - project_id=project.uuid, new_inputs=new_project_inputs - ) - - assert job.name == _compose_job_resource_name(study_id, job.id) - return job @@ -221,9 +151,7 @@ async def create_study_job( async def get_study_job( study_id: StudyID, job_id: JobID, - study_service: Annotated[StudyService, Depends(get_study_service)], ): - assert study_service # nosec msg = f"get study job 
study_id={study_id!r} job_id={job_id!r}. SEE https://github.com/ITISFoundation/osparc-simcore/issues/4177" raise NotImplementedError(msg) @@ -239,7 +167,7 @@ async def delete_study_job( webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], ): """Deletes an existing study job""" - job_name = _compose_job_resource_name(study_id, job_id) + job_name = compose_study_job_resource_name(study_id, job_id) with log_context(_logger, logging.DEBUG, f"Deleting Job '{job_name}'"): await webserver_api.delete_project(project_id=job_id) @@ -276,9 +204,7 @@ async def start_study_job( request: Request, study_id: StudyID, job_id: JobID, - user_id: Annotated[PositiveInt, Depends(get_current_user_id)], - webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], - director2_api: Annotated[DirectorV2Api, Depends(get_api_client(DirectorV2Api))], + job_service: Annotated[JobService, Depends(get_job_service)], cluster_id: Annotated[ # pylint: disable=unused-argument # noqa: ARG001 ClusterID | None, Query( @@ -293,31 +219,23 @@ async def start_study_job( ), ] = None, ): - job_name = _compose_job_resource_name(study_id, job_id) + pricing_spec = JobPricingSpecification.create_from_headers(headers=request.headers) + + job_name = compose_study_job_resource_name(study_id, job_id) with log_context(_logger, logging.DEBUG, f"Starting Job '{job_name}'"): try: - await start_project( - request=request, + return await job_service.start_study_job( + study_id=study_id, job_id=job_id, - expected_job_name=job_name, - webserver_api=webserver_api, + pricing_spec=pricing_spec, ) except ProjectAlreadyStartedError: - job_status: JobStatus = await inspect_study_job( - study_id=study_id, + job_status: JobStatus = await job_service.inspect_study_job( job_id=job_id, - user_id=user_id, - director2_api=director2_api, ) return JSONResponse( content=jsonable_encoder(job_status), status_code=status.HTTP_200_OK ) - return await inspect_study_job( - study_id=study_id, - job_id=job_id, - user_id=user_id, - director2_api=director2_api, - ) @router.post( @@ -330,7 +248,7 @@ async def stop_study_job( user_id: Annotated[PositiveInt, Depends(get_current_user_id)], director2_api: Annotated[DirectorV2Api, Depends(get_api_client(DirectorV2Api))], ): - job_name = _compose_job_resource_name(study_id, job_id) + job_name = compose_study_job_resource_name(study_id, job_id) with log_context(_logger, logging.DEBUG, f"Stopping Job '{job_name}'"): return await stop_project( job_id=job_id, user_id=user_id, director2_api=director2_api @@ -344,15 +262,12 @@ async def stop_study_job( async def inspect_study_job( study_id: StudyID, job_id: JobID, - user_id: Annotated[PositiveInt, Depends(get_current_user_id)], - director2_api: Annotated[DirectorV2Api, Depends(get_api_client(DirectorV2Api))], + job_service: Annotated[JobService, Depends(get_job_service)], ) -> JobStatus: - job_name = _compose_job_resource_name(study_id, job_id) + job_name = compose_study_job_resource_name(study_id, job_id) _logger.debug("Inspecting Job '%s'", job_name) - task = await director2_api.get_computation(project_id=job_id, user_id=user_id) - job_status: JobStatus = create_jobstatus_from_task(task) - return job_status + return await job_service.inspect_study_job(job_id=job_id) @router.post( @@ -362,20 +277,13 @@ async def inspect_study_job( async def get_study_job_outputs( study_id: StudyID, job_id: JobID, - user_id: Annotated[PositiveInt, Depends(get_current_user_id)], - webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], - storage_client: 
Annotated[StorageApi, Depends(get_api_client(StorageApi))], + job_service: Annotated[JobService, Depends(get_job_service)], ): - job_name = _compose_job_resource_name(study_id, job_id) - _logger.debug("Getting Job Outputs for '%s'", job_name) - - project_outputs = await webserver_api.get_project_outputs(project_id=job_id) - job_outputs: JobOutputs = await create_job_outputs_from_project_outputs( - job_id, project_outputs, user_id, storage_client + return await job_service.get_study_job_outputs( + study_id=study_id, + job_id=job_id, ) - return job_outputs - @router.get( "/{study_id}/jobs/{job_id}/outputs/log-links", @@ -413,7 +321,7 @@ async def get_study_job_custom_metadata( webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], url_for: Annotated[Callable, Depends(get_reverse_url_mapper)], ): - job_name = _compose_job_resource_name(study_id, job_id) + job_name = compose_study_job_resource_name(study_id, job_id) msg = f"Gets metadata attached to study_id={study_id!r} job_id={job_id!r}.\njob_name={job_name!r}.\nSEE https://github.com/ITISFoundation/osparc-simcore/issues/4313" _logger.debug(msg) @@ -444,7 +352,7 @@ async def replace_study_job_custom_metadata( webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], url_for: Annotated[Callable, Depends(get_reverse_url_mapper)], ): - job_name = _compose_job_resource_name(study_id, job_id) + job_name = compose_study_job_resource_name(study_id, job_id) msg = f"Attaches metadata={replace.metadata!r} to study_id={study_id!r} job_id={job_id!r}.\njob_name={job_name!r}.\nSEE https://github.com/ITISFoundation/osparc-simcore/issues/4313" _logger.debug(msg) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/tasks.py b/services/api-server/src/simcore_service_api_server/api/routes/tasks.py new file mode 100644 index 000000000000..3ba23a481b16 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/api/routes/tasks.py @@ -0,0 +1,219 @@ +import logging +from typing import Annotated, Any + +from common_library.error_codes import create_error_code +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs +from fastapi import APIRouter, Depends, FastAPI, HTTPException, status +from models_library.api_schemas_long_running_tasks.base import TaskProgress +from models_library.api_schemas_long_running_tasks.tasks import ( + TaskGet, + TaskResult, + TaskStatus, +) +from models_library.api_schemas_rpc_async_jobs.async_jobs import ( + AsyncJobId, +) +from models_library.products import ProductName +from models_library.users import UserID +from servicelib.celery.models import TaskState, TaskUUID +from servicelib.fastapi.dependencies import get_app + +from ...models.domain.celery_models import ( + ApiServerOwnerMetadata, +) +from ...models.schemas.base import ApiServerEnvelope +from ...models.schemas.errors import ErrorGet +from ..dependencies.authentication import get_current_user_id, get_product_name +from ..dependencies.celery import get_task_manager +from ._constants import ( + FMSG_CHANGELOG_NEW_IN_VERSION, + create_route_description, +) + +router = APIRouter() +_logger = logging.getLogger(__name__) + + +_DEFAULT_TASK_STATUS_CODES: dict[int | str, dict[str, Any]] = { + status.HTTP_500_INTERNAL_SERVER_ERROR: { + "description": "Internal server error", + "model": ErrorGet, + }, +} + + +@router.get( + "", + response_model=ApiServerEnvelope[list[TaskGet]], + responses=_DEFAULT_TASK_STATUS_CODES, + description=create_route_description( + base="List all tasks", + changelog=[ + 
FMSG_CHANGELOG_NEW_IN_VERSION.format("0.10-rc1"), + ], + ), + include_in_schema=True, +) +async def list_tasks( + app: Annotated[FastAPI, Depends(get_app)], + user_id: Annotated[UserID, Depends(get_current_user_id)], + product_name: Annotated[ProductName, Depends(get_product_name)], +): + task_manager = get_task_manager(app) + owner_metadata = ApiServerOwnerMetadata( + user_id=user_id, + product_name=product_name, + ) + tasks = await task_manager.list_tasks( + owner_metadata=owner_metadata, + ) + + app_router = app.router + data = [ + TaskGet( + task_id=f"{task.uuid}", + task_name=task.metadata.name, + status_href=app_router.url_path_for( + "get_task_status", task_id=f"{task.uuid}" + ), + abort_href=app_router.url_path_for("cancel_task", task_id=f"{task.uuid}"), + result_href=app_router.url_path_for( + "get_task_result", task_id=f"{task.uuid}" + ), + ) + for task in tasks + ] + return ApiServerEnvelope(data=data) + + +@router.get( + "/{task_id}", + response_model=TaskStatus, + responses=_DEFAULT_TASK_STATUS_CODES, + description=create_route_description( + base="Get task status", + changelog=[ + FMSG_CHANGELOG_NEW_IN_VERSION.format("0.10-rc1"), + ], + ), + include_in_schema=True, +) +async def get_task_status( + task_id: AsyncJobId, + app: Annotated[FastAPI, Depends(get_app)], + user_id: Annotated[UserID, Depends(get_current_user_id)], + product_name: Annotated[ProductName, Depends(get_product_name)], +): + task_manager = get_task_manager(app) + owner_metadata = ApiServerOwnerMetadata( + user_id=user_id, + product_name=product_name, + ) + task_status = await task_manager.get_task_status( + owner_metadata=owner_metadata, + task_uuid=TaskUUID(f"{task_id}"), + ) + + return TaskStatus( + task_progress=TaskProgress( + task_id=f"{task_status.task_uuid}", + percent=task_status.progress_report.percent_value, + ), + done=task_status.is_done, + started=None, + ) + + +@router.post( + "/{task_id}:cancel", + status_code=status.HTTP_204_NO_CONTENT, + responses=_DEFAULT_TASK_STATUS_CODES, + description=create_route_description( + base="Cancel task", + changelog=[ + FMSG_CHANGELOG_NEW_IN_VERSION.format("0.10-rc1"), + ], + ), + include_in_schema=True, +) +async def cancel_task( + task_id: AsyncJobId, + app: Annotated[FastAPI, Depends(get_app)], + user_id: Annotated[UserID, Depends(get_current_user_id)], + product_name: Annotated[ProductName, Depends(get_product_name)], +): + task_manager = get_task_manager(app) + owner_metadata = ApiServerOwnerMetadata( + user_id=user_id, + product_name=product_name, + ) + await task_manager.cancel_task( + owner_metadata=owner_metadata, + task_uuid=TaskUUID(f"{task_id}"), + ) + + +@router.get( + "/{task_id}/result", + response_model=TaskResult, + responses={ + status.HTTP_404_NOT_FOUND: { + "description": "Task result not found", + "model": ErrorGet, + }, + **_DEFAULT_TASK_STATUS_CODES, + }, + description=create_route_description( + base="Get task result", + changelog=[ + FMSG_CHANGELOG_NEW_IN_VERSION.format("0.10-rc1"), + ], + ), + include_in_schema=True, +) +async def get_task_result( + task_id: AsyncJobId, + app: Annotated[FastAPI, Depends(get_app)], + user_id: Annotated[UserID, Depends(get_current_user_id)], + product_name: Annotated[ProductName, Depends(get_product_name)], +): + task_manager = get_task_manager(app) + owner_metadata = ApiServerOwnerMetadata( + user_id=user_id, + product_name=product_name, + ) + + task_status = await task_manager.get_task_status( + owner_metadata=owner_metadata, + task_uuid=TaskUUID(f"{task_id}"), + ) + + if not task_status.is_done: + 
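+        # The task exists but has not finished: answer 404 ("Task result not available yet")
+        # so clients keep polling the status endpoint until `done` is true. Typical flow
+        # (sketch; the /v0/tasks prefix is assumed from where this router is mounted):
+        #   GET  /v0/tasks/{task_id}         -> TaskStatus, check `done`
+        #   GET  /v0/tasks/{task_id}/result  -> TaskResult once done
+        #   POST /v0/tasks/{task_id}:cancel  -> abort a running task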
raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Task result not available yet", + ) + + task_result = await task_manager.get_task_result( + owner_metadata=owner_metadata, + task_uuid=TaskUUID(f"{task_id}"), + ) + + if task_status.task_state == TaskState.FAILURE: + assert isinstance(task_result, Exception) + user_error_msg = f"The execution of task {task_id} failed" + support_id = create_error_code(task_result) + _logger.exception( + **create_troubleshooting_log_kwargs( + user_error_msg, + error=task_result, + error_code=support_id, + tip="Unexpected error in Celery", + ) + ) + raise HTTPException( + status_code=status.HTTP_503_SERVICE_UNAVAILABLE, + detail=user_error_msg, + ) + + return TaskResult(result=task_result, error=None) diff --git a/services/api-server/src/simcore_service_api_server/celery_worker/__init__.py b/services/api-server/src/simcore_service_api_server/celery_worker/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/api-server/src/simcore_service_api_server/celery_worker/worker_main.py b/services/api-server/src/simcore_service_api_server/celery_worker/worker_main.py new file mode 100644 index 000000000000..82881b6af698 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/celery_worker/worker_main.py @@ -0,0 +1,40 @@ +"""Main application to be deployed in for example uvicorn.""" + +from functools import partial + +from celery_library.common import create_app as create_celery_app +from celery_library.signals import ( + on_worker_init, +) +from servicelib.fastapi.celery.app_server import FastAPIAppServer +from servicelib.logging_utils import setup_loggers + +from ..core.application import create_app +from ..core.settings import ApplicationSettings +from .worker_tasks.tasks import setup_worker_tasks + + +def get_app(): + _settings = ApplicationSettings.create_from_envs() + + setup_loggers( + log_format_local_dev_enabled=_settings.API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=_settings.API_SERVER_LOG_FILTER_MAPPING, + tracing_settings=_settings.API_SERVER_TRACING, + log_base_level=_settings.log_level, + noisy_loggers=None, + ) + + assert _settings.API_SERVER_CELERY # nosec + app = create_celery_app(_settings.API_SERVER_CELERY) + setup_worker_tasks(app) + + return app + + +def worker_init_wrapper(sender, **_kwargs): + _settings = ApplicationSettings.create_from_envs() + assert _settings.API_SERVER_CELERY # nosec + app_server = FastAPIAppServer(app=create_app(_settings)) + + return partial(on_worker_init, app_server=app_server)(sender, **_kwargs) diff --git a/services/api-server/src/simcore_service_api_server/celery_worker/worker_tasks/__init__.py b/services/api-server/src/simcore_service_api_server/celery_worker/worker_tasks/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/api-server/src/simcore_service_api_server/celery_worker/worker_tasks/functions_tasks.py b/services/api-server/src/simcore_service_api_server/celery_worker/worker_tasks/functions_tasks.py new file mode 100644 index 000000000000..fdfc8f1382b1 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/celery_worker/worker_tasks/functions_tasks.py @@ -0,0 +1,129 @@ +from celery import ( # type: ignore[import-untyped] # pylint: disable=no-name-in-module + Task, +) +from celery_library.utils import get_app_server # pylint: disable=no-name-in-module +from fastapi import FastAPI +from models_library.functions import RegisteredFunction, RegisteredFunctionJob +from 
models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID +from servicelib.celery.models import TaskID +from simcore_service_api_server._service_function_jobs import FunctionJobService + +from ...api.dependencies.authentication import Identity +from ...api.dependencies.rabbitmq import get_rabbitmq_rpc_client +from ...api.dependencies.services import ( + get_catalog_service, + get_directorv2_service, + get_function_job_service, + get_function_service, + get_job_service, + get_solver_service, + get_storage_service, +) +from ...api.dependencies.webserver_http import get_session_cookie, get_webserver_session +from ...api.dependencies.webserver_rpc import get_wb_api_rpc_client +from ...models.api_resources import JobLinks +from ...models.domain.functions import PreRegisteredFunctionJobData +from ...models.schemas.jobs import JobPricingSpecification +from ...services_http.director_v2 import DirectorV2Api +from ...services_http.storage import StorageApi + + +async def _assemble_function_job_service( + *, + app: FastAPI, + user_identity: Identity, +) -> FunctionJobService: + # This should ideally be done by a dependency injection system (like it is done in the api-server). + # However, for that we would need to introduce a dependency injection system which is not coupled to, + # but compatible with FastAPI's Depends. One suggestion: https://github.com/ets-labs/python-dependency-injector. + # See also https://github.com/fastapi/fastapi/issues/1105#issuecomment-609919850. + settings = app.state.settings + assert settings.API_SERVER_WEBSERVER # nosec + session_cookie = get_session_cookie( + identity=user_identity.email, settings=settings.API_SERVER_WEBSERVER, app=app + ) + + rpc_client = get_rabbitmq_rpc_client(app=app) + web_server_rest_client = get_webserver_session( + app=app, session_cookies=session_cookie, identity=user_identity + ) + web_api_rpc_client = await get_wb_api_rpc_client(app=app) + director2_api = DirectorV2Api.get_instance(app=app) + assert isinstance(director2_api, DirectorV2Api) # nosec + storage_api = StorageApi.get_instance(app=app) + assert isinstance(storage_api, StorageApi) # nosec + catalog_service = get_catalog_service( + rpc_client=rpc_client, + user_id=user_identity.user_id, + product_name=user_identity.product_name, + ) + + storage_service = get_storage_service( + rpc_client=rpc_client, + user_id=user_identity.user_id, + product_name=user_identity.product_name, + ) + directorv2_service = get_directorv2_service(rpc_client=rpc_client) + + solver_service = get_solver_service( + catalog_service=catalog_service, + user_id=user_identity.user_id, + product_name=user_identity.product_name, + ) + + function_service = get_function_service( + web_rpc_api=web_api_rpc_client, + user_id=user_identity.user_id, + product_name=user_identity.product_name, + ) + + job_service = get_job_service( + web_rest_api=web_server_rest_client, + director2_api=director2_api, + storage_api=storage_api, + web_rpc_api=web_api_rpc_client, + storage_service=storage_service, + directorv2_service=directorv2_service, + user_id=user_identity.user_id, + product_name=user_identity.product_name, + solver_service=solver_service, + ) + + return get_function_job_service( + web_rpc_api=web_api_rpc_client, + job_service=job_service, + user_id=user_identity.user_id, + product_name=user_identity.product_name, + function_service=function_service, + webserver_api=web_server_rest_client, + storage_service=storage_service, + ) + + +async def run_function( + task: Task, + task_id: TaskID, 
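+    # NOTE: `task` and `task_id` are supplied by the Celery runtime through the
+    # celery_library wrapper (this coroutine is registered with `register_task` in
+    # worker_tasks/tasks.py); the keyword-only arguments below are transported as
+    # types registered via `register_pydantic_types`/`register_celery_types`.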
+ *, + user_identity: Identity, + function: RegisteredFunction, + pre_registered_function_job_data: PreRegisteredFunctionJobData, + pricing_spec: JobPricingSpecification | None, + job_links: JobLinks, + x_simcore_parent_project_uuid: ProjectID | None, + x_simcore_parent_node_id: NodeID | None, +) -> RegisteredFunctionJob: + assert task_id # nosec + app = get_app_server(task.app).app + function_job_service = await _assemble_function_job_service( + app=app, user_identity=user_identity + ) + + return await function_job_service.run_function( + function=function, + pre_registered_function_job_data=pre_registered_function_job_data, + pricing_spec=pricing_spec, + job_links=job_links, + x_simcore_parent_project_uuid=x_simcore_parent_project_uuid, + x_simcore_parent_node_id=x_simcore_parent_node_id, + ) diff --git a/services/api-server/src/simcore_service_api_server/celery_worker/worker_tasks/tasks.py b/services/api-server/src/simcore_service_api_server/celery_worker/worker_tasks/tasks.py new file mode 100644 index 000000000000..cef6ad06d18c --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/celery_worker/worker_tasks/tasks.py @@ -0,0 +1,21 @@ +import logging + +from celery import ( # type: ignore[import-untyped] # pylint: disable=no-name-in-module + Celery, +) +from celery_library.task import register_task +from celery_library.types import register_celery_types, register_pydantic_types +from servicelib.logging_utils import log_context + +from ...models.domain.celery_models import pydantic_types_to_register +from .functions_tasks import run_function + +_logger = logging.getLogger(__name__) + + +def setup_worker_tasks(app: Celery) -> None: + register_celery_types() + register_pydantic_types(*pydantic_types_to_register) + + with log_context(_logger, logging.INFO, msg="worker task registration"): + register_task(app, run_function) diff --git a/services/api-server/src/simcore_service_api_server/cli.py b/services/api-server/src/simcore_service_api_server/cli.py index 11427f61d4ea..e4ead859f314 100644 --- a/services/api-server/src/simcore_service_api_server/cli.py +++ b/services/api-server/src/simcore_service_api_server/cli.py @@ -18,6 +18,6 @@ def run(): """Runs application""" typer.secho("Sorry, this entrypoint is intentionally disabled. 
Use instead") typer.secho( - "$ uvicorn simcore_service_api_server.main:the_app", + "$ uvicorn --factory simcore_service_api_server.main:app_factory", fg=typer.colors.BLUE, ) diff --git a/services/api-server/src/simcore_service_api_server/clients/celery_task_manager.py b/services/api-server/src/simcore_service_api_server/clients/celery_task_manager.py new file mode 100644 index 000000000000..a8d2cb13b554 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/clients/celery_task_manager.py @@ -0,0 +1,48 @@ +import logging + +from celery_library.backends.redis import RedisTaskInfoStore +from celery_library.common import create_app +from celery_library.task_manager import CeleryTaskManager +from celery_library.types import register_celery_types, register_pydantic_types +from fastapi import FastAPI +from servicelib.logging_utils import log_context +from servicelib.redis import RedisClientSDK +from settings_library.celery import CelerySettings +from settings_library.redis import RedisDatabase + +from ..models.domain.celery_models import pydantic_types_to_register + +_logger = logging.getLogger(__name__) + + +def setup_task_manager(app: FastAPI, settings: CelerySettings) -> None: + async def on_startup() -> None: + with log_context(_logger, logging.INFO, "Setting up Celery"): + redis_client_sdk = RedisClientSDK( + settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( + RedisDatabase.CELERY_TASKS + ), + client_name="api_server_celery_tasks", + ) + app.state.celery_tasks_redis_client_sdk = redis_client_sdk + await redis_client_sdk.setup() + + app.state.task_manager = CeleryTaskManager( + create_app(settings), + settings, + RedisTaskInfoStore(redis_client_sdk), + ) + + register_celery_types() + register_pydantic_types(*pydantic_types_to_register) + + async def on_shutdown() -> None: + with log_context(_logger, logging.INFO, "Shutting down Celery"): + redis_client_sdk: RedisClientSDK | None = ( + app.state.celery_tasks_redis_client_sdk + ) + if redis_client_sdk: + await redis_client_sdk.shutdown() + + app.add_event_handler("startup", on_startup) + app.add_event_handler("shutdown", on_shutdown) diff --git a/services/api-server/src/simcore_service_api_server/clients/postgres.py b/services/api-server/src/simcore_service_api_server/clients/postgres.py index 4f337bd82d6d..0b8e9542e8d3 100644 --- a/services/api-server/src/simcore_service_api_server/clients/postgres.py +++ b/services/api-server/src/simcore_service_api_server/clients/postgres.py @@ -3,13 +3,13 @@ from servicelib.fastapi.lifespan_utils import LifespanOnStartupError from sqlalchemy.ext.asyncio import AsyncEngine +from .._meta import APP_NAME from ..core.settings import ApplicationSettings class PostgresNotConfiguredError(LifespanOnStartupError): msg_template = LifespanOnStartupError.msg_template + ( - "Postgres settings are not configured. " - "Please check your application settings. " + "Postgres settings are not configured. Please check your application settings. 
" ) @@ -30,7 +30,9 @@ async def _on_startup() -> None: settings=settings, ) - await connect_to_db(app, settings.API_SERVER_POSTGRES) + await connect_to_db( + app, settings.API_SERVER_POSTGRES, application_name=APP_NAME + ) assert app.state.engine # nosec assert isinstance(app.state.engine, AsyncEngine) # nosec diff --git a/services/api-server/src/simcore_service_api_server/core/_prometheus_instrumentation.py b/services/api-server/src/simcore_service_api_server/core/_prometheus_instrumentation.py index f19bac34a761..68bef9b369b5 100644 --- a/services/api-server/src/simcore_service_api_server/core/_prometheus_instrumentation.py +++ b/services/api-server/src/simcore_service_api_server/core/_prometheus_instrumentation.py @@ -4,10 +4,10 @@ from datetime import timedelta from typing import Final, cast +from common_library.async_tools import cancel_wait_task from fastapi import FastAPI from prometheus_client import CollectorRegistry, Gauge from pydantic import PositiveInt -from servicelib.async_utils import cancel_wait_task from servicelib.background_task import create_periodic_task from servicelib.fastapi.monitoring import ( setup_prometheus_instrumentation as setup_rest_instrumentation, diff --git a/services/api-server/src/simcore_service_api_server/core/application.py b/services/api-server/src/simcore_service_api_server/core/application.py index 44c5b5fc1296..32a5f6df6415 100644 --- a/services/api-server/src/simcore_service_api_server/core/application.py +++ b/services/api-server/src/simcore_service_api_server/core/application.py @@ -1,5 +1,6 @@ import logging +from common_library.json_serialization import json_dumps from fastapi import FastAPI from fastapi_pagination import add_pagination from models_library.basic_types import BootModeEnum @@ -9,12 +10,12 @@ initialize_fastapi_app_tracing, setup_tracing, ) -from servicelib.logging_utils import config_all_loggers from .. 
import exceptions from .._meta import API_VERSION, API_VTAG, APP_NAME from ..api.root import create_router from ..api.routes.health import router as health_router +from ..clients.celery_task_manager import setup_task_manager from ..clients.postgres import setup_postgres from ..services_http import director_v2, storage, webserver from ..services_http.rabbitmq import setup_rabbitmq @@ -48,20 +49,15 @@ def _label_title_and_version(settings: ApplicationSettings, title: str, version: return title, version -def init_app(settings: ApplicationSettings | None = None) -> FastAPI: +def create_app(settings: ApplicationSettings | None = None) -> FastAPI: if settings is None: settings = ApplicationSettings.create_from_envs() + _logger.info( + "Application settings: %s", + json_dumps(settings, indent=2, sort_keys=True), + ) assert settings # nosec - logging.basicConfig(level=settings.log_level) - logging.root.setLevel(settings.log_level) - config_all_loggers( - log_format_local_dev_enabled=settings.API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=settings.API_SERVER_LOG_FILTER_MAPPING, - tracing_settings=settings.API_SERVER_TRACING, - ) - _logger.debug("App settings:\n%s", settings.model_dump_json(indent=2)) - # Labeling title = "osparc.io public API" version = API_VERSION # public version identifier @@ -93,11 +89,14 @@ def init_app(settings: ApplicationSettings | None = None) -> FastAPI: setup_rabbitmq(app) + if settings.API_SERVER_CELERY: + setup_task_manager(app, settings.API_SERVER_CELERY) + if app.state.settings.API_SERVER_PROMETHEUS_INSTRUMENTATION_ENABLED: setup_prometheus_instrumentation(app) if settings.API_SERVER_TRACING: - initialize_fastapi_app_tracing(app) + initialize_fastapi_app_tracing(app, add_response_trace_id_header=True) if settings.API_SERVER_WEBSERVER: webserver.setup( diff --git a/services/api-server/src/simcore_service_api_server/core/health_checker.py b/services/api-server/src/simcore_service_api_server/core/health_checker.py index b5a5180b12b9..1e882569f397 100644 --- a/services/api-server/src/simcore_service_api_server/core/health_checker.py +++ b/services/api-server/src/simcore_service_api_server/core/health_checker.py @@ -5,11 +5,11 @@ from typing import Annotated, Final, cast from uuid import uuid4 +from common_library.async_tools import cancel_wait_task from fastapi import Depends, FastAPI from models_library.rabbitmq_messages import LoggerRabbitMessage from models_library.users import UserID from pydantic import NonNegativeInt, PositiveFloat, PositiveInt -from servicelib.async_utils import cancel_wait_task from servicelib.background_task import create_periodic_task from servicelib.fastapi.dependencies import get_app from servicelib.logging_utils import log_catch @@ -95,7 +95,7 @@ async def _background_task_method(self): self._dummy_queue.get(), timeout=self._timeout_seconds ) self._health_check_failure_count = 0 - except asyncio.TimeoutError: + except TimeoutError: self._increment_health_check_failure_count() diff --git a/services/api-server/src/simcore_service_api_server/core/settings.py b/services/api-server/src/simcore_service_api_server/core/settings.py index cf734ed1040d..79fa5b39a054 100644 --- a/services/api-server/src/simcore_service_api_server/core/settings.py +++ b/services/api-server/src/simcore_service_api_server/core/settings.py @@ -2,7 +2,9 @@ from typing import Annotated from common_library.basic_types import DEFAULT_FACTORY +from common_library.logging.logging_utils_filtering import LoggerName, MessageSubstring from 
models_library.basic_types import BootModeEnum, LogLevel +from models_library.rabbitmq_basic_types import RPCNamespace from pydantic import ( AliasChoices, Field, @@ -11,8 +13,8 @@ SecretStr, field_validator, ) -from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.base import BaseCustomSettings +from settings_library.celery import CelerySettings from settings_library.director_v2 import DirectorV2Settings from settings_library.postgres import PostgresSettings from settings_library.rabbit import RabbitSettings @@ -27,7 +29,6 @@ class WebServerSettings(WebServerBaseSettings, MixinSessionSettings): - WEBSERVER_SESSION_SECRET_KEY: Annotated[ SecretStr, Field( @@ -35,7 +36,7 @@ class WebServerSettings(WebServerBaseSettings, MixinSessionSettings): 'TIP: python3 -c "from cryptography.fernet import *; print(Fernet.generate_key())"', min_length=44, validation_alias=AliasChoices( - "SESSION_SECRET_KEY", "WEBSERVER_SESSION_SECRET_KEY" + "WEBSERVER_SESSION_SECRET_KEY", "SESSION_SECRET_KEY" ), ), ] @@ -65,9 +66,7 @@ class BasicSettings(BaseCustomSettings, MixinLoggingSettings): LOG_LEVEL: Annotated[ LogLevel, Field( - validation_alias=AliasChoices( - "API_SERVER_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL" - ), + validation_alias=AliasChoices("API_SERVER_LOGLEVEL", "LOGLEVEL"), ), ] = LogLevel.INFO @@ -104,6 +103,10 @@ class ApplicationSettings(BasicSettings): # DOCKER BOOT SC_BOOT_MODE: BootModeEnum | None = None + API_SERVER_CELERY: Annotated[ + CelerySettings | None, Field(json_schema_extra={"auto_default_from_env": True}) + ] = None + API_SERVER_POSTGRES: Annotated[ PostgresSettings | None, Field(json_schema_extra={"auto_default_from_env": True}), @@ -129,6 +132,7 @@ class ApplicationSettings(BasicSettings): DirectorV2Settings | None, Field(json_schema_extra={"auto_default_from_env": True}), ] + API_SERVER_LOG_CHECK_TIMEOUT_SECONDS: NonNegativeInt = 3 * 60 API_SERVER_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True API_SERVER_HEALTH_CHECK_TASK_PERIOD_SECONDS: PositiveInt = 30 @@ -144,6 +148,15 @@ class ApplicationSettings(BasicSettings): ), ] + API_SERVER_WEBSERVER_RPC_NAMESPACE: Annotated[ + RPCNamespace, + Field(description="Namespace to connect to correct webserver's RPC interface"), + ] + + API_SERVER_WORKER_MODE: Annotated[ + bool, Field(description="If True, the API server runs in worker mode") + ] = False + @cached_property def debug(self) -> bool: """If True, debug tracebacks should be returned on errors.""" diff --git a/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py b/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py index bde18b2dbb58..5257bfad700c 100644 --- a/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py +++ b/services/api-server/src/simcore_service_api_server/exceptions/backend_errors.py @@ -8,6 +8,7 @@ class BaseBackEndError(ApiServerBaseError): """status_code: the default return status which will be returned to the client calling the api-server (in case this exception is raised)""" + msg_template = "The api-server encountered an error when contacting the backend" status_code = status.HTTP_502_BAD_GATEWAY @classmethod @@ -17,6 +18,11 @@ def named_fields(cls) -> set[str]: ) +class BackendTimeoutError(BaseBackEndError): + msg_template = "Backend request timed out" + status_code = status.HTTP_504_GATEWAY_TIMEOUT + + class InvalidInputError(BaseBackEndError): msg_template = "Invalid input" status_code = status.HTTP_422_UNPROCESSABLE_ENTITY @@ -52,6 +58,11 @@ 
class ServiceForbiddenAccessError(BaseBackEndError): status_code = status.HTTP_403_FORBIDDEN +class JobForbiddenAccessError(BaseBackEndError): + msg_template = "Forbidden access to job {project_id}" + status_code = status.HTTP_403_FORBIDDEN + + class JobNotFoundError(BaseBackEndError): msg_template = "Could not get solver/study job {project_id}" status_code = status.HTTP_404_NOT_FOUND @@ -130,3 +141,8 @@ class CanNotCheckoutServiceIsNotRunningError(BaseBackEndError): class LicensedItemCheckoutNotFoundError(BaseBackEndError): msg_template = "Licensed item checkout {licensed_item_checkout_id} not found." status_code = status.HTTP_404_NOT_FOUND + + +class JobAssetsMissingError(BaseBackEndError): + msg_template = "Job assets missing for job {job_id}" + status_code = status.HTTP_409_CONFLICT diff --git a/services/api-server/src/simcore_service_api_server/exceptions/function_errors.py b/services/api-server/src/simcore_service_api_server/exceptions/function_errors.py new file mode 100644 index 000000000000..28d44a435563 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/exceptions/function_errors.py @@ -0,0 +1,17 @@ +from fastapi import status + +from .backend_errors import BaseBackEndError + + +class BaseFunctionBackendError(BaseBackEndError): + pass + + +class FunctionJobCacheNotFoundError(BaseBackEndError): + msg_template: str = "No cached function job found." + status_code: int = 404 # Not Found + + +class FunctionJobProjectMissingError(BaseBackEndError): + msg_template: str = "Could not process function job" + status_code: int = status.HTTP_500_INTERNAL_SERVER_ERROR # Not Found diff --git a/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py b/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py index 91c4e0d9ccf9..03e1533a7d42 100644 --- a/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py +++ b/services/api-server/src/simcore_service_api_server/exceptions/handlers/__init__.py @@ -1,14 +1,23 @@ +from celery.exceptions import ( # type: ignore[import-untyped] #pylint: disable=no-name-in-module + CeleryError, +) +from celery_library.errors import TaskNotFoundError from fastapi import FastAPI from fastapi.exceptions import RequestValidationError from httpx import HTTPError as HttpxException +from models_library.functions_errors import FunctionBaseError from starlette import status from starlette.exceptions import HTTPException -from ..._constants import MSG_INTERNAL_ERROR_USER_FRIENDLY_TEMPLATE +from ..._constants import ( + MSG_CLIENT_ERROR_USER_FRIENDLY_TEMPLATE, + MSG_INTERNAL_ERROR_USER_FRIENDLY_TEMPLATE, +) from ...exceptions.backend_errors import BaseBackEndError from ..custom_errors import CustomBaseError from ..log_streaming_errors import LogStreamingBaseError from ._custom_errors import custom_error_handler +from ._handler_function_errors import function_error_handler from ._handlers_backend_errors import backend_error_handler from ._handlers_factory import make_handler_for_exception from ._http_exceptions import http_exception_handler @@ -24,6 +33,7 @@ def setup(app: FastAPI, *, is_debug: bool = False): app.add_exception_handler(LogStreamingBaseError, log_handling_error_handler) app.add_exception_handler(CustomBaseError, custom_error_handler) app.add_exception_handler(BaseBackEndError, backend_error_handler) + app.add_exception_handler(FunctionBaseError, function_error_handler) # SEE https://docs.python.org/3/library/exceptions.html#exception-hierarchy 
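+    # Celery-related failures are translated to HTTP errors below: an unknown task id
+    # (TaskNotFoundError) becomes 404, broker/result-backend errors (CeleryError) become
+    # 503 with an OEC support id, and anything unhandled falls through to the generic
+    # internal-error handler.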
app.add_exception_handler( @@ -34,6 +44,28 @@ def setup(app: FastAPI, *, is_debug: bool = False): error_message="This endpoint is still not implemented (under development)", ), ) + + app.add_exception_handler( + TaskNotFoundError, + make_handler_for_exception( + TaskNotFoundError, + status.HTTP_404_NOT_FOUND, + error_message=MSG_CLIENT_ERROR_USER_FRIENDLY_TEMPLATE, + add_exception_to_message=True, + ), + ) + + app.add_exception_handler( + CeleryError, + make_handler_for_exception( + CeleryError, + status.HTTP_503_SERVICE_UNAVAILABLE, + error_message=MSG_INTERNAL_ERROR_USER_FRIENDLY_TEMPLATE, + add_exception_to_message=is_debug, + add_oec_to_message=True, + ), + ) + app.add_exception_handler( Exception, make_handler_for_exception( diff --git a/services/api-server/src/simcore_service_api_server/exceptions/handlers/_handler_function_errors.py b/services/api-server/src/simcore_service_api_server/exceptions/handlers/_handler_function_errors.py new file mode 100644 index 000000000000..0ce8a579dbef --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/exceptions/handlers/_handler_function_errors.py @@ -0,0 +1,11 @@ +from fastapi import Request +from models_library.functions_errors import FunctionBaseError + +from ._utils import create_error_json_response + + +async def function_error_handler(request: Request, exc: Exception): + assert request # nosec + assert isinstance(exc, FunctionBaseError) + + return create_error_json_response(f"{exc}", status_code=exc.status_code) diff --git a/services/api-server/src/simcore_service_api_server/exceptions/handlers/_handlers_backend_errors.py b/services/api-server/src/simcore_service_api_server/exceptions/handlers/_handlers_backend_errors.py index ca335deceacc..619d52c122ad 100644 --- a/services/api-server/src/simcore_service_api_server/exceptions/handlers/_handlers_backend_errors.py +++ b/services/api-server/src/simcore_service_api_server/exceptions/handlers/_handlers_backend_errors.py @@ -1,12 +1,32 @@ +import logging + +from common_library.error_codes import create_error_code +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs +from servicelib.status_codes_utils import is_5xx_server_error from starlette.requests import Request from starlette.responses import JSONResponse from ...exceptions.backend_errors import BaseBackEndError from ._utils import create_error_json_response +_logger = logging.getLogger(__name__) + async def backend_error_handler(request: Request, exc: Exception) -> JSONResponse: assert request # nosec assert isinstance(exc, BaseBackEndError) - - return create_error_json_response(f"{exc}", status_code=exc.status_code) + user_error_msg = f"{exc}" + support_id = None + if is_5xx_server_error(exc.status_code): + support_id = create_error_code(exc) + _logger.exception( + **create_troubleshooting_log_kwargs( + user_error_msg, + error=exc, + error_code=support_id, + tip="Unexpected error", + ) + ) + return create_error_json_response( + user_error_msg, status_code=exc.status_code, support_id=support_id + ) diff --git a/services/api-server/src/simcore_service_api_server/exceptions/handlers/_handlers_factory.py b/services/api-server/src/simcore_service_api_server/exceptions/handlers/_handlers_factory.py index fe2befdce63c..0af52ea3671c 100644 --- a/services/api-server/src/simcore_service_api_server/exceptions/handlers/_handlers_factory.py +++ b/services/api-server/src/simcore_service_api_server/exceptions/handlers/_handlers_factory.py @@ -1,9 +1,9 @@ import logging from common_library.error_codes 
import create_error_code +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from fastapi.requests import Request from fastapi.responses import JSONResponse -from servicelib.logging_errors import create_troubleshotting_log_kwargs from ._utils import ExceptionHandler, create_error_json_response @@ -40,7 +40,7 @@ async def _http_error_handler( user_error_msg += f" [{error_code}]" _logger.exception( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( user_error_msg, error=exception, error_code=error_code, diff --git a/services/api-server/src/simcore_service_api_server/exceptions/handlers/_utils.py b/services/api-server/src/simcore_service_api_server/exceptions/handlers/_utils.py index da741fdb8b45..2c0024476cc9 100644 --- a/services/api-server/src/simcore_service_api_server/exceptions/handlers/_utils.py +++ b/services/api-server/src/simcore_service_api_server/exceptions/handlers/_utils.py @@ -1,6 +1,7 @@ from collections.abc import Awaitable, Callable from typing import Any, TypeAlias +from common_library.error_codes import ErrorCodeStr from fastapi.encoders import jsonable_encoder from fastapi.requests import Request from fastapi.responses import JSONResponse @@ -13,13 +14,13 @@ def create_error_json_response( - *errors: Any, status_code: int, **kwargs + *errors: Any, status_code: int, support_id: ErrorCodeStr | None = None, **kwargs ) -> JSONResponse: """ Converts errors to Error response model defined in the OAS """ - error_model = ErrorGet(errors=list(errors)) + error_model = ErrorGet(errors=list(errors), support_id=support_id, **kwargs) return JSONResponse( content=jsonable_encoder(error_model), status_code=status_code, diff --git a/services/api-server/src/simcore_service_api_server/exceptions/task_errors.py b/services/api-server/src/simcore_service_api_server/exceptions/task_errors.py new file mode 100644 index 000000000000..a12282404823 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/exceptions/task_errors.py @@ -0,0 +1,28 @@ +from fastapi import status + +from .backend_errors import BaseBackEndError + + +class TaskSchedulerError(BaseBackEndError): + msg_template: str = "TaskScheduler error" + status_code = status.HTTP_500_INTERNAL_SERVER_ERROR + + +class TaskMissingError(BaseBackEndError): + msg_template: str = "Task {job_id} does not exist" + status_code = status.HTTP_404_NOT_FOUND + + +class TaskResultMissingError(BaseBackEndError): + msg_template: str = "Task {job_id} is not done" + status_code = status.HTTP_404_NOT_FOUND + + +class TaskCancelledError(BaseBackEndError): + msg_template: str = "Task {job_id} is cancelled" + status_code = status.HTTP_409_CONFLICT + + +class TaskError(BaseBackEndError): + msg_template: str = "Task '{job_id}' failed" + status_code = status.HTTP_500_INTERNAL_SERVER_ERROR diff --git a/services/api-server/src/simcore_service_api_server/main.py b/services/api-server/src/simcore_service_api_server/main.py index 8b636ac43150..6b8ccc1783f1 100644 --- a/services/api-server/src/simcore_service_api_server/main.py +++ b/services/api-server/src/simcore_service_api_server/main.py @@ -1,7 +1,38 @@ -"""Main application to be deployed in for example uvicorn. 
-""" +"""Main application to be deployed in for example uvicorn.""" + +import logging +from typing import Final + +from common_library.json_serialization import json_dumps from fastapi import FastAPI -from simcore_service_api_server.core.application import init_app +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event +from simcore_service_api_server.core.application import create_app +from simcore_service_api_server.core.settings import ApplicationSettings + +_logger = logging.getLogger(__name__) + +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aio_pika", + "aiormq", + "httpcore", + "httpx", +) + + +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = create_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.API_SERVER_LOG_FILTER_MAPPING, + tracing_settings=app_settings.API_SERVER_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) -# SINGLETON FastAPI app -the_app: FastAPI = init_app() + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app diff --git a/services/api-server/src/simcore_service_api_server/models/api_resources.py b/services/api-server/src/simcore_service_api_server/models/api_resources.py index 939012bbf571..f796eb0af86f 100644 --- a/services/api-server/src/simcore_service_api_server/models/api_resources.py +++ b/services/api-server/src/simcore_service_api_server/models/api_resources.py @@ -1,8 +1,10 @@ import re import urllib.parse from typing import Annotated, TypeAlias +from uuid import UUID -from pydantic import Field, TypeAdapter +import parse # type: ignore[import-untyped] +from pydantic import AfterValidator, BaseModel, ConfigDict, Field, HttpUrl, TypeAdapter from pydantic.types import StringConstraints # RESOURCE NAMES https://google.aip.dev/122 @@ -26,7 +28,6 @@ # SEE https://tools.ietf.org/html/rfc3986#appendix-B # - _RELATIVE_RESOURCE_NAME_RE = r"^([^\s/]+/?){1,10}$" @@ -91,3 +92,55 @@ def split_resource_name_as_dict( """ parts = split_resource_name(resource_name) return dict(zip(parts[::2], parts[1::2], strict=False)) + + +def _url_missing_only_job_id(url: str | None) -> str | None: + if url is None: + return None + if set(parse.compile(url).named_fields) != {"job_id"}: + raise ValueError(f"Missing job_id in {url=}") + return url + + +class JobLinks(BaseModel): + @staticmethod + def _update_json_schema_extra(schema: dict) -> None: + schema.update( + { + "examples": [ + { + "url_template": "https://api.osparc.io/v0/jobs/{job_id}", + "runner_url_template": "https://runner.osparc.io/dashboard", + "outputs_url_template": "https://api.osparc.io/v0/jobs/{job_id}/outputs", + } + ] + } + ) + + model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) + + url_template: Annotated[str | None, AfterValidator(_url_missing_only_job_id)] + runner_url_template: str | None + outputs_url_template: Annotated[ + str | None, AfterValidator(_url_missing_only_job_id) + ] + + def url(self, job_id: UUID) -> HttpUrl | None: + if self.url_template is None: + return None + return TypeAdapter(HttpUrl).validate_python( + self.url_template.format(job_id=job_id) + ) + + def runner_url(self, job_id: UUID) -> HttpUrl | None: + assert job_id # nosec + if self.runner_url_template is None: + return None + return 
TypeAdapter(HttpUrl).validate_python(self.runner_url_template) + + def outputs_url(self, job_id: UUID) -> HttpUrl | None: + if self.outputs_url_template is None: + return None + return TypeAdapter(HttpUrl).validate_python( + self.outputs_url_template.format(job_id=job_id) + ) diff --git a/services/api-server/src/simcore_service_api_server/models/domain/celery_models.py b/services/api-server/src/simcore_service_api_server/models/domain/celery_models.py new file mode 100644 index 000000000000..520f97949ec0 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/models/domain/celery_models.py @@ -0,0 +1,41 @@ +from typing import Annotated + +from models_library.functions import ( + RegisteredProjectFunction, + RegisteredProjectFunctionJob, + RegisteredPythonCodeFunction, + RegisteredSolverFunction, + RegisteredSolverFunctionJob, +) +from models_library.products import ProductName +from models_library.users import UserID +from pydantic import Field, StringConstraints +from servicelib.celery.models import OwnerMetadata + +from ..._meta import APP_NAME +from ...api.dependencies.authentication import Identity +from ...models.api_resources import JobLinks +from ...models.domain.functions import PreRegisteredFunctionJobData +from ...models.schemas.jobs import JobInputs, JobPricingSpecification + +pydantic_types_to_register = ( + Identity, + JobInputs, + JobLinks, + JobPricingSpecification, + PreRegisteredFunctionJobData, + RegisteredProjectFunction, + RegisteredProjectFunctionJob, + RegisteredPythonCodeFunction, + RegisteredProjectFunctionJob, + RegisteredSolverFunction, + RegisteredSolverFunctionJob, +) + + +class ApiServerOwnerMetadata(OwnerMetadata): + user_id: UserID + product_name: ProductName + owner: Annotated[ + str, StringConstraints(pattern=rf"^{APP_NAME}$"), Field(frozen=True) + ] = APP_NAME diff --git a/services/api-server/src/simcore_service_api_server/models/domain/functions.py b/services/api-server/src/simcore_service_api_server/models/domain/functions.py new file mode 100644 index 000000000000..1f75f7441f29 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/models/domain/functions.py @@ -0,0 +1,22 @@ +from models_library.functions import ( + FunctionJobID, + RegisteredFunctionJob, + RegisteredFunctionJobWithStatus, +) +from pydantic import BaseModel + +from ...models.pagination import Page +from ...models.schemas.jobs import JobInputs + + +class PreRegisteredFunctionJobData(BaseModel): + function_job_id: FunctionJobID + job_inputs: JobInputs + + +class PageRegisteredFunctionJobWithorWithoutStatus( + Page[RegisteredFunctionJobWithStatus | RegisteredFunctionJob] # order is important +): + # This class is created specifically to provide a name for this in openapi.json. + # When using an alias the python-client generates too long file name + pass diff --git a/services/api-server/src/simcore_service_api_server/models/domain/projects.py b/services/api-server/src/simcore_service_api_server/models/domain/projects.py index ae74533546b4..287bf985aa41 100644 --- a/services/api-server/src/simcore_service_api_server/models/domain/projects.py +++ b/services/api-server/src/simcore_service_api_server/models/domain/projects.py @@ -4,12 +4,6 @@ from models_library.projects_nodes import InputTypes, Node, OutputTypes from models_library.projects_nodes_io import SimCoreFileLink -assert AccessRights # nosec -assert InputTypes # nosec -assert Node # nosec -assert OutputTypes # nosec -assert SimCoreFileLink # nosec - __all__: tuple[str, ...] 
= ( "AccessRights", "InputTypes", @@ -17,3 +11,5 @@ "OutputTypes", "SimCoreFileLink", ) + +# nopycln: file diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/_base.py b/services/api-server/src/simcore_service_api_server/models/schemas/base.py similarity index 93% rename from services/api-server/src/simcore_service_api_server/models/schemas/_base.py rename to services/api-server/src/simcore_service_api_server/models/schemas/base.py index 07144ba5b766..6abe72ca9f14 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/_base.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/base.py @@ -1,12 +1,12 @@ import urllib.parse -from typing import Annotated +from typing import Annotated, Generic, TypeVar import packaging.version from models_library.utils.change_case import camel_to_snake from models_library.utils.common_validators import trim_string_before from pydantic import BaseModel, ConfigDict, Field, HttpUrl, StringConstraints -from ...models._utils_pydantic import UriSchema +from .._utils_pydantic import UriSchema from ..basic_types import VersionStr @@ -83,3 +83,10 @@ def name(self) -> str: @classmethod def compose_resource_name(cls, key: str, version: str) -> str: raise NotImplementedError("Subclasses must implement this method") + + +DataT = TypeVar("DataT") + + +class ApiServerEnvelope(BaseModel, Generic[DataT]): + data: DataT diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/errors.py b/services/api-server/src/simcore_service_api_server/models/schemas/errors.py index 3243f5e44b98..69d6f7d67003 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/errors.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/errors.py @@ -1,5 +1,6 @@ from typing import Any +from common_library.error_codes import ErrorCodeStr from pydantic import BaseModel, ConfigDict @@ -10,6 +11,7 @@ class ErrorGet(BaseModel): # - https://github.com/ITISFoundation/osparc-simcore/issues/2520 # - https://github.com/ITISFoundation/osparc-simcore/issues/2446 errors: list[Any] + support_id: ErrorCodeStr | None = None model_config = ConfigDict( json_schema_extra={ diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/files.py b/services/api-server/src/simcore_service_api_server/models/schemas/files.py index ebfee726adbc..5c6ea3b3bd89 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/files.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/files.py @@ -18,7 +18,7 @@ from .._utils_pydantic import UriSchema from ..domain.files import File as DomainFile from ..domain.files import FileName -from ._base import ApiServerInputSchema, ApiServerOutputSchema +from .base import ApiServerInputSchema, ApiServerOutputSchema class UserFile(ApiServerInputSchema): diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/functions.py b/services/api-server/src/simcore_service_api_server/models/schemas/functions.py new file mode 100644 index 000000000000..d31f2e5af476 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/models/schemas/functions.py @@ -0,0 +1,54 @@ +# pylint: disable=protected-access + +from enum import StrEnum +from typing import Annotated, Final + +from models_library.functions import FunctionID, FunctionJobCollectionID, FunctionJobID +from pydantic import BaseModel, ConfigDict, Field +from servicelib.celery.models import TaskState + +_JOB_TASK_RUN_STATUS_PREFIX: 
Final[str] = "JOB_TASK_RUN_STATUS_" + + +class FunctionJobsListFilters(BaseModel): + """Filters for listing function jobs""" + + function_id: Annotated[ + FunctionID | None, + Field( + description="Filter by function ID pattern", + ), + ] = None + + function_job_ids: Annotated[ + list[FunctionJobID] | None, + Field( + description="Filter by function job IDs", + ), + ] = None + + function_job_collection_id: Annotated[ + FunctionJobCollectionID | None, + Field( + description="Filter by function job collection ID", + ), + ] = None + + model_config = ConfigDict( + extra="ignore", + ) + + +class FunctionJobCreationTaskStatus(StrEnum): + PENDING = f"{_JOB_TASK_RUN_STATUS_PREFIX}PENDING" + STARTED = f"{_JOB_TASK_RUN_STATUS_PREFIX}STARTED" + RETRY = f"{_JOB_TASK_RUN_STATUS_PREFIX}RETRY" + SUCCESS = f"{_JOB_TASK_RUN_STATUS_PREFIX}SUCCESS" + FAILURE = f"{_JOB_TASK_RUN_STATUS_PREFIX}FAILURE" + NOT_YET_SCHEDULED = "JOB_TASK_NOT_YET_SCHEDULED" # api-server custom status + ERROR = "JOB_TASK_CREATION_FAILURE" # api-server custom status + + +assert {elm._name_ for elm in TaskState}.union({"NOT_YET_SCHEDULED", "ERROR"}) == { + elm._name_ for elm in FunctionJobCreationTaskStatus +} diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py b/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py index 86abb0a87414..1e3c6d6d0e68 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py @@ -1,7 +1,6 @@ import datetime import hashlib import logging -from collections.abc import Callable from pathlib import Path from typing import Annotated, TypeAlias from uuid import UUID, uuid4 @@ -39,7 +38,8 @@ from ..domain.files import File as DomainFile from ..domain.files import FileInProgramJobData from ..schemas.files import UserFile -from ._base import ApiServerInputSchema +from .base import ApiServerInputSchema +from .programs import ProgramKeyId # JOB SUB-RESOURCES ---------- # @@ -47,8 +47,6 @@ # - Input/outputs are defined in service metadata # - custom metadata # -from .programs import Program, ProgramKeyId -from .solvers import Solver JobID: TypeAlias = UUID @@ -324,44 +322,6 @@ def resource_name(self) -> str: return self.name -def get_url( - solver_or_program: Solver | Program, url_for: Callable[..., HttpUrl], job_id: JobID -) -> HttpUrl | None: - if isinstance(solver_or_program, Solver): - return url_for( - "get_job", - solver_key=solver_or_program.id, - version=solver_or_program.version, - job_id=job_id, - ) - return None - - -def get_runner_url( - solver_or_program: Solver | Program, url_for: Callable[..., HttpUrl] -) -> HttpUrl | None: - if isinstance(solver_or_program, Solver): - return url_for( - "get_solver_release", - solver_key=solver_or_program.id, - version=solver_or_program.version, - ) - return None - - -def get_outputs_url( - solver_or_program: Solver | Program, url_for: Callable[..., HttpUrl], job_id: JobID -) -> HttpUrl | None: - if isinstance(solver_or_program, Solver): - return url_for( - "get_job_outputs", - solver_key=solver_or_program.id, - version=solver_or_program.version, - job_id=job_id, - ) - return None - - PercentageInt: TypeAlias = Annotated[int, Field(ge=0, le=100)] diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/profiles.py b/services/api-server/src/simcore_service_api_server/models/schemas/profiles.py index 76b283aa4a99..e70ad1c262d1 100644 --- 
a/services/api-server/src/simcore_service_api_server/models/schemas/profiles.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/profiles.py @@ -13,8 +13,7 @@ class ProfileCommon(BaseModel): last_name: LastNameStr | None = Field(None, examples=["Maxwell"]) -class ProfileUpdate(ProfileCommon): - ... +class ProfileUpdate(ProfileCommon): ... class UserRoleEnum(StrAutoEnum): diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/programs.py b/services/api-server/src/simcore_service_api_server/models/schemas/programs.py index 305f9eb28cf7..ca3e0c564ddd 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/programs.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/programs.py @@ -9,7 +9,7 @@ from ..api_resources import compose_resource_name from ..basic_types import VersionStr -from ._base import ( +from .base import ( ApiServerOutputSchema, BaseService, ) diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/solvers.py b/services/api-server/src/simcore_service_api_server/models/schemas/solvers.py index 63a383c4f2b1..d9b70e68f84c 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/solvers.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/solvers.py @@ -13,8 +13,8 @@ from models_library.services_types import ServiceKey from pydantic import BaseModel, ConfigDict, Field, StringConstraints -from ...models.schemas._base import BaseService from ..api_resources import compose_resource_name +from .base import BaseService # NOTE: # - API does NOT impose prefix (simcore)/(services)/comp because does not know anything about registry deployed. This constraint diff --git a/services/api-server/src/simcore_service_api_server/services_http/jobs.py b/services/api-server/src/simcore_service_api_server/services_http/jobs.py index ed2ef50d5881..32cbe4b4cd6c 100644 --- a/services/api-server/src/simcore_service_api_server/services_http/jobs.py +++ b/services/api-server/src/simcore_service_api_server/services_http/jobs.py @@ -1,15 +1,12 @@ import logging -from typing import Annotated from uuid import UUID -from fastapi import Depends, HTTPException, Request, status from models_library.api_schemas_webserver.projects import ProjectGet -from pydantic import HttpUrl, PositiveInt +from models_library.users import UserID +from pydantic import HttpUrl from servicelib.logging_utils import log_context -from ..api.dependencies.authentication import get_current_user_id -from ..api.dependencies.services import get_api_client -from ..api.dependencies.webserver_http import get_webserver_session +from ..exceptions.backend_errors import InvalidInputError from ..models.schemas.jobs import ( JobID, JobMetadata, @@ -28,20 +25,17 @@ def raise_if_job_not_associated_with_solver( expected_project_name: str, project: ProjectGet ) -> None: if expected_project_name != project.name: - raise HTTPException( - status.HTTP_422_UNPROCESSABLE_ENTITY, - detail=f"Invalid input data for job {project.uuid}", - ) + raise InvalidInputError() async def start_project( *, - request: Request, job_id: JobID, expected_job_name: str, - webserver_api: Annotated[AuthSession, Depends(get_webserver_session)], + pricing_spec: JobPricingSpecification | None, + webserver_api: AuthSession, ) -> None: - if pricing_spec := JobPricingSpecification.create_from_headers(request.headers): + if pricing_spec is not None: with log_context(_logger, logging.DEBUG, "Set pricing plan and unit"): project: 
ProjectGet = await webserver_api.get_project(project_id=job_id) raise_if_job_not_associated_with_solver(expected_job_name, project) @@ -60,8 +54,8 @@ async def start_project( async def stop_project( *, job_id: JobID, - user_id: Annotated[PositiveInt, Depends(get_current_user_id)], - director2_api: Annotated[DirectorV2Api, Depends(get_api_client(DirectorV2Api))], + user_id: UserID, + director2_api: DirectorV2Api, ) -> JobStatus: await director2_api.stop_computation(project_id=job_id, user_id=user_id) diff --git a/services/api-server/src/simcore_service_api_server/services_http/log_streaming.py b/services/api-server/src/simcore_service_api_server/services_http/log_streaming.py index 1fcf65bdf423..7d925aed9971 100644 --- a/services/api-server/src/simcore_service_api_server/services_http/log_streaming.py +++ b/services/api-server/src/simcore_service_api_server/services_http/log_streaming.py @@ -5,10 +5,10 @@ from typing import Final from common_library.error_codes import create_error_code +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from models_library.rabbitmq_messages import LoggerRabbitMessage from models_library.users import UserID from pydantic import NonNegativeInt -from servicelib.logging_errors import create_troubleshotting_log_kwargs from servicelib.logging_utils import log_catch from servicelib.rabbitmq import QueueName, RabbitMQClient @@ -139,7 +139,7 @@ async def log_generator(self) -> AsyncIterable[str]: error_msg = MSG_INTERNAL_ERROR_USER_FRIENDLY_TEMPLATE + f" [{error_code}]" _logger.exception( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( error_msg, error=exc, error_code=error_code, diff --git a/services/api-server/src/simcore_service_api_server/services_http/solver_job_models_converters.py b/services/api-server/src/simcore_service_api_server/services_http/solver_job_models_converters.py index 3a6728f478a8..d57d9614a83f 100644 --- a/services/api-server/src/simcore_service_api_server/services_http/solver_job_models_converters.py +++ b/services/api-server/src/simcore_service_api_server/services_http/solver_job_models_converters.py @@ -11,10 +11,11 @@ import arrow from models_library.api_schemas_webserver.projects import ProjectCreateNew, ProjectGet from models_library.api_schemas_webserver.projects_ui import StudyUI -from models_library.basic_types import KeyIDStr +from models_library.basic_types import KeyIDStr, VersionStr from models_library.projects import Project from models_library.projects_nodes import InputID -from pydantic import HttpUrl, TypeAdapter +from pydantic import TypeAdapter +from simcore_service_api_server.models.api_resources import JobLinks from ..models.domain.projects import InputTypes, Node, SimCoreFileLink from ..models.schemas.files import File @@ -24,12 +25,9 @@ JobInputs, JobStatus, PercentageInt, - get_outputs_url, - get_runner_url, - get_url, ) from ..models.schemas.programs import Program -from ..models.schemas.solvers import Solver +from ..models.schemas.solvers import Solver, SolverKeyId from .director_v2 import ComputationTaskGet # UTILS ------ @@ -184,11 +182,35 @@ def create_new_project_for_job( ) +def get_solver_job_rest_interface_links( + *, url_for: Callable, solver_key: SolverKeyId, version: VersionStr +) -> JobLinks: + return JobLinks( + url_template=url_for( + "get_job", + solver_key=solver_key, + version=version, + job_id="{job_id}", + ), + runner_url_template=url_for( + "get_solver_release", + solver_key=solver_key, + version=version, + ), + 
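+        # "{job_id}" stays a literal placeholder in the generated templates; JobLinks.url()
+        # and JobLinks.outputs_url() substitute it once the job id is known (enforced by the
+        # _url_missing_only_job_id validator in models/api_resources.py).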
outputs_url_template=url_for( + "get_job_outputs", + solver_key=solver_key, + version=version, + job_id="{job_id}", + ), + ) + + def create_job_from_project( *, solver_or_program: Solver | Program, project: ProjectGet | Project, - url_for: Callable[..., HttpUrl], + job_links: JobLinks, ) -> Job: """ Given a project, creates a job @@ -218,13 +240,9 @@ def create_job_from_project( inputs_checksum=job_inputs.compute_checksum(), created_at=project.creation_date, # type: ignore[arg-type] runner_name=solver_or_program_name, - url=get_url( - solver_or_program=solver_or_program, url_for=url_for, job_id=job_id - ), - runner_url=get_runner_url(solver_or_program=solver_or_program, url_for=url_for), - outputs_url=get_outputs_url( - solver_or_program=solver_or_program, url_for=url_for, job_id=job_id - ), + url=job_links.url(job_id=job_id), + runner_url=job_links.runner_url(job_id=job_id), + outputs_url=job_links.outputs_url(job_id=job_id), ) diff --git a/services/api-server/src/simcore_service_api_server/services_http/solver_job_outputs.py b/services/api-server/src/simcore_service_api_server/services_http/solver_job_outputs.py index 4554bc0ccc35..f225747b4271 100644 --- a/services/api-server/src/simcore_service_api_server/services_http/solver_job_outputs.py +++ b/services/api-server/src/simcore_service_api_server/services_http/solver_job_outputs.py @@ -1,13 +1,14 @@ import logging from typing import Any, TypeAlias -from models_library.projects import ProjectID, ProjectIDStr -from models_library.projects_nodes_io import BaseFileLink, NodeID, NodeIDStr +from models_library.projects import ProjectID +from models_library.projects_nodes_io import BaseFileLink, NodeID from pydantic import StrictBool, StrictFloat, StrictInt, TypeAdapter from simcore_sdk import node_ports_v2 from simcore_sdk.node_ports_v2 import DBManager, Nodeports from sqlalchemy.ext.asyncio import AsyncEngine +from .._meta import APP_NAME from ..exceptions.backend_errors import SolverOutputNotFoundError log = logging.getLogger(__name__) @@ -26,13 +27,13 @@ async def get_solver_output_results( """ # get the DB engine - db_manager = DBManager(db_engine=db_engine) + db_manager = DBManager(db_engine=db_engine, application_name=APP_NAME) try: solver: Nodeports = await node_ports_v2.ports( user_id=user_id, - project_id=ProjectIDStr(f"{project_uuid}"), - node_uuid=NodeIDStr(f"{node_uuid}"), + project_id=f"{project_uuid}", + node_uuid=f"{node_uuid}", db_manager=db_manager, ) solver_output_results: dict[str, Any] = {} diff --git a/services/api-server/src/simcore_service_api_server/services_http/storage.py b/services/api-server/src/simcore_service_api_server/services_http/storage.py index ae82609a58aa..fc0cf744f962 100644 --- a/services/api-server/src/simcore_service_api_server/services_http/storage.py +++ b/services/api-server/src/simcore_service_api_server/services_http/storage.py @@ -1,35 +1,59 @@ import logging import re import urllib.parse +from datetime import timedelta from functools import partial from mimetypes import guess_type -from typing import Literal +from typing import Final, Literal from uuid import UUID from fastapi import FastAPI from fastapi.encoders import jsonable_encoder +from httpx import QueryParams from models_library.api_schemas_storage.storage_schemas import ( + ETag, FileMetaDataArray, ) from models_library.api_schemas_storage.storage_schemas import ( FileMetaDataGet as StorageFileMetaData, ) from models_library.api_schemas_storage.storage_schemas import ( + FileUploadCompleteFutureResponse, + FileUploadCompleteResponse, 
+ FileUploadCompleteState, + FileUploadCompletionBody, FileUploadSchema, + LinkType, PresignedLink, + UploadedPart, ) from models_library.basic_types import SHA256Str from models_library.generics import Envelope +from models_library.projects import ProjectID from models_library.rest_pagination import PageLimitInt, PageOffsetInt +from models_library.users import UserID from pydantic import AnyUrl from settings_library.tracing import TracingSettings -from starlette.datastructures import URL +from simcore_service_api_server.exceptions.backend_errors import BackendTimeoutError +from simcore_service_api_server.models.schemas.files import UserFile +from simcore_service_api_server.models.schemas.jobs import UserFileToProgramJob +from tenacity import ( + AsyncRetrying, + TryAgain, + before_sleep_log, + retry_if_exception_type, + stop_after_delay, + wait_fixed, +) from ..core.settings import StorageSettings from ..exceptions.service_errors_utils import service_exception_mapper from ..models.domain.files import File from ..utils.client_base import BaseServiceClientApi, setup_client_instance +_POLL_TIMEOUT: Final[timedelta] = timedelta(minutes=10) + + _logger = logging.getLogger(__name__) _exception_mapper = partial(service_exception_mapper, service_name="Storage") @@ -157,15 +181,22 @@ async def delete_file(self, *, user_id: int, quoted_storage_file_id: str) -> Non response.raise_for_status() @_exception_mapper(http_status_map={}) - async def get_upload_links( - self, *, user_id: int, file_id: UUID, file_name: str + async def get_file_upload_links( + self, *, user_id: int, file: File, client_file: UserFileToProgramJob | UserFile ) -> FileUploadSchema: - object_path = urllib.parse.quote_plus(f"api/{file_id}/{file_name}") + + query_params = QueryParams( + user_id=f"{user_id}", + link_type=LinkType.PRESIGNED.value, + file_size=int(client_file.filesize), + is_directory="false", + sha256_checksum=f"{client_file.sha256_checksum}", + ) # complete_upload_file response = await self.client.put( - f"/locations/{self.SIMCORE_S3_ID}/files/{object_path}", - params={"user_id": user_id, "file_size": 0}, + f"/locations/{self.SIMCORE_S3_ID}/files/{file.quoted_storage_file_id}", + params=query_params, ) response.raise_for_status() @@ -173,25 +204,58 @@ async def get_upload_links( assert enveloped_data.data # nosec return enveloped_data.data - async def create_complete_upload_link( - self, *, file: File, query: dict[str, str] | None = None - ) -> URL: - url = URL( - f"{self.client.base_url}locations/{self.SIMCORE_S3_ID}/files/{file.quoted_storage_file_id}:complete" + @_exception_mapper(http_status_map={}) + async def complete_file_upload( + self, *, user_id: int, file: File, uploaded_parts: list[UploadedPart] + ) -> ETag: + + response = await self.client.post( + f"/locations/{self.SIMCORE_S3_ID}/files/{file.storage_file_id}:complete", + params={"user_id": f"{user_id}"}, + json=jsonable_encoder(FileUploadCompletionBody(parts=uploaded_parts)), ) - if query is not None: - url = url.include_query_params(**query) - return url + response.raise_for_status() + file_upload_complete_response = Envelope[ + FileUploadCompleteResponse + ].model_validate_json(response.text) + assert file_upload_complete_response.data # nosec + state_url = f"{file_upload_complete_response.data.links.state}" + try: + async for attempt in AsyncRetrying( + reraise=True, + wait=wait_fixed(1), + stop=stop_after_delay(_POLL_TIMEOUT), + retry=retry_if_exception_type(TryAgain), + before_sleep=before_sleep_log(_logger, logging.DEBUG), + ): + with attempt: + 
resp = await self.client.post(state_url) + resp.raise_for_status() + future_enveloped = Envelope[ + FileUploadCompleteFutureResponse + ].model_validate_json(resp.text) + assert future_enveloped.data # nosec + if future_enveloped.data.state == FileUploadCompleteState.NOK: + raise TryAgain() + + assert future_enveloped.data.e_tag # nosec + _logger.debug( + "multipart upload completed in %s, received %s", + attempt.retry_state.retry_object.statistics, + f"{future_enveloped.data.e_tag=}", + ) + return future_enveloped.data.e_tag + except TryAgain as exc: + raise BackendTimeoutError() from exc + raise BackendTimeoutError() - async def create_abort_upload_link( - self, *, file: File, query: dict[str, str] | None = None - ) -> URL: - url = URL( - f"{self.client.base_url}locations/{self.SIMCORE_S3_ID}/files/{file.quoted_storage_file_id}:abort" + @_exception_mapper(http_status_map={}) + async def abort_file_upload(self, *, user_id: int, file: File) -> None: + response = await self.client.post( + f"/locations/{self.SIMCORE_S3_ID}/files/{file.quoted_storage_file_id}:abort", + params={"user_id": f"{user_id}"}, ) - if query is not None: - url = url.include_query_params(**query) - return url + response.raise_for_status() @_exception_mapper(http_status_map={}) async def create_soft_link( @@ -222,6 +286,16 @@ async def create_soft_link( file_meta: File = to_file_api_model(stored_file_meta) return file_meta + @_exception_mapper(http_status_map={}) + async def delete_project_s3_assets( + self, user_id: UserID, project_id: ProjectID + ) -> None: + response = await self.client.delete( + f"/simcore-s3/folders/{project_id}", + params={"user_id": user_id}, + ) + response.raise_for_status() + # MODULES APP SETUP ------------------------------------------------------------- diff --git a/services/api-server/src/simcore_service_api_server/services_http/study_job_models_converters.py b/services/api-server/src/simcore_service_api_server/services_http/study_job_models_converters.py index 99bb5a59ae97..d73fd5fac4ec 100644 --- a/services/api-server/src/simcore_service_api_server/services_http/study_job_models_converters.py +++ b/services/api-server/src/simcore_service_api_server/services_http/study_job_models_converters.py @@ -3,6 +3,7 @@ services/api-server/src/simcore_service_api_server/api/routes/studies_jobs.py """ +from collections.abc import Callable from typing import Any, NamedTuple from uuid import UUID @@ -16,6 +17,7 @@ from models_library.projects_nodes_io import LinkToFileTypes, NodeID, SimcoreS3FileID from pydantic import TypeAdapter +from ..models.api_resources import JobLinks from ..models.domain.files import File from ..models.domain.projects import InputTypes, SimCoreFileLink from ..models.schemas.jobs import Job, JobInputs, JobOutputs @@ -57,10 +59,26 @@ def get_project_and_file_inputs_from_job_inputs( return ProjectInputs(new_inputs, file_inputs) +def get_study_job_rest_interface_links( + *, url_for: Callable, study_id: StudyID +) -> JobLinks: + return JobLinks( + url_template=url_for( + "get_study_job", + study_id=study_id, + job_id="{job_id}", + ), + runner_url_template=url_for("get_study", study_id=study_id), + outputs_url_template=url_for( + "get_study_job_outputs", + study_id=study_id, + job_id="{job_id}", + ), + ) + + def create_job_from_study( - study_key: StudyID, - project: ProjectGet, - job_inputs: JobInputs, + study_key: StudyID, project: ProjectGet, job_inputs: JobInputs, job_links: JobLinks ) -> Job: """ Given a study, creates a job @@ -78,9 +96,9 @@ def create_job_from_study( 
inputs_checksum=job_inputs.compute_checksum(), created_at=DateTimeStr.to_datetime(project.creation_date), runner_name=study_name, - url=None, - runner_url=None, - outputs_url=None, + url=job_links.url(job_id=project.uuid), + runner_url=job_links.runner_url(job_id=project.uuid), + outputs_url=job_links.outputs_url(job_id=project.uuid), ) diff --git a/services/api-server/src/simcore_service_api_server/services_http/webserver.py b/services/api-server/src/simcore_service_api_server/services_http/webserver.py index 7751ffb382fb..c564c29cbd0d 100644 --- a/services/api-server/src/simcore_service_api_server/services_http/webserver.py +++ b/services/api-server/src/simcore_service_api_server/services_http/webserver.py @@ -2,10 +2,10 @@ import logging import urllib.parse -from collections.abc import Mapping from dataclasses import dataclass +from datetime import timedelta from functools import partial -from typing import Any +from typing import Any, Final, Self from uuid import UUID import httpx @@ -30,24 +30,27 @@ ProjectInputUpdate, ) from models_library.api_schemas_webserver.resource_usage import PricingPlanGet -from models_library.api_schemas_webserver.users import MyProfileGet as WebProfileGet +from models_library.api_schemas_webserver.users import MyProfileRestGet as WebProfileGet from models_library.api_schemas_webserver.users import ( - MyProfilePatch as WebProfileUpdate, + MyProfileRestPatch as WebProfileUpdate, ) from models_library.api_schemas_webserver.wallets import WalletGet from models_library.generics import Envelope +from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.rest_pagination import Page, PageLimitInt, PageOffsetInt +from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import PositiveInt -from servicelib.aiohttp.long_running_tasks.server import TaskStatus from servicelib.common_headers import ( X_SIMCORE_PARENT_NODE_ID, X_SIMCORE_PARENT_PROJECT_UUID, ) +from servicelib.long_running_tasks.models import TaskStatus +from servicelib.rest_constants import X_PRODUCT_NAME_HEADER from settings_library.tracing import TracingSettings -from tenacity import TryAgain +from tenacity import TryAgain, retry_if_exception_type from tenacity.asyncio import AsyncRetrying from tenacity.before_sleep import before_sleep_log from tenacity.stop import stop_after_delay @@ -92,6 +95,8 @@ _exception_mapper = partial(service_exception_mapper, service_name="Webserver") +_POLL_TIMEOUT: Final[timedelta] = timedelta(minutes=10) + _JOB_STATUS_MAP = { status.HTTP_402_PAYMENT_REQUIRED: PaymentRequiredError, status.HTTP_404_NOT_FOUND: JobNotFoundError, @@ -128,7 +133,7 @@ class LongRunningTasksClient(BaseServiceClientApi): "Client for requesting status and results of long running tasks" -@dataclass +@dataclass(frozen=True) class AuthSession: """ - wrapper around thin-client to simplify webserver's API @@ -140,8 +145,12 @@ class AuthSession: SEE services/api-server/src/simcore_service_api_server/api/dependencies/webserver.py """ + _product_name: ProductName + _user_id: UserID + _api: WebserverApi _long_running_task_client: LongRunningTasksClient + vtag: str session_cookies: dict | None = None @@ -150,26 +159,47 @@ def create( cls, app: FastAPI, session_cookies: dict, - product_extra_headers: Mapping[str, str], - ) -> "AuthSession": - api = WebserverApi.get_instance(app) - assert api # nosec - assert isinstance(api, WebserverApi) # 
nosec - - api.client.headers = product_extra_headers # type: ignore[assignment] - long_running_tasks_client = LongRunningTasksClient.get_instance(app=app) - - assert long_running_tasks_client # nosec - assert isinstance(long_running_tasks_client, LongRunningTasksClient) # nosec + user_id: UserID, + product_name: ProductName, + ) -> Self: + + # WARNING: this client lifespan is tied to the app + app_http_webserver_client = WebserverApi.get_instance(app) + assert app_http_webserver_client # nosec + assert isinstance(app_http_webserver_client, WebserverApi) # nosec + + # WARNING: this client lifespan is tied to the app + app_http_lrt_webserver_client = LongRunningTasksClient.get_instance(app=app) + assert app_http_lrt_webserver_client # nosec + assert isinstance( + app_http_lrt_webserver_client, LongRunningTasksClient + ) # nosec - long_running_tasks_client.client.headers = product_extra_headers # type: ignore[assignment] return cls( - _api=api, - _long_running_task_client=long_running_tasks_client, + _product_name=product_name, + _user_id=user_id, + _api=app_http_webserver_client, + _long_running_task_client=app_http_lrt_webserver_client, vtag=app.state.settings.API_SERVER_WEBSERVER.WEBSERVER_VTAG, session_cookies=session_cookies, ) + def _get_session_headers( + self, + *, + parent_project_uuid: ProjectID | None = None, + parent_node_id: NodeID | None = None, + ) -> dict[str, str]: + headers = {X_PRODUCT_NAME_HEADER: self._product_name} + + if parent_project_uuid is not None: + headers[X_SIMCORE_PARENT_PROJECT_UUID] = str(parent_project_uuid) + + if parent_node_id is not None: + headers[X_SIMCORE_PARENT_NODE_ID] = str(parent_node_id) + + return headers + # OPERATIONS @property @@ -212,6 +242,7 @@ async def _page_projects( **optional, }, cookies=self.session_cookies, + headers=self._get_session_headers(), ) resp.raise_for_status() @@ -223,15 +254,20 @@ async def _wait_for_long_running_task_results(self, lrt_response: httpx.Response # GET task status now until done async for attempt in AsyncRetrying( wait=wait_fixed(0.5), - stop=stop_after_delay(60), + stop=stop_after_delay(_POLL_TIMEOUT), reraise=True, + retry=retry_if_exception_type(TryAgain), before_sleep=before_sleep_log(_logger, logging.INFO), ): with attempt: get_response = await self.long_running_task_client.get( - url=status_url, cookies=self.session_cookies + url=status_url, + cookies=self.session_cookies, + headers=self._get_session_headers(), + ) + get_response.raise_for_status( + # NOTE: stops retrying if the response is not 2xx ) - get_response.raise_for_status() task_status = ( Envelope[TaskStatus].model_validate_json(get_response.text).data ) @@ -241,7 +277,9 @@ async def _wait_for_long_running_task_results(self, lrt_response: httpx.Response raise TryAgain(msg) result_response = await self.long_running_task_client.get( - f"{result_url}", cookies=self.session_cookies + f"{result_url}", + cookies=self.session_cookies, + headers=self._get_session_headers(), ) result_response.raise_for_status() return Envelope.model_validate_json(result_response.text).data @@ -250,7 +288,11 @@ async def _wait_for_long_running_task_results(self, lrt_response: httpx.Response @_exception_mapper(http_status_map=_PROFILE_STATUS_MAP) async def get_me(self) -> Profile: - response = await self.client.get("/me", cookies=self.session_cookies) + response = await self.client.get( + "/me", + cookies=self.session_cookies, + headers=self._get_session_headers(), + ) response.raise_for_status() got: WebProfileGet | None = ( @@ -281,6 +323,7 @@ async def 
update_me(self, *, profile_update: ProfileUpdate) -> Profile: "/me", json=update.model_dump(exclude_unset=True), cookies=self.session_cookies, + headers=self._get_session_headers(), ) response.raise_for_status() profile: Profile = await self.get_me() @@ -299,17 +342,15 @@ async def create_project( ) -> ProjectGet: # POST /projects --> 202 Accepted query_params = {"hidden": is_hidden} - headers = { - X_SIMCORE_PARENT_PROJECT_UUID: parent_project_uuid, - X_SIMCORE_PARENT_NODE_ID: parent_node_id, - } response = await self.client.post( "/projects", params=query_params, - headers={k: f"{v}" for k, v in headers.items() if v is not None}, json=jsonable_encoder(project, by_alias=True, exclude={"state"}), cookies=self.session_cookies, + headers=self._get_session_headers( + parent_project_uuid=parent_project_uuid, parent_node_id=parent_node_id + ), ) response.raise_for_status() result = await self._wait_for_long_running_task_results(response) @@ -326,16 +367,14 @@ async def clone_project( ) -> ProjectGet: # POST /projects --> 202 Accepted query_params = {"from_study": project_id, "hidden": hidden} - _headers = { - X_SIMCORE_PARENT_PROJECT_UUID: parent_project_uuid, - X_SIMCORE_PARENT_NODE_ID: parent_node_id, - } response = await self.client.post( "/projects", cookies=self.session_cookies, params=query_params, - headers={k: f"{v}" for k, v in _headers.items() if v is not None}, + headers=self._get_session_headers( + parent_project_uuid=parent_project_uuid, parent_node_id=parent_node_id + ), ) response.raise_for_status() result = await self._wait_for_long_running_task_results(response) @@ -346,6 +385,7 @@ async def get_project(self, *, project_id: UUID) -> ProjectGet: response = await self.client.get( f"/projects/{project_id}", cookies=self.session_cookies, + headers=self._get_session_headers(), ) response.raise_for_status() data = Envelope[ProjectGet].model_validate_json(response.text).data @@ -376,6 +416,7 @@ async def delete_project(self, *, project_id: ProjectID) -> None: response = await self.client.delete( f"/projects/{project_id}", cookies=self.session_cookies, + headers=self._get_session_headers(), ) response.raise_for_status() @@ -392,6 +433,7 @@ async def get_project_metadata_ports( response = await self.client.get( f"/projects/{project_id}/metadata/ports", cookies=self.session_cookies, + headers=self._get_session_headers(), ) response.raise_for_status() data = Envelope[list[StudyPort]].model_validate_json(response.text).data @@ -408,6 +450,7 @@ async def get_project_metadata( response = await self.client.get( f"/projects/{project_id}/metadata", cookies=self.session_cookies, + headers=self._get_session_headers(), ) response.raise_for_status() data = Envelope[ProjectMetadataGet].model_validate_json(response.text).data @@ -419,6 +462,7 @@ async def patch_project(self, *, project_id: UUID, patch_params: ProjectPatch): response = await self.client.patch( f"/projects/{project_id}", cookies=self.session_cookies, + headers=self._get_session_headers(), json=jsonable_encoder(patch_params, exclude_unset=True), ) response.raise_for_status() @@ -432,6 +476,7 @@ async def update_project_metadata( response = await self.client.patch( f"/projects/{project_id}/metadata", cookies=self.session_cookies, + headers=self._get_session_headers(), json=jsonable_encoder(ProjectMetadataUpdate(custom=metadata)), ) response.raise_for_status() @@ -448,6 +493,7 @@ async def get_project_node_pricing_unit( response = await self.client.get( f"/projects/{project_id}/nodes/{node_id}/pricing-unit", cookies=self.session_cookies, 
+ headers=self._get_session_headers(), ) response.raise_for_status() @@ -469,6 +515,7 @@ async def connect_pricing_unit_to_project_node( response = await self.client.put( f"/projects/{project_id}/nodes/{node_id}/pricing-plan/{pricing_plan}/pricing-unit/{pricing_unit}", cookies=self.session_cookies, + headers=self._get_session_headers(), ) response.raise_for_status() @@ -491,6 +538,7 @@ async def start_project( response = await self.client.post( f"/computations/{project_id}:start", cookies=self.session_cookies, + headers=self._get_session_headers(), json=jsonable_encoder(body, exclude_unset=True, exclude_defaults=True), ) response.raise_for_status() @@ -505,6 +553,7 @@ async def update_project_inputs( response = await self.client.patch( f"/projects/{project_id}/inputs", cookies=self.session_cookies, + headers=self._get_session_headers(), json=jsonable_encoder(new_inputs), ) response.raise_for_status() @@ -523,6 +572,7 @@ async def get_project_inputs( response = await self.client.get( f"/projects/{project_id}/inputs", cookies=self.session_cookies, + headers=self._get_session_headers(), ) response.raise_for_status() @@ -544,6 +594,7 @@ async def get_project_outputs( response = await self.client.get( f"/projects/{project_id}/outputs", cookies=self.session_cookies, + headers=self._get_session_headers(), ) response.raise_for_status() @@ -563,6 +614,7 @@ async def update_node_outputs( response = await self.client.patch( f"/projects/{project_id}/nodes/{node_id}/outputs", cookies=self.session_cookies, + headers=self._get_session_headers(), json=jsonable_encoder(new_node_outputs), ) response.raise_for_status() @@ -574,6 +626,7 @@ async def get_default_wallet(self) -> WalletGetWithAvailableCreditsLegacy: response = await self.client.get( "/wallets/default", cookies=self.session_cookies, + headers=self._get_session_headers(), ) response.raise_for_status() data = ( @@ -591,6 +644,7 @@ async def get_wallet( response = await self.client.get( f"/wallets/{wallet_id}", cookies=self.session_cookies, + headers=self._get_session_headers(), ) response.raise_for_status() data = ( @@ -606,6 +660,7 @@ async def get_project_wallet(self, *, project_id: ProjectID) -> WalletGet: response = await self.client.get( f"/projects/{project_id}/wallet", cookies=self.session_cookies, + headers=self._get_session_headers(), ) response.raise_for_status() data = Envelope[WalletGet].model_validate_json(response.text).data @@ -621,6 +676,7 @@ async def get_product_price(self) -> GetCreditPriceLegacy: response = await self.client.get( "/credits-price", cookies=self.session_cookies, + headers=self._get_session_headers(), ) response.raise_for_status() data = Envelope[GetCreditPriceLegacy].model_validate_json(response.text).data @@ -640,6 +696,7 @@ async def get_service_pricing_plan( response = await self.client.get( f"/catalog/services/{service_key}/{version}/pricing-plan", cookies=self.session_cookies, + headers=self._get_session_headers(), ) response.raise_for_status() pricing_plan_get = ( diff --git a/services/api-server/src/simcore_service_api_server/services_rpc/async_jobs.py b/services/api-server/src/simcore_service_api_server/services_rpc/async_jobs.py new file mode 100644 index 000000000000..0c1cb911e2fd --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/services_rpc/async_jobs.py @@ -0,0 +1,96 @@ +import functools +from dataclasses import dataclass + +from models_library.api_schemas_rpc_async_jobs.async_jobs import ( + AsyncJobGet, + AsyncJobId, + AsyncJobResult, + AsyncJobStatus, +) +from 
models_library.api_schemas_rpc_async_jobs.exceptions import ( + JobAbortedError, + JobError, + JobNotDoneError, + JobSchedulerError, +) +from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE +from servicelib.celery.models import OwnerMetadata +from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient +from servicelib.rabbitmq.rpc_interfaces.async_jobs import async_jobs + +from ..exceptions.service_errors_utils import service_exception_mapper +from ..exceptions.task_errors import ( + TaskCancelledError, + TaskError, + TaskResultMissingError, + TaskSchedulerError, +) + +_exception_mapper = functools.partial( + service_exception_mapper, service_name="Async jobs" +) + + +@dataclass +class AsyncJobClient: + _rabbitmq_rpc_client: RabbitMQRPCClient + + @_exception_mapper( + rpc_exception_map={ + JobSchedulerError: TaskSchedulerError, + } + ) + async def cancel( + self, *, job_id: AsyncJobId, owner_metadata: OwnerMetadata + ) -> None: + return await async_jobs.cancel( + self._rabbitmq_rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_id=job_id, + owner_metadata=owner_metadata, + ) + + @_exception_mapper( + rpc_exception_map={ + JobSchedulerError: TaskSchedulerError, + } + ) + async def status( + self, *, job_id: AsyncJobId, owner_metadata: OwnerMetadata + ) -> AsyncJobStatus: + return await async_jobs.status( + self._rabbitmq_rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_id=job_id, + owner_metadata=owner_metadata, + ) + + @_exception_mapper( + rpc_exception_map={ + JobSchedulerError: TaskSchedulerError, + JobNotDoneError: TaskResultMissingError, + JobAbortedError: TaskCancelledError, + JobError: TaskError, + } + ) + async def result( + self, *, job_id: AsyncJobId, owner_metadata: OwnerMetadata + ) -> AsyncJobResult: + return await async_jobs.result( + self._rabbitmq_rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + job_id=job_id, + owner_metadata=owner_metadata, + ) + + @_exception_mapper( + rpc_exception_map={ + JobSchedulerError: TaskSchedulerError, + } + ) + async def list_jobs(self, *, owner_metadata: OwnerMetadata) -> list[AsyncJobGet]: + return await async_jobs.list_jobs( + self._rabbitmq_rpc_client, + rpc_namespace=STORAGE_RPC_NAMESPACE, + owner_metadata=owner_metadata, + ) diff --git a/services/api-server/src/simcore_service_api_server/services_rpc/director_v2.py b/services/api-server/src/simcore_service_api_server/services_rpc/director_v2.py new file mode 100644 index 000000000000..cb3315c81163 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/services_rpc/director_v2.py @@ -0,0 +1,27 @@ +from dataclasses import dataclass +from functools import partial + +from models_library.projects import ProjectID +from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient +from servicelib.rabbitmq.rpc_interfaces.director_v2 import computations_tasks +from servicelib.rabbitmq.rpc_interfaces.director_v2.errors import ( + ComputationalTaskMissingError, +) + +from ..exceptions.backend_errors import JobNotFoundError +from ..exceptions.service_errors_utils import service_exception_mapper + +_exception_mapper = partial(service_exception_mapper, service_name="DirectorV2") + + +@dataclass(frozen=True, kw_only=True) +class DirectorV2Service: + _rpc_client: RabbitMQRPCClient + + @_exception_mapper( + rpc_exception_map={ComputationalTaskMissingError: JobNotFoundError} + ) + async def get_computation_task_log_file_ids(self, *, project_id: ProjectID): + return await computations_tasks.get_computation_task_log_file_ids( + self._rpc_client, 
project_id=project_id + ) diff --git a/services/api-server/src/simcore_service_api_server/services_rpc/storage.py b/services/api-server/src/simcore_service_api_server/services_rpc/storage.py new file mode 100644 index 000000000000..94a82fd7c369 --- /dev/null +++ b/services/api-server/src/simcore_service_api_server/services_rpc/storage.py @@ -0,0 +1,44 @@ +from dataclasses import dataclass +from functools import partial + +from models_library.api_schemas_rpc_async_jobs.async_jobs import ( + AsyncJobGet, +) +from models_library.api_schemas_webserver.storage import PathToExport +from models_library.products import ProductName +from models_library.users import UserID +from servicelib.celery.models import OwnerMetadata +from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient +from servicelib.rabbitmq.rpc_interfaces.storage import simcore_s3 as storage_rpc + +from ..exceptions.service_errors_utils import service_exception_mapper +from ..models.domain.celery_models import ( + ApiServerOwnerMetadata, +) + +_exception_mapper = partial(service_exception_mapper, service_name="Storage") + + +@dataclass(frozen=True, kw_only=True) +class StorageService: + _rpc_client: RabbitMQRPCClient + _user_id: UserID + _product_name: ProductName + + @_exception_mapper(rpc_exception_map={}) + async def start_data_export( + self, + paths_to_export: list[PathToExport], + ) -> AsyncJobGet: + async_job_get, _ = await storage_rpc.start_export_data( + self._rpc_client, + paths_to_export=paths_to_export, + export_as="download_link", + owner_metadata=OwnerMetadata.model_validate( + ApiServerOwnerMetadata( + user_id=self._user_id, product_name=self._product_name + ).model_dump() + ), + user_id=self._user_id, + ) + return async_job_get diff --git a/services/api-server/src/simcore_service_api_server/services_rpc/wb_api_server.py b/services/api-server/src/simcore_service_api_server/services_rpc/wb_api_server.py index d2bb3be899e2..405b16bc1149 100644 --- a/services/api-server/src/simcore_service_api_server/services_rpc/wb_api_server.py +++ b/services/api-server/src/simcore_service_api_server/services_rpc/wb_api_server.py @@ -23,6 +23,14 @@ RegisteredFunctionJobCollection, ) from models_library.api_schemas_webserver.licensed_items import LicensedItemRpcGetPage +from models_library.functions import ( + FunctionJobStatus, + FunctionOutputs, + FunctionUserAccessRights, + FunctionUserApiAccessRights, + RegisteredFunctionJobPatch, + RegisteredFunctionJobWithStatus, +) from models_library.licenses import LicensedItemID from models_library.products import ProductName from models_library.projects import ProjectID @@ -38,6 +46,7 @@ from models_library.rpc.webserver.projects import ( ListProjectsMarkedAsJobRpcFilters, MetadataFilterItem, + ProjectJobRpcGet, ) from models_library.services_types import ServiceRunID from models_library.users import UserID @@ -57,6 +66,10 @@ NotEnoughAvailableSeatsError, ) from servicelib.rabbitmq.rpc_interfaces.webserver import projects as projects_rpc +from servicelib.rabbitmq.rpc_interfaces.webserver.errors import ( + ProjectForbiddenRpcError, + ProjectNotFoundRpcError, +) from servicelib.rabbitmq.rpc_interfaces.webserver.functions import ( functions_rpc_interface, ) @@ -77,6 +90,8 @@ from ..exceptions.backend_errors import ( CanNotCheckoutServiceIsNotRunningError, InsufficientNumberOfSeatsError, + JobForbiddenAccessError, + JobNotFoundError, LicensedItemCheckoutNotFoundError, ) from ..exceptions.service_errors_utils import service_exception_mapper @@ -236,6 +251,7 @@ async def mark_project_as_job( 
user_id: UserID, project_uuid: ProjectID, job_parent_resource_name: RelativeResourceName, + storage_assets_deleted: bool, # noqa: FBT001 ): await projects_rpc.mark_project_as_job( rpc_client=self._client, @@ -243,6 +259,29 @@ async def mark_project_as_job( user_id=user_id, project_uuid=project_uuid, job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=storage_assets_deleted, + ) + + @_exception_mapper( + rpc_exception_map={ + ProjectForbiddenRpcError: JobForbiddenAccessError, + ProjectNotFoundRpcError: JobNotFoundError, + } + ) + async def get_project_marked_as_job( + self, + *, + product_name: ProductName, + user_id: UserID, + project_id: ProjectID, + job_parent_resource_name: RelativeResourceName, + ) -> ProjectJobRpcGet: + return await projects_rpc.get_project_marked_as_job( + rpc_client=self._client, + product_name=product_name, + user_id=user_id, + project_uuid=project_id, + job_parent_resource_name=job_parent_resource_name, ) async def list_projects_marked_as_jobs( @@ -334,6 +373,8 @@ async def list_function_jobs( pagination_offset: PageOffsetInt = 0, pagination_limit: PageLimitInt = DEFAULT_NUMBER_OF_ITEMS_PER_PAGE, filter_by_function_id: FunctionID | None = None, + filter_by_function_job_ids: list[FunctionJobID] | None = None, + filter_by_function_job_collection_id: FunctionJobCollectionID | None = None, ) -> tuple[list[RegisteredFunctionJob], PageMetaInfoLimitOffset]: return await functions_rpc_interface.list_function_jobs( self._client, @@ -342,6 +383,33 @@ async def list_function_jobs( pagination_offset=pagination_offset, pagination_limit=pagination_limit, filter_by_function_id=filter_by_function_id, + filter_by_function_job_ids=filter_by_function_job_ids, + filter_by_function_job_collection_id=filter_by_function_job_collection_id, + ) + + async def list_function_jobs_with_status( + self, + *, + user_id: UserID, + product_name: ProductName, + pagination_offset: PageOffsetInt = 0, + pagination_limit: PageLimitInt = DEFAULT_NUMBER_OF_ITEMS_PER_PAGE, + filter_by_function_id: FunctionID | None = None, + filter_by_function_job_ids: list[FunctionJobID] | None = None, + filter_by_function_job_collection_id: FunctionJobCollectionID | None = None, + ) -> tuple[ + list[RegisteredFunctionJobWithStatus], + PageMetaInfoLimitOffset, + ]: + return await functions_rpc_interface.list_function_jobs_with_status( + self._client, + user_id=user_id, + product_name=product_name, + pagination_offset=pagination_offset, + pagination_limit=pagination_limit, + filter_by_function_id=filter_by_function_id, + filter_by_function_job_ids=filter_by_function_job_ids, + filter_by_function_job_collection_id=filter_by_function_job_collection_id, ) async def list_function_job_collections( @@ -448,6 +516,22 @@ async def register_function_job( function_job=function_job, ) + async def patch_registered_function_job( + self, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, + registered_function_job_patch: RegisteredFunctionJobPatch, + ) -> RegisteredFunctionJob: + return await functions_rpc_interface.patch_registered_function_job( + self._client, + user_id=user_id, + product_name=product_name, + function_job_uuid=function_job_id, + registered_function_job_patch=registered_function_job_patch, + ) + async def get_function_input_schema( self, *, user_id: UserID, product_name: ProductName, function_id: FunctionID ) -> FunctionInputSchema: @@ -468,6 +552,70 @@ async def get_function_output_schema( function_id=function_id, ) + async def get_function_job_status( + 
self, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, + ) -> FunctionJobStatus: + return await functions_rpc_interface.get_function_job_status( + self._client, + user_id=user_id, + product_name=product_name, + function_job_id=function_job_id, + ) + + async def get_function_job_outputs( + self, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, + ) -> FunctionOutputs: + return await functions_rpc_interface.get_function_job_outputs( + self._client, + user_id=user_id, + product_name=product_name, + function_job_id=function_job_id, + ) + + async def update_function_job_status( + self, + *, + function_job_id: FunctionJobID, + user_id: UserID, + product_name: ProductName, + job_status: FunctionJobStatus, + check_write_permissions: bool = True, + ) -> FunctionJobStatus: + return await functions_rpc_interface.update_function_job_status( + self._client, + function_job_id=function_job_id, + user_id=user_id, + product_name=product_name, + job_status=job_status, + check_write_permissions=check_write_permissions, + ) + + async def update_function_job_outputs( + self, + *, + function_job_id: FunctionJobID, + user_id: UserID, + product_name: ProductName, + outputs: FunctionOutputs, + check_write_permissions: bool = True, + ) -> FunctionOutputs: + return await functions_rpc_interface.update_function_job_outputs( + self._client, + function_job_id=function_job_id, + user_id=user_id, + product_name=product_name, + outputs=outputs, + check_write_permissions=check_write_permissions, + ) + async def find_cached_function_jobs( self, *, @@ -526,6 +674,32 @@ async def delete_function_job_collection( function_job_collection_id=function_job_collection_id, ) + async def get_function_user_permissions( + self, + *, + user_id: UserID, + product_name: ProductName, + function_id: FunctionID, + ) -> FunctionUserAccessRights: + return await functions_rpc_interface.get_function_user_permissions( + self._client, + user_id=user_id, + product_name=product_name, + function_id=function_id, + ) + + async def get_functions_user_api_access_rights( + self, + *, + user_id: UserID, + product_name: ProductName, + ) -> FunctionUserApiAccessRights: + return await functions_rpc_interface.get_functions_user_api_access_rights( + self._client, + user_id=user_id, + product_name=product_name, + ) + def setup(app: FastAPI, rabbitmq_rmp_client: RabbitMQRPCClient): wb_api_rpc_client = WbApiRpcClient(_client=rabbitmq_rmp_client) diff --git a/services/api-server/tests/conftest.py b/services/api-server/tests/conftest.py index 16d33d3afc0b..0287fa38d5b0 100644 --- a/services/api-server/tests/conftest.py +++ b/services/api-server/tests/conftest.py @@ -16,6 +16,7 @@ CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent pytest_plugins = [ + "pytest_simcore.asyncio_event_loops", "pytest_simcore.cli_runner", "pytest_simcore.docker_compose", "pytest_simcore.docker_swarm", @@ -24,9 +25,11 @@ "pytest_simcore.faker_users_data", "pytest_simcore.httpbin_service", "pytest_simcore.httpx_calls_capture", + "pytest_simcore.logging", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.rabbit_service", + "pytest_simcore.redis_service", "pytest_simcore.repository_paths", "pytest_simcore.schemas", "pytest_simcore.services_api_mocks_for_aiohttp_clients", @@ -69,6 +72,12 @@ def default_app_env_vars( env_vars["API_SERVER_DEV_FEATURES_ENABLED"] = "1" env_vars["API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED"] = "1" 
env_vars["API_SERVER_PROMETHEUS_INSTRUMENTATION_ENABLED"] = "0" + env_vars["POSTGRES_MINSIZE"] = "1" + env_vars["POSTGRES_MAXSIZE"] = "10" + env_vars["POSTGRES_MAX_POOLSIZE"] = "10" + env_vars["POSTGRES_MAX_OVERFLOW"] = "20" + env_vars["API_SERVER_CELERY"] = "null" + env_vars["API_SERVER_RABBITMQ"] = "null" return env_vars diff --git a/services/api-server/tests/mocks/create_program_job_success.json b/services/api-server/tests/mocks/create_program_job_success.json index 32006ffdc5ad..ea9bbfcd4fc0 100644 --- a/services/api-server/tests/mocks/create_program_job_success.json +++ b/services/api-server/tests/mocks/create_program_job_success.json @@ -162,9 +162,10 @@ "creationDate": "2025-04-15T13:14:44.636Z", "lastChangeDate": "2025-04-15T13:14:46.704Z", "state": { - "locked": { - "value": false, - "status": "CLOSED" + "shareState": { + "locked": false, + "status": "CLOSED", + "currentUserGroupids": [] }, "state": { "value": "UNKNOWN" diff --git a/services/api-server/tests/mocks/create_study_job.json b/services/api-server/tests/mocks/create_study_job.json index db7f717efd2b..5cbf615db3a4 100644 --- a/services/api-server/tests/mocks/create_study_job.json +++ b/services/api-server/tests/mocks/create_study_job.json @@ -98,9 +98,10 @@ "tags": [], "classifiers": [], "state": { - "locked": { - "value": false, - "status": "CLOSED" + "shareState": { + "locked": false, + "status": "CLOSED", + "currentUserGroupids": [] }, "state": { "value": "UNKNOWN" diff --git a/services/api-server/tests/mocks/for_test_api_routes_studies.json b/services/api-server/tests/mocks/for_test_api_routes_studies.json index 65524c3c742d..ba01ac0b2553 100644 --- a/services/api-server/tests/mocks/for_test_api_routes_studies.json +++ b/services/api-server/tests/mocks/for_test_api_routes_studies.json @@ -160,9 +160,10 @@ "tags": [], "classifiers": [], "state": { - "locked": { - "value": false, - "status": "CLOSED" + "shareState": { + "locked": false, + "status": "CLOSED", + "currentUserGroupids": [] }, "state": { "value": "UNKNOWN" @@ -385,9 +386,10 @@ "tags": [], "classifiers": [], "state": { - "locked": { - "value": false, - "status": "CLOSED" + "shareState": { + "locked": false, + "status": "CLOSED", + "currentUserGroupids": [] }, "state": { "value": "UNKNOWN" diff --git a/services/api-server/tests/mocks/for_test_get_and_update_job_metadata.json b/services/api-server/tests/mocks/for_test_get_and_update_job_metadata.json index f14044c398c8..85faddf8a6d2 100644 --- a/services/api-server/tests/mocks/for_test_get_and_update_job_metadata.json +++ b/services/api-server/tests/mocks/for_test_get_and_update_job_metadata.json @@ -1,461 +1,462 @@ [ - { - "name": "get_profile", - "description": "", - "method": "GET", - "host": "webserver", - "path": "/v0/me", - "query": null, - "request_payload": null, - "response_body": { - "data": { - "first_name": "crespo", - "last_name": "", - "id": 1, - "login": "rturner@example.net", - "role": "User", - "groups": { - "me": { - "gid": 3, - "label": "crespo", - "description": "primary group", - "thumbnail": null, - "accessRights": { - "read": true, - "write": false, - "delete": false - }, - "inclusionRules": {} - }, - "organizations": [ - { - "gid": 2, - "label": "osparc", - "description": "osparc product group", - "thumbnail": null, - "accessRights": { - "read": false, - "write": false, - "delete": false - }, - "inclusionRules": {} + { + "name": "get_profile", + "description": "", + "method": "GET", + "host": "webserver", + "path": "/v0/me", + "query": null, + "request_payload": null, + "response_body": 
{ + "data": { + "first_name": "crespo", + "last_name": "", + "id": 1, + "login": "rturner@example.net", + "role": "User", + "groups": { + "me": { + "gid": 3, + "label": "crespo", + "description": "primary group", + "thumbnail": null, + "accessRights": { + "read": true, + "write": false, + "delete": false + }, + "inclusionRules": {} + }, + "organizations": [ + { + "gid": 2, + "label": "osparc", + "description": "osparc product group", + "thumbnail": null, + "accessRights": { + "read": false, + "write": false, + "delete": false + }, + "inclusionRules": {} + } + ], + "all": { + "gid": 1, + "label": "Everyone", + "description": "all users", + "thumbnail": null, + "accessRights": { + "read": true, + "write": false, + "delete": false + }, + "inclusionRules": {} + } + }, + "gravatar_id": "aa33f6ec77ea434c2ea4fb92d0fd379e" } - ], - "all": { - "gid": 1, - "label": "Everyone", - "description": "all users", + }, + "status_code": 200 + }, + { + "name": "get_service", + "description": "", + "method": "GET", + "host": "catalog", + "path": "/v0/services/simcore/services/comp/itis/sleeper/2.0.0", + "query": "user_id=1", + "request_payload": null, + "response_body": { + "name": "sleeper", "thumbnail": null, - "accessRights": { - "read": true, - "write": false, - "delete": false + "description": "A service which awaits for time to pass.", + "deprecated": null, + "classifiers": [], + "quality": {}, + "key": "simcore/services/comp/itis/sleeper", + "version": "2.0.0", + "integration-version": "1.0.0", + "type": "computational", + "authors": [ + { + "name": "Joshua Boone", + "email": "sharon60@example.net", + "affiliation": "Johnson Inc" + }, + { + "name": "Kenneth Alvarez", + "email": "ncollins@example.com", + "affiliation": "Singh LLC" + }, + { + "name": "Jennifer Howard", + "email": "amyhood@example.org", + "affiliation": "Campos-Weaver" + } + ], + "contact": "sharon91@example.com", + "inputs": { + "input_1": { + "displayOrder": 1.0, + "label": "File with int number", + "description": "Pick a file containing only one integer", + "type": "data:text/plain", + "fileToKeyMap": { + "single_number.txt": "input_1" + } + }, + "input_2": { + "displayOrder": 2.0, + "label": "Sleep interval", + "description": "Choose an amount of time to sleep", + "type": "integer", + "defaultValue": 2 + }, + "input_3": { + "displayOrder": 3.0, + "label": "Fail after sleep", + "description": "If set to true will cause service to fail after it sleeps", + "type": "boolean", + "defaultValue": false + } }, - "inclusionRules": {} - } + "outputs": { + "output_1": { + "displayOrder": 1.0, + "label": "File containing one random integer", + "description": "Integer is generated in range [1-9]", + "type": "data:text/plain", + "fileToKeyMap": { + "single_number.txt": "output_1" + } + }, + "output_2": { + "displayOrder": 2.0, + "label": "Random sleep interval", + "description": "Interval is generated in range [1-9]", + "type": "integer" + } + } }, - "gravatar_id": "aa33f6ec77ea434c2ea4fb92d0fd379e" - } + "status_code": 200 }, - "status_code": 200 - }, - { - "name": "get_service", - "description": "", - "method": "GET", - "host": "catalog", - "path": "/v0/services/simcore/services/comp/itis/sleeper/2.0.0", - "query": "user_id=1", - "request_payload": null, - "response_body": { - "name": "sleeper", - "thumbnail": null, - "description": "A service which awaits for time to pass.", - "deprecated": null, - "classifiers": [], - "quality": {}, - "key": "simcore/services/comp/itis/sleeper", - "version": "2.0.0", - "integration-version": "1.0.0", - "type": 
"computational", - "authors": [ - { - "name": "Joshua Boone", - "email": "sharon60@example.net", - "affiliation": "Johnson Inc" - }, - { - "name": "Kenneth Alvarez", - "email": "ncollins@example.com", - "affiliation": "Singh LLC" - }, - { - "name": "Jennifer Howard", - "email": "amyhood@example.org", - "affiliation": "Campos-Weaver" - } - ], - "contact": "sharon91@example.com", - "inputs": { - "input_1": { - "displayOrder": 1.0, - "label": "File with int number", - "description": "Pick a file containing only one integer", - "type": "data:text/plain", - "fileToKeyMap": { - "single_number.txt": "input_1" - } - }, - "input_2": { - "displayOrder": 2.0, - "label": "Sleep interval", - "description": "Choose an amount of time to sleep", - "type": "integer", - "defaultValue": 2 + { + "name": "create_project", + "description": "", + "method": "POST", + "host": "webserver", + "path": "/v0/projects", + "query": "hidden=true", + "request_payload": { + "uuid": "767a7355-3062-4ceb-b339-ed67d07d2ed2", + "name": "solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.0/jobs/767a7355-3062-4ceb-b339-ed67d07d2ed2", + "description": "Study associated to solver job:\n{\n \"id\": \"767a7355-3062-4ceb-b339-ed67d07d2ed2\",\n \"name\": \"solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.0/jobs/767a7355-3062-4ceb-b339-ed67d07d2ed2\",\n \"inputs_checksum\": \"f6fce006d0fe7b6168fc20a10ec1fe74d1723ebc935232d3c0707c277db2ef0c\",\n \"created_at\": \"2023-07-12T12:45:30.757605+00:00\"\n}", + "thumbnail": "https://via.placeholder.com/170x120.png", + "workbench": { + "d49543e7-6e36-57ee-86ff-46b71f63757f": { + "key": "simcore/services/comp/itis/sleeper", + "version": "2.0.0", + "label": "sleeper", + "progress": null, + "thumbnail": null, + "runHash": null, + "inputs": { + "x": 4.33, + "n": 55, + "title": "Temperature", + "enabled": true + }, + "inputsUnits": {}, + "inputAccess": null, + "inputNodes": [], + "outputs": {}, + "outputNode": null, + "outputNodes": null, + "parent": null, + "position": null, + "state": { + "modified": true, + "dependencies": [], + "currentStatus": "NOT_STARTED", + "progress": 0 + }, + "bootOptions": null + } + }, + "accessRights": {}, + "tags": [], + "classifiers": [], + "ui": { + "workbench": { + "d49543e7-6e36-57ee-86ff-46b71f63757f": { + "position": { + "x": 633, + "y": 229 + }, + "marker": null + } + }, + "slideshow": {}, + "currentNodeId": "d49543e7-6e36-57ee-86ff-46b71f63757f", + "annotations": {} + } }, - "input_3": { - "displayOrder": 3.0, - "label": "Fail after sleep", - "description": "If set to true will cause service to fail after it sleeps", - "type": "boolean", - "defaultValue": false - } - }, - "outputs": { - "output_1": { - "displayOrder": 1.0, - "label": "File containing one random integer", - "description": "Integer is generated in range [1-9]", - "type": "data:text/plain", - "fileToKeyMap": { - "single_number.txt": "output_1" - } + "response_body": { + "data": { + "task_id": "POST%20%2Fv0%2Fprojects%3Fhidden%3Dtrue.ceb7b7ab-ccef-4ea6-b82f-199d265d4c3b", + "task_name": "POST /v0/projects?hidden=true", + "status_href": "/v0/tasks/POST%2520%252Fv0%252Fprojects%253Fhidden%253Dtrue.ceb7b7ab-ccef-4ea6-b82f-199d265d4c3b", + "result_href": "/v0/tasks/POST%2520%252Fv0%252Fprojects%253Fhidden%253Dtrue.ceb7b7ab-ccef-4ea6-b82f-199d265d4c3b/result", + "abort_href": "/v0/tasks/POST%2520%252Fv0%252Fprojects%253Fhidden%253Dtrue.ceb7b7ab-ccef-4ea6-b82f-199d265d4c3b" + } }, - "output_2": { - "displayOrder": 2.0, - "label": "Random sleep interval", - "description": 
"Interval is generated in range [1-9]", - "type": "integer" - } - } + "status_code": 202 }, - "status_code": 200 - }, - { - "name": "create_project", - "description": "", - "method": "POST", - "host": "webserver", - "path": "/v0/projects", - "query": "hidden=true", - "request_payload": { - "uuid": "767a7355-3062-4ceb-b339-ed67d07d2ed2", - "name": "solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.0/jobs/767a7355-3062-4ceb-b339-ed67d07d2ed2", - "description": "Study associated to solver job:\n{\n \"id\": \"767a7355-3062-4ceb-b339-ed67d07d2ed2\",\n \"name\": \"solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.0/jobs/767a7355-3062-4ceb-b339-ed67d07d2ed2\",\n \"inputs_checksum\": \"f6fce006d0fe7b6168fc20a10ec1fe74d1723ebc935232d3c0707c277db2ef0c\",\n \"created_at\": \"2023-07-12T12:45:30.757605+00:00\"\n}", - "thumbnail": "https://via.placeholder.com/170x120.png", - "workbench": { - "d49543e7-6e36-57ee-86ff-46b71f63757f": { - "key": "simcore/services/comp/itis/sleeper", - "version": "2.0.0", - "label": "sleeper", - "progress": null, - "thumbnail": null, - "runHash": null, - "inputs": { - "x": 4.33, - "n": 55, - "title": "Temperature", - "enabled": true - }, - "inputsUnits": {}, - "inputAccess": null, - "inputNodes": [], - "outputs": {}, - "outputNode": null, - "outputNodes": null, - "parent": null, - "position": null, - "state": { - "modified": true, - "dependencies": [], - "currentStatus": "NOT_STARTED", - "progress": 0 - }, - "bootOptions": null - } - }, - "accessRights": {}, - "tags": [], - "classifiers": [], - "ui": { - "workbench": { - "d49543e7-6e36-57ee-86ff-46b71f63757f": { - "position": { - "x": 633, - "y": 229 - }, - "marker": null - } + { + "name": "get_task_status_1", + "description": "", + "method": "GET", + "host": "webserver", + "path": "/v0/tasks/POST%20%2Fv0%2Fprojects%3Fhidden%3Dtrue.ceb7b7ab-ccef-4ea6-b82f-199d265d4c3b", + "query": null, + "request_payload": null, + "response_body": { + "data": { + "task_progress": { + "message": "creating new study...", + "percent": 0.0 + }, + "done": false, + "started": "2023-07-12T12:45:30.825756" + } }, - "slideshow": {}, - "currentNodeId": "d49543e7-6e36-57ee-86ff-46b71f63757f", - "annotations": {} - } - }, - "response_body": { - "data": { - "task_id": "POST%20%2Fv0%2Fprojects%3Fhidden%3Dtrue.ceb7b7ab-ccef-4ea6-b82f-199d265d4c3b", - "task_name": "POST /v0/projects?hidden=true", - "status_href": "/v0/tasks/POST%2520%252Fv0%252Fprojects%253Fhidden%253Dtrue.ceb7b7ab-ccef-4ea6-b82f-199d265d4c3b", - "result_href": "/v0/tasks/POST%2520%252Fv0%252Fprojects%253Fhidden%253Dtrue.ceb7b7ab-ccef-4ea6-b82f-199d265d4c3b/result", - "abort_href": "/v0/tasks/POST%2520%252Fv0%252Fprojects%253Fhidden%253Dtrue.ceb7b7ab-ccef-4ea6-b82f-199d265d4c3b" - } + "status_code": 200 }, - "status_code": 202 - }, - { - "name": "get_task_status_1", - "description": "", - "method": "GET", - "host": "webserver", - "path": "/v0/tasks/POST%20%2Fv0%2Fprojects%3Fhidden%3Dtrue.ceb7b7ab-ccef-4ea6-b82f-199d265d4c3b", - "query": null, - "request_payload": null, - "response_body": { - "data": { - "task_progress": { - "message": "creating new study...", - "percent": 0.0 + { + "name": "get_task_status_2", + "description": "", + "method": "GET", + "host": "webserver", + "path": "/v0/tasks/POST%20%2Fv0%2Fprojects%3Fhidden%3Dtrue.ceb7b7ab-ccef-4ea6-b82f-199d265d4c3b", + "query": null, + "request_payload": null, + "response_body": { + "data": { + "task_progress": { + "message": "creating new study...", + "percent": 0.0 + }, + "done": false, + "started": 
"2023-07-12T12:45:30.825756" + } }, - "done": false, - "started": "2023-07-12T12:45:30.825756" - } + "status_code": 200 }, - "status_code": 200 - }, - { - "name": "get_task_status_2", - "description": "", - "method": "GET", - "host": "webserver", - "path": "/v0/tasks/POST%20%2Fv0%2Fprojects%3Fhidden%3Dtrue.ceb7b7ab-ccef-4ea6-b82f-199d265d4c3b", - "query": null, - "request_payload": null, - "response_body": { - "data": { - "task_progress": { - "message": "creating new study...", - "percent": 0.0 + { + "name": "get_task_status_3", + "description": "", + "method": "GET", + "host": "webserver", + "path": "/v0/tasks/POST%20%2Fv0%2Fprojects%3Fhidden%3Dtrue.ceb7b7ab-ccef-4ea6-b82f-199d265d4c3b", + "query": null, + "request_payload": null, + "response_body": { + "data": { + "task_progress": { + "message": "finished", + "percent": 1.0 + }, + "done": true, + "started": "2023-07-12T12:45:30.825756" + } }, - "done": false, - "started": "2023-07-12T12:45:30.825756" - } + "status_code": 200 }, - "status_code": 200 - }, - { - "name": "get_task_status_3", - "description": "", - "method": "GET", - "host": "webserver", - "path": "/v0/tasks/POST%20%2Fv0%2Fprojects%3Fhidden%3Dtrue.ceb7b7ab-ccef-4ea6-b82f-199d265d4c3b", - "query": null, - "request_payload": null, - "response_body": { - "data": { - "task_progress": { - "message": "finished", - "percent": 1.0 + { + "name": "get_task_result", + "description": "", + "method": "GET", + "host": "webserver", + "path": "/v0/tasks/POST%20%2Fv0%2Fprojects%3Fhidden%3Dtrue.ceb7b7ab-ccef-4ea6-b82f-199d265d4c3b/result", + "query": null, + "request_payload": null, + "response_body": { + "data": { + "uuid": "767a7355-3062-4ceb-b339-ed67d07d2ed2", + "name": "solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.0/jobs/767a7355-3062-4ceb-b339-ed67d07d2ed2", + "description": "Study associated to solver job:\n{\n \"id\": \"767a7355-3062-4ceb-b339-ed67d07d2ed2\",\n \"name\": \"solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.0/jobs/767a7355-3062-4ceb-b339-ed67d07d2ed2\",\n \"inputs_checksum\": \"f6fce006d0fe7b6168fc20a10ec1fe74d1723ebc935232d3c0707c277db2ef0c\",\n \"created_at\": \"2023-07-12T12:45:30.757605+00:00\"\n}", + "thumbnail": "https://via.placeholder.com/170x120.png", + "creationDate": "2023-07-12T12:45:30.832Z", + "lastChangeDate": "2023-07-12T12:45:30.832Z", + "workbench": { + "d49543e7-6e36-57ee-86ff-46b71f63757f": { + "key": "simcore/services/comp/itis/sleeper", + "version": "2.0.0", + "label": "sleeper", + "progress": 0.0, + "inputs": { + "x": 4.33, + "n": 55, + "title": "Temperature", + "enabled": true + }, + "inputsUnits": {}, + "inputNodes": [], + "outputs": {}, + "state": { + "modified": true, + "dependencies": [], + "currentStatus": "NOT_STARTED", + "progress": 0.0 + } + } + }, + "prjOwner": "brownlisa@example.com", + "accessRights": { + "3": { + "read": true, + "write": true, + "delete": true + } + }, + "tags": [], + "classifiers": [], + "state": { + "shareState": { + "locked": false, + "status": "CLOSED", + "currentUserGroupids": [] + }, + "state": { + "value": "NOT_STARTED" + } + }, + "ui": { + "workbench": { + "d49543e7-6e36-57ee-86ff-46b71f63757f": { + "position": { + "x": 633, + "y": 229 + } + } + }, + "slideshow": {}, + "currentNodeId": "d49543e7-6e36-57ee-86ff-46b71f63757f", + "annotations": {} + }, + "quality": {}, + "dev": {} + } }, - "done": true, - "started": "2023-07-12T12:45:30.825756" - } + "status_code": 201 }, - "status_code": 200 - }, - { - "name": "get_task_result", - "description": "", - "method": "GET", - "host": 
"webserver", - "path": "/v0/tasks/POST%20%2Fv0%2Fprojects%3Fhidden%3Dtrue.ceb7b7ab-ccef-4ea6-b82f-199d265d4c3b/result", - "query": null, - "request_payload": null, - "response_body": { - "data": { - "uuid": "767a7355-3062-4ceb-b339-ed67d07d2ed2", - "name": "solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.0/jobs/767a7355-3062-4ceb-b339-ed67d07d2ed2", - "description": "Study associated to solver job:\n{\n \"id\": \"767a7355-3062-4ceb-b339-ed67d07d2ed2\",\n \"name\": \"solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.0/jobs/767a7355-3062-4ceb-b339-ed67d07d2ed2\",\n \"inputs_checksum\": \"f6fce006d0fe7b6168fc20a10ec1fe74d1723ebc935232d3c0707c277db2ef0c\",\n \"created_at\": \"2023-07-12T12:45:30.757605+00:00\"\n}", - "thumbnail": "https://via.placeholder.com/170x120.png", - "creationDate": "2023-07-12T12:45:30.832Z", - "lastChangeDate": "2023-07-12T12:45:30.832Z", - "workbench": { - "d49543e7-6e36-57ee-86ff-46b71f63757f": { - "key": "simcore/services/comp/itis/sleeper", - "version": "2.0.0", - "label": "sleeper", - "progress": 0.0, - "inputs": { - "x": 4.33, - "n": 55, - "title": "Temperature", - "enabled": true - }, - "inputsUnits": {}, - "inputNodes": [], - "outputs": {}, - "state": { - "modified": true, - "dependencies": [], - "currentStatus": "NOT_STARTED", - "progress": 0.0 + { + "name": "get_project_metadata", + "description": "", + "method": "GET", + "host": "webserver", + "path": "/v0/projects/767a7355-3062-4ceb-b339-ed67d07d2ed2/metadata", + "query": null, + "request_payload": null, + "response_body": { + "data": { + "projectUuid": "767a7355-3062-4ceb-b339-ed67d07d2ed2", + "custom": {} } - } }, - "prjOwner": "brownlisa@example.com", - "accessRights": { - "3": { - "read": true, - "write": true, - "delete": true - } - }, - "tags": [], - "classifiers": [], - "state": { - "locked": { - "value": false, - "status": "CLOSED" - }, - "state": { - "value": "NOT_STARTED" - } + "status_code": 200 + }, + { + "name": "update_project_metadata", + "description": "", + "method": "PATCH", + "host": "webserver", + "path": "/v0/projects/767a7355-3062-4ceb-b339-ed67d07d2ed2/metadata", + "query": null, + "request_payload": { + "custom": { + "number": 3.14, + "integer": 42, + "string": "foo", + "boolean": true + } }, - "ui": { - "workbench": { - "d49543e7-6e36-57ee-86ff-46b71f63757f": { - "position": { - "x": 633, - "y": 229 - } + "response_body": { + "data": { + "projectUuid": "767a7355-3062-4ceb-b339-ed67d07d2ed2", + "custom": { + "number": 3.14, + "string": "foo", + "boolean": true, + "integer": 42 + } } - }, - "slideshow": {}, - "currentNodeId": "d49543e7-6e36-57ee-86ff-46b71f63757f", - "annotations": {} }, - "quality": {}, - "dev": {} - } - }, - "status_code": 201 - }, - { - "name": "get_project_metadata", - "description": "", - "method": "GET", - "host": "webserver", - "path": "/v0/projects/767a7355-3062-4ceb-b339-ed67d07d2ed2/metadata", - "query": null, - "request_payload": null, - "response_body": { - "data": { - "projectUuid": "767a7355-3062-4ceb-b339-ed67d07d2ed2", - "custom": {} - } - }, - "status_code": 200 - }, - { - "name": "update_project_metadata", - "description": "", - "method": "PATCH", - "host": "webserver", - "path": "/v0/projects/767a7355-3062-4ceb-b339-ed67d07d2ed2/metadata", - "query": null, - "request_payload": { - "custom": { - "number": 3.14, - "integer": 42, - "string": "foo", - "boolean": true - } - }, - "response_body": { - "data": { - "projectUuid": "767a7355-3062-4ceb-b339-ed67d07d2ed2", - "custom": { - "number": 3.14, - "string": "foo", - 
"boolean": true, - "integer": 42 - } - } + "status_code": 200 }, - "status_code": 200 - }, - { - "name": "get_project_metadata_1", - "description": "", - "method": "GET", - "host": "webserver", - "path": "/v0/projects/767a7355-3062-4ceb-b339-ed67d07d2ed2/metadata", - "query": null, - "request_payload": null, - "response_body": { - "data": { - "projectUuid": "767a7355-3062-4ceb-b339-ed67d07d2ed2", - "custom": { - "number": 3.14, - "string": "foo", - "boolean": true, - "integer": 42 - } - } + { + "name": "get_project_metadata_1", + "description": "", + "method": "GET", + "host": "webserver", + "path": "/v0/projects/767a7355-3062-4ceb-b339-ed67d07d2ed2/metadata", + "query": null, + "request_payload": null, + "response_body": { + "data": { + "projectUuid": "767a7355-3062-4ceb-b339-ed67d07d2ed2", + "custom": { + "number": 3.14, + "string": "foo", + "boolean": true, + "integer": 42 + } + } + }, + "status_code": 200 }, - "status_code": 200 - }, - { - "name": "delete_project", - "description": "", - "method": "DELETE", - "host": "webserver", - "path": "/v0/projects/767a7355-3062-4ceb-b339-ed67d07d2ed2", - "query": null, - "request_payload": null, - "response_body": null, - "status_code": 204 - }, - { - "name": "get_project_metadata_2", - "description": "", - "method": "GET", - "host": "webserver", - "path": "/v0/projects/767a7355-3062-4ceb-b339-ed67d07d2ed2/metadata", - "query": null, - "request_payload": null, - "response_body": { - "data": null, - "error": { - "logs": [ - { - "message": "Project with uuid 767a7355-3062-4ceb-b339-ed67d07d2ed2 not found.", - "level": "ERROR", - "logger": "user" - } - ], - "errors": [ - { - "code": "HTTPNotFound", - "message": "Project with uuid 767a7355-3062-4ceb-b339-ed67d07d2ed2 not found.", - "resource": null, - "field": null - } - ], - "status": 404, - "message": "Project with uuid 767a7355-3062-4ceb-b339-ed67d07d2ed2 not found." - } + { + "name": "delete_project", + "description": "", + "method": "DELETE", + "host": "webserver", + "path": "/v0/projects/767a7355-3062-4ceb-b339-ed67d07d2ed2", + "query": null, + "request_payload": null, + "response_body": null, + "status_code": 204 }, - "status_code": 404 - } + { + "name": "get_project_metadata_2", + "description": "", + "method": "GET", + "host": "webserver", + "path": "/v0/projects/767a7355-3062-4ceb-b339-ed67d07d2ed2/metadata", + "query": null, + "request_payload": null, + "response_body": { + "data": null, + "error": { + "logs": [ + { + "message": "Project with uuid 767a7355-3062-4ceb-b339-ed67d07d2ed2 not found.", + "level": "ERROR", + "logger": "user" + } + ], + "errors": [ + { + "code": "HTTPNotFound", + "message": "Project with uuid 767a7355-3062-4ceb-b339-ed67d07d2ed2 not found.", + "resource": null, + "field": null + } + ], + "status": 404, + "message": "Project with uuid 767a7355-3062-4ceb-b339-ed67d07d2ed2 not found." 
+ } + }, + "status_code": 404 + } ] diff --git a/services/api-server/tests/mocks/get_job_outputs.json b/services/api-server/tests/mocks/get_job_outputs.json index 579d79b7de95..ca61d64183ef 100644 --- a/services/api-server/tests/mocks/get_job_outputs.json +++ b/services/api-server/tests/mocks/get_job_outputs.json @@ -210,9 +210,10 @@ "tags": [], "classifiers": [], "state": { - "locked": { - "value": false, - "status": "CLOSED" + "shareState": { + "locked": false, + "status": "CLOSED", + "currentUserGroupids": [] }, "state": { "value": "NOT_STARTED" diff --git a/services/api-server/tests/mocks/get_job_pricing_unit_invalid_solver.json b/services/api-server/tests/mocks/get_job_pricing_unit_invalid_solver.json index 270af965166a..eb357b2344ee 100644 --- a/services/api-server/tests/mocks/get_job_pricing_unit_invalid_solver.json +++ b/services/api-server/tests/mocks/get_job_pricing_unit_invalid_solver.json @@ -81,9 +81,10 @@ "tags": [], "classifiers": [], "state": { - "locked": { - "value": false, - "status": "CLOSED" + "shareState": { + "locked": false, + "status": "CLOSED", + "currentUserGroupids": [] }, "state": { "value": "NOT_STARTED" diff --git a/services/api-server/tests/mocks/get_job_pricing_unit_success.json b/services/api-server/tests/mocks/get_job_pricing_unit_success.json index 8ab29ed9112a..883c07df327e 100644 --- a/services/api-server/tests/mocks/get_job_pricing_unit_success.json +++ b/services/api-server/tests/mocks/get_job_pricing_unit_success.json @@ -81,9 +81,10 @@ "tags": [], "classifiers": [], "state": { - "locked": { - "value": false, - "status": "CLOSED" + "shareState": { + "locked": false, + "status": "CLOSED", + "currentUserGroupids": [] }, "state": { "value": "NOT_STARTED" diff --git a/services/api-server/tests/mocks/get_solver_outputs.json b/services/api-server/tests/mocks/get_solver_outputs.json index 53e041cc45dd..4d8f96472d6d 100644 --- a/services/api-server/tests/mocks/get_solver_outputs.json +++ b/services/api-server/tests/mocks/get_solver_outputs.json @@ -1,258 +1,259 @@ [ - { - "name": "GET /projects/0f5f114f-c2bf-4807-914f-2f4df2604223", - "description": "", - "method": "GET", - "host": "webserver", - "path": { - "path": "/v0/projects/{project_id}", - "path_parameters": [ - { - "in": "path", - "name": "project_id", - "required": true, - "schema": { - "title": "Project Id", - "type": "str", - "pattern": null, - "format": "uuid", - "exclusiveMinimum": null, - "minimum": null, - "anyOf": null, - "allOf": null, - "oneOf": null - }, - "response_value": "projects" - } - ] - }, - "query": null, - "request_payload": null, - "response_body": { - "data": { - "uuid": "0f5f114f-c2bf-4807-914f-2f4df2604223", - "name": "solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.2/jobs/0f5f114f-c2bf-4807-914f-2f4df2604223", - "description": "Study associated to solver job:\n{\n \"id\": \"0f5f114f-c2bf-4807-914f-2f4df2604223\",\n \"name\": \"solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.2/jobs/0f5f114f-c2bf-4807-914f-2f4df2604223\",\n \"inputs_checksum\": \"88e3aa0cf82491572d5978fa359bad9d100ef247492020efb4dbcc9c5ee09b45\",\n \"created_at\": \"2024-01-18T12:33:56.883048+00:00\"\n}", - "thumbnail": "https://via.placeholder.com/170x120.png", - "creationDate": "2024-01-18T12:33:56.952Z", - "lastChangeDate": "2024-01-18T12:34:13.002Z", - "workspaceId": 2, - "type": "STANDARD", - "templateType": null, - "folderId": 2, - "trashedAt": null, - "trashedBy": null, - "workbench": { - "df42d273-b6f0-509c-bfb5-4abbc5bb0581": { - "key": 
"simcore/services/comp/itis/sleeper", - "version": "2.0.2", - "label": "sleeper", - "progress": 100.0, - "runHash": "10042fe8aa6ba3140532ba27dbbb1ba6c25d3e60a75c1d142f55a53dbecb5ead", - "inputs": { - "input_1": { - "store": 0, - "path": "api/45c97ed8-191d-300c-89be-c5f83148a391/input.txt", - "label": "input.txt", - "eTag": "eccbc87e4b5ce2fe28308fd9f2a7baf3" - }, - "input_2": 3, - "input_3": false, - "input_4": 4 - }, - "inputsUnits": {}, - "inputNodes": [], - "outputs": { - "output_1": { - "store": 0, - "path": "0f5f114f-c2bf-4807-914f-2f4df2604223/df42d273-b6f0-509c-bfb5-4abbc5bb0581/single_number.txt", - "eTag": "c81e728d9d4c2f636f067f89cc14862c" - }, - "output_2": 6 - }, - "state": { - "modified": false, - "dependencies": [], - "currentStatus": "SUCCESS", - "progress": 1.0 - } - } + { + "name": "GET /projects/0f5f114f-c2bf-4807-914f-2f4df2604223", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/projects/{project_id}", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "pattern": null, + "format": "uuid", + "exclusiveMinimum": null, + "minimum": null, + "anyOf": null, + "allOf": null, + "oneOf": null + }, + "response_value": "projects" + } + ] }, - "prjOwner": "greenrichard@example.org", - "accessRights": { - "3": { - "read": true, - "write": true, - "delete": true - } + "query": null, + "request_payload": null, + "response_body": { + "data": { + "uuid": "0f5f114f-c2bf-4807-914f-2f4df2604223", + "name": "solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.2/jobs/0f5f114f-c2bf-4807-914f-2f4df2604223", + "description": "Study associated to solver job:\n{\n \"id\": \"0f5f114f-c2bf-4807-914f-2f4df2604223\",\n \"name\": \"solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.2/jobs/0f5f114f-c2bf-4807-914f-2f4df2604223\",\n \"inputs_checksum\": \"88e3aa0cf82491572d5978fa359bad9d100ef247492020efb4dbcc9c5ee09b45\",\n \"created_at\": \"2024-01-18T12:33:56.883048+00:00\"\n}", + "thumbnail": "https://via.placeholder.com/170x120.png", + "creationDate": "2024-01-18T12:33:56.952Z", + "lastChangeDate": "2024-01-18T12:34:13.002Z", + "workspaceId": 2, + "type": "STANDARD", + "templateType": null, + "folderId": 2, + "trashedAt": null, + "trashedBy": null, + "workbench": { + "df42d273-b6f0-509c-bfb5-4abbc5bb0581": { + "key": "simcore/services/comp/itis/sleeper", + "version": "2.0.2", + "label": "sleeper", + "progress": 100.0, + "runHash": "10042fe8aa6ba3140532ba27dbbb1ba6c25d3e60a75c1d142f55a53dbecb5ead", + "inputs": { + "input_1": { + "store": 0, + "path": "api/45c97ed8-191d-300c-89be-c5f83148a391/input.txt", + "label": "input.txt", + "eTag": "eccbc87e4b5ce2fe28308fd9f2a7baf3" + }, + "input_2": 3, + "input_3": false, + "input_4": 4 + }, + "inputsUnits": {}, + "inputNodes": [], + "outputs": { + "output_1": { + "store": 0, + "path": "0f5f114f-c2bf-4807-914f-2f4df2604223/df42d273-b6f0-509c-bfb5-4abbc5bb0581/single_number.txt", + "eTag": "c81e728d9d4c2f636f067f89cc14862c" + }, + "output_2": 6 + }, + "state": { + "modified": false, + "dependencies": [], + "currentStatus": "SUCCESS", + "progress": 1.0 + } + } + }, + "prjOwner": "greenrichard@example.org", + "accessRights": { + "3": { + "read": true, + "write": true, + "delete": true + } + }, + "tags": [], + "classifiers": [], + "state": { + "shareState": { + "locked": false, + "status": "CLOSED", + "currentUserGroupids": [] + }, + "state": { + "value": "SUCCESS" + } + }, + "ui": { + "workbench": { + 
"df42d273-b6f0-509c-bfb5-4abbc5bb0581": { + "position": { + "x": 633, + "y": 229 + } + } + }, + "slideshow": {}, + "currentNodeId": "df42d273-b6f0-509c-bfb5-4abbc5bb0581", + "annotations": {} + }, + "quality": {}, + "dev": {} + } }, - "tags": [], - "classifiers": [], - "state": { - "locked": { - "value": false, - "status": "CLOSED" - }, - "state": { - "value": "SUCCESS" - } + "status_code": 200 + }, + { + "name": "GET /credits-price", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/credits-price", + "path_parameters": [] }, - "ui": { - "workbench": { - "df42d273-b6f0-509c-bfb5-4abbc5bb0581": { - "position": { - "x": 633, - "y": 229 - } + "query": null, + "request_payload": null, + "response_body": { + "data": { + "productName": "osparc", + "usdPerCredit": 10.0, + "minPaymentAmountUsd": 5 } - }, - "slideshow": {}, - "currentNodeId": "df42d273-b6f0-509c-bfb5-4abbc5bb0581", - "annotations": {} }, - "quality": {}, - "dev": {} - } - }, - "status_code": 200 - }, - { - "name": "GET /credits-price", - "description": "", - "method": "GET", - "host": "webserver", - "path": { - "path": "/v0/credits-price", - "path_parameters": [] - }, - "query": null, - "request_payload": null, - "response_body": { - "data": { - "productName": "osparc", - "usdPerCredit": 10.0, - "minPaymentAmountUsd": 5 - } - }, - "status_code": 200 - }, - { - "name": "GET /projects/0f5f114f-c2bf-4807-914f-2f4df2604223/wallet", - "description": "", - "method": "GET", - "host": "webserver", - "path": { - "path": "/v0/projects/{project_id}/wallet", - "path_parameters": [ - { - "in": "path", - "name": "project_id", - "required": true, - "schema": { - "title": "Project Id", - "type": "str", - "pattern": null, - "format": "uuid", - "exclusiveMinimum": null, - "minimum": null, - "anyOf": null, - "allOf": null, - "oneOf": null - }, - "response_value": "projects" - } - ] - }, - "query": null, - "request_payload": null, - "response_body": { - "data": { - "walletId": 1, - "name": "Derek Nguyen", - "description": "Credits purchased by Bisgaard end up in here", - "owner": 3, - "thumbnail": null, - "status": "ACTIVE", - "created": "2024-01-18T09:32:58.042380+00:00", - "modified": "2024-01-18T09:32:58.042380+00:00" - } - }, - "status_code": 200 - }, - { - "name": "GET /wallets/1", - "description": "", - "method": "GET", - "host": "webserver", - "path": { - "path": "/v0/wallets/{wallet_id}", - "path_parameters": [ - { - "in": "path", - "name": "wallet_id", - "required": true, - "schema": { - "title": "Wallet Id", - "type": "int", - "pattern": null, - "format": null, - "exclusiveMinimum": true, - "minimum": 0, - "anyOf": null, - "allOf": null, - "oneOf": null - }, - "response_value": "wallets" - } - ] + "status_code": 200 }, - "query": null, - "request_payload": null, - "response_body": { - "data": { - "walletId": 1, - "name": "Eric Hunter", - "description": "Credits purchased by Bisgaard end up in here", - "owner": 3, - "thumbnail": null, - "status": "ACTIVE", - "created": "2024-01-18T09:32:58.042380+00:00", - "modified": "2024-01-18T09:32:58.042380+00:00", - "availableCredits": 0.0 - } - }, - "status_code": 200 - }, - { - "name": "POST /simcore-s3/files/metadata:search", - "description": "", - "method": "POST", - "host": "storage", - "path": { - "path": "/v0/simcore-s3/files/metadata:search", - "path_parameters": [] + { + "name": "GET /projects/0f5f114f-c2bf-4807-914f-2f4df2604223/wallet", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": 
"/v0/projects/{project_id}/wallet", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "pattern": null, + "format": "uuid", + "exclusiveMinimum": null, + "minimum": null, + "anyOf": null, + "allOf": null, + "oneOf": null + }, + "response_value": "projects" + } + ] + }, + "query": null, + "request_payload": null, + "response_body": { + "data": { + "walletId": 1, + "name": "Derek Nguyen", + "description": "Credits purchased by Bisgaard end up in here", + "owner": 3, + "thumbnail": null, + "status": "ACTIVE", + "created": "2024-01-18T09:32:58.042380+00:00", + "modified": "2024-01-18T09:32:58.042380+00:00" + } + }, + "status_code": 200 }, - "query": "kind=owned&user_id=1&startswith=api/4ea24645-fd8c-339b-9621-ae045d45d94d", - "request_payload": null, - "response_body": { - "data": [ - { - "file_uuid": "api/4ea24645-fd8c-339b-9621-ae045d45d94d/single_number.txt", - "location_id": 0, - "project_name": null, - "node_name": null, - "file_name": "single_number.txt", - "file_id": "api/4ea24645-fd8c-339b-9621-ae045d45d94d/single_number.txt", - "created_at": "2024-01-18T12:33:58.399872", - "last_modified": "2024-01-18T12:34:12+00:00", - "file_size": 1, - "entity_tag": "c81e728d9d4c2f636f067f89cc14862c", - "is_soft_link": true, - "is_directory": false, - "sha256_checksum": null - } - ] + { + "name": "GET /wallets/1", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/wallets/{wallet_id}", + "path_parameters": [ + { + "in": "path", + "name": "wallet_id", + "required": true, + "schema": { + "title": "Wallet Id", + "type": "int", + "pattern": null, + "format": null, + "exclusiveMinimum": true, + "minimum": 0, + "anyOf": null, + "allOf": null, + "oneOf": null + }, + "response_value": "wallets" + } + ] + }, + "query": null, + "request_payload": null, + "response_body": { + "data": { + "walletId": 1, + "name": "Eric Hunter", + "description": "Credits purchased by Bisgaard end up in here", + "owner": 3, + "thumbnail": null, + "status": "ACTIVE", + "created": "2024-01-18T09:32:58.042380+00:00", + "modified": "2024-01-18T09:32:58.042380+00:00", + "availableCredits": 0.0 + } + }, + "status_code": 200 }, - "status_code": 200 - } + { + "name": "POST /simcore-s3/files/metadata:search", + "description": "", + "method": "POST", + "host": "storage", + "path": { + "path": "/v0/simcore-s3/files/metadata:search", + "path_parameters": [] + }, + "query": "kind=owned&user_id=1&startswith=api/4ea24645-fd8c-339b-9621-ae045d45d94d", + "request_payload": null, + "response_body": { + "data": [ + { + "file_uuid": "api/4ea24645-fd8c-339b-9621-ae045d45d94d/single_number.txt", + "location_id": 0, + "project_name": null, + "node_name": null, + "file_name": "single_number.txt", + "file_id": "api/4ea24645-fd8c-339b-9621-ae045d45d94d/single_number.txt", + "created_at": "2024-01-18T12:33:58.399872", + "last_modified": "2024-01-18T12:34:12+00:00", + "file_size": 1, + "entity_tag": "c81e728d9d4c2f636f067f89cc14862c", + "is_soft_link": true, + "is_directory": false, + "sha256_checksum": null + } + ] + }, + "status_code": 200 + } ] diff --git a/services/api-server/tests/mocks/on_create_job.json b/services/api-server/tests/mocks/on_create_job.json index 9820285afad8..9c16884a6e6e 100644 --- a/services/api-server/tests/mocks/on_create_job.json +++ b/services/api-server/tests/mocks/on_create_job.json @@ -1,300 +1,301 @@ [ - { - "name": "GET /services/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/2.0.0", - 
"description": "", - "method": "GET", - "host": "catalog", - "path": "/v0/services/simcore/services/comp/itis/sleeper/2.0.0", - "query": "user_id=1", - "request_payload": null, - "response_body": { - "name": "sleeper", - "thumbnail": null, - "description": "A service which awaits for time to pass.", - "deprecated": null, - "classifiers": [], - "quality": {}, - "key": "simcore/services/comp/itis/sleeper", - "version": "2.0.0", - "integration-version": "1.0.0", - "type": "computational", - "authors": [ - { - "name": "Kimberly Wilson", - "email": "christopher72@example.org", - "affiliation": "Ford, Collins and Villarreal" - }, - { - "name": "Daniel Jones", - "email": "fschmitt@example.net", - "affiliation": "Ayala-Anderson" - } - ], - "contact": "jlozano@example.net", - "inputs": { - "input_1": { - "displayOrder": 1.0, - "label": "File with int number", - "description": "Pick a file containing only one integer", - "type": "data:text/plain", - "fileToKeyMap": { - "single_number.txt": "input_1" - } - }, - "input_2": { - "displayOrder": 2.0, - "label": "Sleep interval", - "description": "Choose an amount of time to sleep", - "type": "integer", - "defaultValue": 2 - }, - "input_3": { - "displayOrder": 3.0, - "label": "Fail after sleep", - "description": "If set to true will cause service to fail after it sleeps", - "type": "boolean", - "defaultValue": false - } - }, - "outputs": { - "output_1": { - "displayOrder": 1.0, - "label": "File containing one random integer", - "description": "Integer is generated in range [1-9]", - "type": "data:text/plain", - "fileToKeyMap": { - "single_number.txt": "output_1" - } + { + "name": "GET /services/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/2.0.0", + "description": "", + "method": "GET", + "host": "catalog", + "path": "/v0/services/simcore/services/comp/itis/sleeper/2.0.0", + "query": "user_id=1", + "request_payload": null, + "response_body": { + "name": "sleeper", + "thumbnail": null, + "description": "A service which awaits for time to pass.", + "deprecated": null, + "classifiers": [], + "quality": {}, + "key": "simcore/services/comp/itis/sleeper", + "version": "2.0.0", + "integration-version": "1.0.0", + "type": "computational", + "authors": [ + { + "name": "Kimberly Wilson", + "email": "christopher72@example.org", + "affiliation": "Ford, Collins and Villarreal" + }, + { + "name": "Daniel Jones", + "email": "fschmitt@example.net", + "affiliation": "Ayala-Anderson" + } + ], + "contact": "jlozano@example.net", + "inputs": { + "input_1": { + "displayOrder": 1.0, + "label": "File with int number", + "description": "Pick a file containing only one integer", + "type": "data:text/plain", + "fileToKeyMap": { + "single_number.txt": "input_1" + } + }, + "input_2": { + "displayOrder": 2.0, + "label": "Sleep interval", + "description": "Choose an amount of time to sleep", + "type": "integer", + "defaultValue": 2 + }, + "input_3": { + "displayOrder": 3.0, + "label": "Fail after sleep", + "description": "If set to true will cause service to fail after it sleeps", + "type": "boolean", + "defaultValue": false + } + }, + "outputs": { + "output_1": { + "displayOrder": 1.0, + "label": "File containing one random integer", + "description": "Integer is generated in range [1-9]", + "type": "data:text/plain", + "fileToKeyMap": { + "single_number.txt": "output_1" + } + }, + "output_2": { + "displayOrder": 2.0, + "label": "Random sleep interval", + "description": "Interval is generated in range [1-9]", + "type": "integer" + } + } }, - "output_2": { - "displayOrder": 2.0, - 
"label": "Random sleep interval", - "description": "Interval is generated in range [1-9]", - "type": "integer" - } - } + "status_code": 200 }, - "status_code": 200 - }, - { - "name": "POST /projects", - "description": "", - "method": "POST", - "host": "webserver", - "path": "/v0/projects", - "query": "hidden=true", - "request_payload": { - "uuid": "06325dd9-64af-4243-8011-efdf7fb588a4", - "name": "solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.0/jobs/06325dd9-64af-4243-8011-efdf7fb588a4", - "description": "Study associated to solver job:\n{\n \"id\": \"06325dd9-64af-4243-8011-efdf7fb588a4\",\n \"name\": \"solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.0/jobs/06325dd9-64af-4243-8011-efdf7fb588a4\",\n \"inputs_checksum\": \"0def0cfbe784a61b4779a5a8cf35a376c6335558f5208958fb13cc24e6851bc6\",\n \"created_at\": \"2023-06-08T16:15:03.573115\"\n}", - "thumbnail": "https://via.placeholder.com/170x120.png", - "creationDate": "2023-06-08T16:15:03.587Z", - "lastChangeDate": "2023-06-08T16:15:03.587Z", - "workbench": { - "b5b971ac-86a2-5f31-93ab-d2ac572a201a": { - "key": "simcore/services/comp/itis/sleeper", - "version": "2.0.0", - "label": "sleeper", - "progress": null, - "thumbnail": null, - "runHash": null, - "inputs": { - "x": 4.33, - "n": 55 - }, - "inputsUnits": {}, - "inputAccess": null, - "inputNodes": [], - "outputs": {}, - "outputNode": null, - "outputNodes": null, - "parent": null, - "position": null, - "state": { - "modified": true, - "dependencies": [], - "currentStatus": "NOT_STARTED", - "progress": 0 - }, - "bootOptions": null - } - }, - "prjOwner": "robertsmith@example.org", - "accessRights": {}, - "tags": [], - "classifiers": [], - "ui": { - "workbench": { - "b5b971ac-86a2-5f31-93ab-d2ac572a201a": { - "position": { - "x": 633, - "y": 229 + { + "name": "POST /projects", + "description": "", + "method": "POST", + "host": "webserver", + "path": "/v0/projects", + "query": "hidden=true", + "request_payload": { + "uuid": "06325dd9-64af-4243-8011-efdf7fb588a4", + "name": "solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.0/jobs/06325dd9-64af-4243-8011-efdf7fb588a4", + "description": "Study associated to solver job:\n{\n \"id\": \"06325dd9-64af-4243-8011-efdf7fb588a4\",\n \"name\": \"solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.0/jobs/06325dd9-64af-4243-8011-efdf7fb588a4\",\n \"inputs_checksum\": \"0def0cfbe784a61b4779a5a8cf35a376c6335558f5208958fb13cc24e6851bc6\",\n \"created_at\": \"2023-06-08T16:15:03.573115\"\n}", + "thumbnail": "https://via.placeholder.com/170x120.png", + "creationDate": "2023-06-08T16:15:03.587Z", + "lastChangeDate": "2023-06-08T16:15:03.587Z", + "workbench": { + "b5b971ac-86a2-5f31-93ab-d2ac572a201a": { + "key": "simcore/services/comp/itis/sleeper", + "version": "2.0.0", + "label": "sleeper", + "progress": null, + "thumbnail": null, + "runHash": null, + "inputs": { + "x": 4.33, + "n": 55 + }, + "inputsUnits": {}, + "inputAccess": null, + "inputNodes": [], + "outputs": {}, + "outputNode": null, + "outputNodes": null, + "parent": null, + "position": null, + "state": { + "modified": true, + "dependencies": [], + "currentStatus": "NOT_STARTED", + "progress": 0 + }, + "bootOptions": null + } + }, + "prjOwner": "robertsmith@example.org", + "accessRights": {}, + "tags": [], + "classifiers": [], + "ui": { + "workbench": { + "b5b971ac-86a2-5f31-93ab-d2ac572a201a": { + "position": { + "x": 633, + "y": 229 + }, + "marker": null + } + }, + "slideshow": {}, + "currentNodeId": "b5b971ac-86a2-5f31-93ab-d2ac572a201a", 
+ "annotations": {} }, - "marker": null - } + "quality": {}, + "dev": {} }, - "slideshow": {}, - "currentNodeId": "b5b971ac-86a2-5f31-93ab-d2ac572a201a", - "annotations": {} - }, - "quality": {}, - "dev": {} - }, - "response_body": { - "data": { - "task_id": "POST%20%2Fv0%2Fprojects%3Fhidden%3Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59", - "task_name": "POST /v0/projects?hidden=true", - "status_href": "/v0/tasks/POST%2520%252Fv0%252Fprojects%253Fhidden%253Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59", - "result_href": "/v0/tasks/POST%2520%252Fv0%252Fprojects%253Fhidden%253Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59/result", - "abort_href": "/v0/tasks/POST%2520%252Fv0%252Fprojects%253Fhidden%253Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59" - } - }, - "status_code": 202 - }, - { - "name": "GET tasks/POST%2520%252Fv0%252Fprojects%253Fhidden%253Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59", - "description": "", - "method": "GET", - "host": "webserver", - "path": "/v0/tasks/POST%20%2Fv0%2Fprojects%3Fhidden%3Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59", - "query": null, - "request_payload": null, - "response_body": { - "data": { - "task_progress": { - "message": "creating new study...", - "percent": 0.0 + "response_body": { + "data": { + "task_id": "POST%20%2Fv0%2Fprojects%3Fhidden%3Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59", + "task_name": "POST /v0/projects?hidden=true", + "status_href": "/v0/tasks/POST%2520%252Fv0%252Fprojects%253Fhidden%253Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59", + "result_href": "/v0/tasks/POST%2520%252Fv0%252Fprojects%253Fhidden%253Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59/result", + "abort_href": "/v0/tasks/POST%2520%252Fv0%252Fprojects%253Fhidden%253Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59" + } }, - "done": false, - "started": "2023-06-08T16:15:03.621835" - } + "status_code": 202 }, - "status_code": 200 - }, - { - "name": "GET tasks/POST%2520%252Fv0%252Fprojects%253Fhidden%253Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59", - "description": "", - "method": "GET", - "host": "webserver", - "path": "/v0/tasks/POST%20%2Fv0%2Fprojects%3Fhidden%3Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59", - "query": null, - "request_payload": null, - "response_body": { - "data": { - "task_progress": { - "message": "creating new study...", - "percent": 0.0 + { + "name": "GET tasks/POST%2520%252Fv0%252Fprojects%253Fhidden%253Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59", + "description": "", + "method": "GET", + "host": "webserver", + "path": "/v0/tasks/POST%20%2Fv0%2Fprojects%3Fhidden%3Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59", + "query": null, + "request_payload": null, + "response_body": { + "data": { + "task_progress": { + "message": "creating new study...", + "percent": 0.0 + }, + "done": false, + "started": "2023-06-08T16:15:03.621835" + } }, - "done": false, - "started": "2023-06-08T16:15:03.621835" - } + "status_code": 200 }, - "status_code": 200 - }, - { - "name": "GET tasks/POST%2520%252Fv0%252Fprojects%253Fhidden%253Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59", - "description": "", - "method": "GET", - "host": "webserver", - "path": "/v0/tasks/POST%20%2Fv0%2Fprojects%3Fhidden%3Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59", - "query": null, - "request_payload": null, - "response_body": { - "data": { - "task_progress": { - "message": "finished", - "percent": 1.0 + { + "name": "GET tasks/POST%2520%252Fv0%252Fprojects%253Fhidden%253Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59", + "description": "", + "method": "GET", + "host": "webserver", + "path": 
"/v0/tasks/POST%20%2Fv0%2Fprojects%3Fhidden%3Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59", + "query": null, + "request_payload": null, + "response_body": { + "data": { + "task_progress": { + "message": "creating new study...", + "percent": 0.0 + }, + "done": false, + "started": "2023-06-08T16:15:03.621835" + } }, - "done": true, - "started": "2023-06-08T16:15:03.621835" - } + "status_code": 200 }, - "status_code": 200 - }, - { - "name": "GET tasks/POST%2520%252Fv0%252Fprojects%253Fhidden%253Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59/result", - "description": "", - "method": "GET", - "host": "webserver", - "path": "/v0/tasks/POST%20%2Fv0%2Fprojects%3Fhidden%3Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59/result", - "query": null, - "request_payload": null, - "response_body": { - "data": { - "uuid": "06325dd9-64af-4243-8011-efdf7fb588a4", - "name": "solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.0/jobs/06325dd9-64af-4243-8011-efdf7fb588a4", - "description": "Study associated to solver job:\n{\n \"id\": \"06325dd9-64af-4243-8011-efdf7fb588a4\",\n \"name\": \"solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.0/jobs/06325dd9-64af-4243-8011-efdf7fb588a4\",\n \"inputs_checksum\": \"0def0cfbe784a61b4779a5a8cf35a376c6335558f5208958fb13cc24e6851bc6\",\n \"created_at\": \"2023-06-08T16:15:03.573115\"\n}", - "thumbnail": "https://via.placeholder.com/170x120.png", - "creationDate": "2023-06-08T16:15:03.627Z", - "lastChangeDate": "2023-06-08T16:15:03.627Z", - "workbench": { - "b5b971ac-86a2-5f31-93ab-d2ac572a201a": { - "key": "simcore/services/comp/itis/sleeper", - "version": "2.0.0", - "label": "sleeper", - "progress": 0.0, - "inputs": { - "x": 4.33, - "n": 55 - }, - "inputsUnits": {}, - "inputNodes": [], - "outputs": {}, - "state": { - "modified": true, - "dependencies": [], - "currentStatus": "NOT_STARTED", - "progress": 0.0 + { + "name": "GET tasks/POST%2520%252Fv0%252Fprojects%253Fhidden%253Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59", + "description": "", + "method": "GET", + "host": "webserver", + "path": "/v0/tasks/POST%20%2Fv0%2Fprojects%3Fhidden%3Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59", + "query": null, + "request_payload": null, + "response_body": { + "data": { + "task_progress": { + "message": "finished", + "percent": 1.0 + }, + "done": true, + "started": "2023-06-08T16:15:03.621835" } - } - }, - "prjOwner": "rhondakelly@example.net", - "accessRights": { - "3": { - "read": true, - "write": true, - "delete": true - } }, - "tags": [], - "classifiers": [], - "state": { - "locked": { - "value": false, - "status": "CLOSED" - }, - "state": { - "value": "NOT_STARTED" - } - }, - "ui": { - "workbench": { - "b5b971ac-86a2-5f31-93ab-d2ac572a201a": { - "position": { - "x": 633, - "y": 229 - } + "status_code": 200 + }, + { + "name": "GET tasks/POST%2520%252Fv0%252Fprojects%253Fhidden%253Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59/result", + "description": "", + "method": "GET", + "host": "webserver", + "path": "/v0/tasks/POST%20%2Fv0%2Fprojects%3Fhidden%3Dtrue.42a04ed5-c581-4a8d-b037-48deda49ef59/result", + "query": null, + "request_payload": null, + "response_body": { + "data": { + "uuid": "06325dd9-64af-4243-8011-efdf7fb588a4", + "name": "solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.0/jobs/06325dd9-64af-4243-8011-efdf7fb588a4", + "description": "Study associated to solver job:\n{\n \"id\": \"06325dd9-64af-4243-8011-efdf7fb588a4\",\n \"name\": \"solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.0/jobs/06325dd9-64af-4243-8011-efdf7fb588a4\",\n 
\"inputs_checksum\": \"0def0cfbe784a61b4779a5a8cf35a376c6335558f5208958fb13cc24e6851bc6\",\n \"created_at\": \"2023-06-08T16:15:03.573115\"\n}", + "thumbnail": "https://via.placeholder.com/170x120.png", + "creationDate": "2023-06-08T16:15:03.627Z", + "lastChangeDate": "2023-06-08T16:15:03.627Z", + "workbench": { + "b5b971ac-86a2-5f31-93ab-d2ac572a201a": { + "key": "simcore/services/comp/itis/sleeper", + "version": "2.0.0", + "label": "sleeper", + "progress": 0.0, + "inputs": { + "x": 4.33, + "n": 55 + }, + "inputsUnits": {}, + "inputNodes": [], + "outputs": {}, + "state": { + "modified": true, + "dependencies": [], + "currentStatus": "NOT_STARTED", + "progress": 0.0 + } + } + }, + "prjOwner": "rhondakelly@example.net", + "accessRights": { + "3": { + "read": true, + "write": true, + "delete": true + } + }, + "tags": [], + "classifiers": [], + "state": { + "shareState": { + "locked": false, + "status": "CLOSED", + "currentUserGroupids": [] + }, + "state": { + "value": "NOT_STARTED" + } + }, + "ui": { + "workbench": { + "b5b971ac-86a2-5f31-93ab-d2ac572a201a": { + "position": { + "x": 633, + "y": 229 + } + } + }, + "slideshow": {}, + "currentNodeId": "b5b971ac-86a2-5f31-93ab-d2ac572a201a", + "annotations": {} + }, + "quality": {}, + "dev": {} } - }, - "slideshow": {}, - "currentNodeId": "b5b971ac-86a2-5f31-93ab-d2ac572a201a", - "annotations": {} }, - "quality": {}, - "dev": {} - } + "status_code": 201 }, - "status_code": 201 - }, - { - "name": "DELETE /projects/06325dd9-64af-4243-8011-efdf7fb588a4", - "description": "", - "method": "DELETE", - "host": "webserver", - "path": "/v0/projects/06325dd9-64af-4243-8011-efdf7fb588a4", - "query": null, - "request_payload": null, - "response_body": null, - "status_code": 204 - } + { + "name": "DELETE /projects/06325dd9-64af-4243-8011-efdf7fb588a4", + "description": "", + "method": "DELETE", + "host": "webserver", + "path": "/v0/projects/06325dd9-64af-4243-8011-efdf7fb588a4", + "query": null, + "request_payload": null, + "response_body": null, + "status_code": 204 + } ] diff --git a/services/api-server/tests/mocks/on_list_jobs.json b/services/api-server/tests/mocks/on_list_jobs.json index 6f920f42ad5b..42e2a4f7055f 100644 --- a/services/api-server/tests/mocks/on_list_jobs.json +++ b/services/api-server/tests/mocks/on_list_jobs.json @@ -146,9 +146,10 @@ "tags": [], "classifiers": [], "state": { - "locked": { - "value": false, - "status": "CLOSED" + "shareState": { + "locked": false, + "status": "CLOSED", + "currentUserGroupids": [] }, "state": { "value": "NOT_STARTED" @@ -222,9 +223,10 @@ "tags": [], "classifiers": [], "state": { - "locked": { - "value": false, - "status": "CLOSED" + "shareState": { + "locked": false, + "status": "CLOSED", + "currentUserGroupids": [] }, "state": { "value": "NOT_STARTED" diff --git a/services/api-server/tests/mocks/run_study_function_parent_info.json b/services/api-server/tests/mocks/run_study_function_parent_info.json new file mode 100644 index 000000000000..dfd8035ef8f2 --- /dev/null +++ b/services/api-server/tests/mocks/run_study_function_parent_info.json @@ -0,0 +1,2372 @@ +[ + { + "name": "POST /projects", + "description": "", + "method": "POST", + "host": "webserver", + "path": { + "path": "/v0/projects", + "path_parameters": [] + }, + "query": "from_study=e3e70682-c209-4cac-a29f-6fbed82c07cd&hidden=true", + "response_body": { + "data": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3De3e70682-c209-4cac-a29f-6fbed82c07cd%26hidden%3Dtrue.10b07f64-0b19-4f2a-924b-67d9bcadcef8", + "task_name": "POST 
/v0/projects?from_study=e3e70682-c209-4cac-a29f-6fbed82c07cd&hidden=true", + "status_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.10b07f64-0b19-4f2a-924b-67d9bcadcef8", + "result_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.10b07f64-0b19-4f2a-924b-67d9bcadcef8/result", + "abort_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.10b07f64-0b19-4f2a-924b-67d9bcadcef8" + } + }, + "status_code": 202 + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.10b07f64-0b19-4f2a-924b-67d9bcadcef8", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": { + "task_progress": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3De3e70682-c209-4cac-a29f-6fbed82c07cd%26hidden%3Dtrue.10b07f64-0b19-4f2a-924b-67d9bcadcef8", + "message": "finished", + "percent": 1.0 + }, + "done": true, + "started": "2025-06-13T13:05:50.043464+00:00" + }, + "error": null + } + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.10b07f64-0b19-4f2a-924b-67d9bcadcef8/result", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}/result", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": null, + "error": { + "message": "Project e3e70682-c209-4cac-a29f-6fbed82c07cd not found", + "support_id": null, + "status": 404, + "errors": [ + { + "code": "HTTPNotFound", + "message": "Not Found", + "resource": null, + "field": null + } + ], + "logs": [ + { + "message": "Project e3e70682-c209-4cac-a29f-6fbed82c07cd not found", + "level": "ERROR", + "logger": "user" + } + ] + } + }, + "status_code": 404 + }, + { + "name": "POST /projects", + "description": "", + "method": "POST", + "host": "webserver", + "path": { + "path": "/v0/projects", + "path_parameters": [] + }, + "query": "from_study=e3e70682-c209-4cac-a29f-6fbed82c07cd&hidden=true", + "response_body": { + "data": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3De3e70682-c209-4cac-a29f-6fbed82c07cd%26hidden%3Dtrue.b0f371b4-5247-4efc-94fb-eb3338765f73", + "task_name": "POST /v0/projects?from_study=e3e70682-c209-4cac-a29f-6fbed82c07cd&hidden=true", + "status_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.b0f371b4-5247-4efc-94fb-eb3338765f73", + "result_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.b0f371b4-5247-4efc-94fb-eb3338765f73/result", + "abort_href": 
"http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.b0f371b4-5247-4efc-94fb-eb3338765f73" + } + }, + "status_code": 202 + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.b0f371b4-5247-4efc-94fb-eb3338765f73", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": { + "task_progress": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3De3e70682-c209-4cac-a29f-6fbed82c07cd%26hidden%3Dtrue.b0f371b4-5247-4efc-94fb-eb3338765f73", + "message": "finished", + "percent": 1.0 + }, + "done": true, + "started": "2025-06-13T13:08:08.825919+00:00" + }, + "error": null + } + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.b0f371b4-5247-4efc-94fb-eb3338765f73/result", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}/result", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": null, + "error": { + "message": "Project e3e70682-c209-4cac-a29f-6fbed82c07cd not found", + "support_id": null, + "status": 404, + "errors": [ + { + "code": "HTTPNotFound", + "message": "Not Found", + "resource": null, + "field": null + } + ], + "logs": [ + { + "message": "Project e3e70682-c209-4cac-a29f-6fbed82c07cd not found", + "level": "ERROR", + "logger": "user" + } + ] + } + }, + "status_code": 404 + }, + { + "name": "POST /projects", + "description": "", + "method": "POST", + "host": "webserver", + "path": { + "path": "/v0/projects", + "path_parameters": [] + }, + "query": "from_study=e3e70682-c209-4cac-a29f-6fbed82c07cd&hidden=true", + "response_body": { + "data": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3De3e70682-c209-4cac-a29f-6fbed82c07cd%26hidden%3Dtrue.c7256296-53c7-408d-ad2b-53f2e2e17b77", + "task_name": "POST /v0/projects?from_study=e3e70682-c209-4cac-a29f-6fbed82c07cd&hidden=true", + "status_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.c7256296-53c7-408d-ad2b-53f2e2e17b77", + "result_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.c7256296-53c7-408d-ad2b-53f2e2e17b77/result", + "abort_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.c7256296-53c7-408d-ad2b-53f2e2e17b77" + } + }, + "status_code": 202 + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.c7256296-53c7-408d-ad2b-53f2e2e17b77", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}", + "path_parameters": [ + { + "in": "path", + "name": 
"task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": { + "task_progress": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3De3e70682-c209-4cac-a29f-6fbed82c07cd%26hidden%3Dtrue.c7256296-53c7-408d-ad2b-53f2e2e17b77", + "message": "finished", + "percent": 1.0 + }, + "done": true, + "started": "2025-06-13T13:09:08.065507+00:00" + }, + "error": null + } + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.c7256296-53c7-408d-ad2b-53f2e2e17b77/result", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}/result", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": null, + "error": { + "message": "Project e3e70682-c209-4cac-a29f-6fbed82c07cd not found", + "support_id": null, + "status": 404, + "errors": [ + { + "code": "HTTPNotFound", + "message": "Not Found", + "resource": null, + "field": null + } + ], + "logs": [ + { + "message": "Project e3e70682-c209-4cac-a29f-6fbed82c07cd not found", + "level": "ERROR", + "logger": "user" + } + ] + } + }, + "status_code": 404 + }, + { + "name": "POST /projects", + "description": "", + "method": "POST", + "host": "webserver", + "path": { + "path": "/v0/projects", + "path_parameters": [] + }, + "query": "from_study=e3e70682-c209-4cac-a29f-6fbed82c07cd&hidden=true", + "response_body": { + "data": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3De3e70682-c209-4cac-a29f-6fbed82c07cd%26hidden%3Dtrue.55547ef2-50e2-4320-ae1b-ba4fb0c68feb", + "task_name": "POST /v0/projects?from_study=e3e70682-c209-4cac-a29f-6fbed82c07cd&hidden=true", + "status_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.55547ef2-50e2-4320-ae1b-ba4fb0c68feb", + "result_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.55547ef2-50e2-4320-ae1b-ba4fb0c68feb/result", + "abort_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.55547ef2-50e2-4320-ae1b-ba4fb0c68feb" + } + }, + "status_code": 202 + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.55547ef2-50e2-4320-ae1b-ba4fb0c68feb", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": { + "task_progress": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3De3e70682-c209-4cac-a29f-6fbed82c07cd%26hidden%3Dtrue.55547ef2-50e2-4320-ae1b-ba4fb0c68feb", + "message": "finished", + "percent": 1.0 + }, + "done": true, + "started": "2025-06-13T13:11:47.314542+00:00" + }, + "error": null + } + }, + { + "name": "GET 
http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.55547ef2-50e2-4320-ae1b-ba4fb0c68feb/result", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}/result", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": null, + "error": { + "message": "Project e3e70682-c209-4cac-a29f-6fbed82c07cd not found", + "support_id": null, + "status": 404, + "errors": [ + { + "code": "HTTPNotFound", + "message": "Not Found", + "resource": null, + "field": null + } + ], + "logs": [ + { + "message": "Project e3e70682-c209-4cac-a29f-6fbed82c07cd not found", + "level": "ERROR", + "logger": "user" + } + ] + } + }, + "status_code": 404 + }, + { + "name": "POST /projects", + "description": "", + "method": "POST", + "host": "webserver", + "path": { + "path": "/v0/projects", + "path_parameters": [] + }, + "query": "from_study=e3e70682-c209-4cac-a29f-6fbed82c07cd&hidden=true", + "response_body": { + "data": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3De3e70682-c209-4cac-a29f-6fbed82c07cd%26hidden%3Dtrue.b20cfc08-44cb-4a7c-92a5-112e2560ba61", + "task_name": "POST /v0/projects?from_study=e3e70682-c209-4cac-a29f-6fbed82c07cd&hidden=true", + "status_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.b20cfc08-44cb-4a7c-92a5-112e2560ba61", + "result_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.b20cfc08-44cb-4a7c-92a5-112e2560ba61/result", + "abort_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.b20cfc08-44cb-4a7c-92a5-112e2560ba61" + } + }, + "status_code": 202 + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.b20cfc08-44cb-4a7c-92a5-112e2560ba61", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": { + "task_progress": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3De3e70682-c209-4cac-a29f-6fbed82c07cd%26hidden%3Dtrue.b20cfc08-44cb-4a7c-92a5-112e2560ba61", + "message": "finished", + "percent": 1.0 + }, + "done": true, + "started": "2025-06-13T13:15:55.792723+00:00" + }, + "error": null + } + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.b20cfc08-44cb-4a7c-92a5-112e2560ba61/result", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}/result", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": null, + "error": { + "message": "Project 
e3e70682-c209-4cac-a29f-6fbed82c07cd not found", + "support_id": null, + "status": 404, + "errors": [ + { + "code": "HTTPNotFound", + "message": "Not Found", + "resource": null, + "field": null + } + ], + "logs": [ + { + "message": "Project e3e70682-c209-4cac-a29f-6fbed82c07cd not found", + "level": "ERROR", + "logger": "user" + } + ] + } + }, + "status_code": 404 + }, + { + "name": "POST /projects", + "description": "", + "method": "POST", + "host": "webserver", + "path": { + "path": "/v0/projects", + "path_parameters": [] + }, + "query": "from_study=e3e70682-c209-4cac-a29f-6fbed82c07cd&hidden=true", + "response_body": { + "data": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3De3e70682-c209-4cac-a29f-6fbed82c07cd%26hidden%3Dtrue.68d96dd3-5ee2-42c8-add2-b0e32196a773", + "task_name": "POST /v0/projects?from_study=e3e70682-c209-4cac-a29f-6fbed82c07cd&hidden=true", + "status_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.68d96dd3-5ee2-42c8-add2-b0e32196a773", + "result_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.68d96dd3-5ee2-42c8-add2-b0e32196a773/result", + "abort_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.68d96dd3-5ee2-42c8-add2-b0e32196a773" + } + }, + "status_code": 202 + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.68d96dd3-5ee2-42c8-add2-b0e32196a773", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": { + "task_progress": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3De3e70682-c209-4cac-a29f-6fbed82c07cd%26hidden%3Dtrue.68d96dd3-5ee2-42c8-add2-b0e32196a773", + "message": "finished", + "percent": 1.0 + }, + "done": true, + "started": "2025-06-13T13:20:31.694095+00:00" + }, + "error": null + } + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.68d96dd3-5ee2-42c8-add2-b0e32196a773/result", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}/result", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": null, + "error": { + "message": "Project e3e70682-c209-4cac-a29f-6fbed82c07cd not found", + "support_id": null, + "status": 404, + "errors": [ + { + "code": "HTTPNotFound", + "message": "Not Found", + "resource": null, + "field": null + } + ], + "logs": [ + { + "message": "Project e3e70682-c209-4cac-a29f-6fbed82c07cd not found", + "level": "ERROR", + "logger": "user" + } + ] + } + }, + "status_code": 404 + }, + { + "name": "POST /projects", + "description": "", + "method": "POST", + "host": "webserver", + "path": { + "path": "/v0/projects", + "path_parameters": [] + }, + "query": 
"from_study=e3e70682-c209-4cac-a29f-6fbed82c07cd&hidden=true", + "response_body": { + "data": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3De3e70682-c209-4cac-a29f-6fbed82c07cd%26hidden%3Dtrue.54dfa19d-3324-44cf-876b-e30c92f238b8", + "task_name": "POST /v0/projects?from_study=e3e70682-c209-4cac-a29f-6fbed82c07cd&hidden=true", + "status_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.54dfa19d-3324-44cf-876b-e30c92f238b8", + "result_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.54dfa19d-3324-44cf-876b-e30c92f238b8/result", + "abort_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.54dfa19d-3324-44cf-876b-e30c92f238b8" + } + }, + "status_code": 202 + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.54dfa19d-3324-44cf-876b-e30c92f238b8", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": { + "task_progress": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3De3e70682-c209-4cac-a29f-6fbed82c07cd%26hidden%3Dtrue.54dfa19d-3324-44cf-876b-e30c92f238b8", + "message": "finished", + "percent": 1.0 + }, + "done": true, + "started": "2025-06-13T13:23:55.125558+00:00" + }, + "error": null + } + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253De3e70682-c209-4cac-a29f-6fbed82c07cd%2526hidden%253Dtrue.54dfa19d-3324-44cf-876b-e30c92f238b8/result", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}/result", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": null, + "error": { + "message": "Project e3e70682-c209-4cac-a29f-6fbed82c07cd not found", + "support_id": null, + "status": 404, + "errors": [ + { + "code": "HTTPNotFound", + "message": "Not Found", + "resource": null, + "field": null + } + ], + "logs": [ + { + "message": "Project e3e70682-c209-4cac-a29f-6fbed82c07cd not found", + "level": "ERROR", + "logger": "user" + } + ] + } + }, + "status_code": 404 + }, + { + "name": "POST /projects", + "description": "", + "method": "POST", + "host": "webserver", + "path": { + "path": "/v0/projects", + "path_parameters": [] + }, + "query": "from_study=b8dd6dc4-4857-11f0-bc8c-0242ac140019&hidden=true", + "response_body": { + "data": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3Db8dd6dc4-4857-11f0-bc8c-0242ac140019%26hidden%3Dtrue.40882c4a-e222-4e04-af43-3a8e45b2df24", + "task_name": "POST /v0/projects?from_study=b8dd6dc4-4857-11f0-bc8c-0242ac140019&hidden=true", + "status_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253Db8dd6dc4-4857-11f0-bc8c-0242ac140019%2526hidden%253Dtrue.40882c4a-e222-4e04-af43-3a8e45b2df24", + "result_href": 
"http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253Db8dd6dc4-4857-11f0-bc8c-0242ac140019%2526hidden%253Dtrue.40882c4a-e222-4e04-af43-3a8e45b2df24/result", + "abort_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253Db8dd6dc4-4857-11f0-bc8c-0242ac140019%2526hidden%253Dtrue.40882c4a-e222-4e04-af43-3a8e45b2df24" + } + }, + "status_code": 202 + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253Db8dd6dc4-4857-11f0-bc8c-0242ac140019%2526hidden%253Dtrue.40882c4a-e222-4e04-af43-3a8e45b2df24", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": { + "task_progress": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3Db8dd6dc4-4857-11f0-bc8c-0242ac140019%26hidden%3Dtrue.40882c4a-e222-4e04-af43-3a8e45b2df24", + "message": "finished", + "percent": 1.0 + }, + "done": true, + "started": "2025-06-13T13:29:34.382817+00:00" + }, + "error": null + } + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253Db8dd6dc4-4857-11f0-bc8c-0242ac140019%2526hidden%253Dtrue.40882c4a-e222-4e04-af43-3a8e45b2df24/result", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}/result", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": { + "uuid": "71ec33ca-485a-11f0-bc8c-0242ac140019", + "name": "sleeper (Copy)", + "description": "", + "thumbnail": "", + "type": "STANDARD", + "templateType": null, + "workbench": { + "1cd13000-dd5d-5ebd-964f-7affa26606a9": { + "key": "simcore/services/comp/itis/sleeper", + "version": "2.2.1", + "label": "sleeper", + "progress": 0.0, + "inputs": { + "input_2": 2, + "input_3": false, + "input_4": 0, + "input_5": 0 + }, + "inputsRequired": [], + "inputNodes": [], + "state": { + "modified": true, + "dependencies": [], + "currentStatus": "NOT_STARTED", + "progress": null + } + } + }, + "prjOwner": "bisgaard@itis.swiss", + "accessRights": { + "4": { + "read": true, + "write": true, + "delete": true + } + }, + "creationDate": "2025-06-13T13:29:34.407Z", + "lastChangeDate": "2025-06-13T13:29:34.407Z", + "state": { + "shareState": { + "locked": false, + "status": "CLOSED", + "currentUserGroupids": [] + }, + "state": { + "value": "NOT_STARTED" + } + }, + "trashedAt": null, + "trashedBy": null, + "tags": [], + "classifiers": [], + "quality": { + "enabled": true, + "tsr_target": { + "r01": { + "level": 4, + "references": "" + }, + "r02": { + "level": 4, + "references": "" + }, + "r03": { + "level": 4, + "references": "" + }, + "r04": { + "level": 4, + "references": "" + }, + "r05": { + "level": 4, + "references": "" + }, + "r06": { + "level": 4, + "references": "" + }, + "r07": { + "level": 4, + "references": "" + }, + "r08": { + "level": 4, + "references": "" + }, + "r09": { + "level": 4, + "references": "" + }, + "r10": { + "level": 4, + "references": "" + }, + "r03b": { + "references": "" + }, + "r03c": { + "references": "" + }, + "r07b": { + "references": "" + }, + "r07c": { + "references": "" + }, + "r07d": 
{ + "references": "" + }, + "r07e": { + "references": "" + }, + "r08b": { + "references": "" + }, + "r10b": { + "references": "" + } + }, + "tsr_current": { + "r01": { + "level": 0, + "references": "" + }, + "r02": { + "level": 0, + "references": "" + }, + "r03": { + "level": 0, + "references": "" + }, + "r04": { + "level": 0, + "references": "" + }, + "r05": { + "level": 0, + "references": "" + }, + "r06": { + "level": 0, + "references": "" + }, + "r07": { + "level": 0, + "references": "" + }, + "r08": { + "level": 0, + "references": "" + }, + "r09": { + "level": 0, + "references": "" + }, + "r10": { + "level": 0, + "references": "" + }, + "r03b": { + "references": "" + }, + "r03c": { + "references": "" + }, + "r07b": { + "references": "" + }, + "r07c": { + "references": "" + }, + "r07d": { + "references": "" + }, + "r07e": { + "references": "" + }, + "r08b": { + "references": "" + }, + "r10b": { + "references": "" + } + } + }, + "ui": { + "workbench": { + "1cd13000-dd5d-5ebd-964f-7affa26606a9": { + "position": { + "x": 250, + "y": 100 + } + } + }, + "slideshow": {}, + "currentNodeId": "b00f9b90-4857-11f0-bc8c-0242ac140019", + "mode": "pipeline" + }, + "dev": {}, + "workspaceId": null, + "folderId": null + } + }, + "status_code": 201 + }, + { + "name": "PATCH /projects/71ec33ca-485a-11f0-bc8c-0242ac140019", + "description": "", + "method": "PATCH", + "host": "webserver", + "path": { + "path": "/v0/projects/{project_id}", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "projects" + } + ] + }, + "request_payload": { + "name": "studies/b8dd6dc4-4857-11f0-bc8c-0242ac140019/jobs/71ec33ca-485a-11f0-bc8c-0242ac140019" + }, + "status_code": 204 + }, + { + "name": "GET /projects/71ec33ca-485a-11f0-bc8c-0242ac140019/inputs", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/projects/{project_id}/inputs", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "projects" + } + ] + }, + "response_body": { + "data": {} + } + }, + { + "name": "POST /computations/71ec33ca-485a-11f0-bc8c-0242ac140019:start", + "description": "", + "method": "POST", + "host": "webserver", + "path": { + "path": "/v0/computations/{project_id}:start", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "computations" + } + ] + }, + "request_payload": {}, + "response_body": { + "data": { + "pipeline_id": "71ec33ca-485a-11f0-bc8c-0242ac140019" + } + }, + "status_code": 201 + }, + { + "name": "GET /v2/computations/71ec33ca-485a-11f0-bc8c-0242ac140019", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v2/computations/{project_id}", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "computations" + } + ] + }, + "query": "user_id=1", + "response_body": { + "id": "71ec33ca-485a-11f0-bc8c-0242ac140019", + "state": "STARTED", + "result": null, + "pipeline_details": { + "adjacency_list": { + "1cd13000-dd5d-5ebd-964f-7affa26606a9": [] + }, + "progress": 0.05, + "node_states": { + "1cd13000-dd5d-5ebd-964f-7affa26606a9": { + 
"modified": true, + "dependencies": [], + "currentStatus": "STARTED", + "progress": 0.05 + } + } + }, + "iteration": 1, + "started": "2025-06-13T13:30:18.988214Z", + "stopped": null, + "submitted": "2025-06-13T13:30:18.806995Z", + "url": "http://webserver:8080:30003/v2/computations/71ec33ca-485a-11f0-bc8c-0242ac140019?user_id=1", + "stop_url": "http://webserver:8080:30003/v2/computations/71ec33ca-485a-11f0-bc8c-0242ac140019:stop?user_id=1" + } + }, + { + "name": "POST /projects", + "description": "", + "method": "POST", + "host": "webserver", + "path": { + "path": "/v0/projects", + "path_parameters": [] + }, + "query": "from_study=b8dd6dc4-4857-11f0-bc8c-0242ac140019&hidden=true", + "response_body": { + "data": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3Db8dd6dc4-4857-11f0-bc8c-0242ac140019%26hidden%3Dtrue.14b1e487-728d-4b99-badc-6993ec76f269", + "task_name": "POST /v0/projects?from_study=b8dd6dc4-4857-11f0-bc8c-0242ac140019&hidden=true", + "status_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253Db8dd6dc4-4857-11f0-bc8c-0242ac140019%2526hidden%253Dtrue.14b1e487-728d-4b99-badc-6993ec76f269", + "result_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253Db8dd6dc4-4857-11f0-bc8c-0242ac140019%2526hidden%253Dtrue.14b1e487-728d-4b99-badc-6993ec76f269/result", + "abort_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253Db8dd6dc4-4857-11f0-bc8c-0242ac140019%2526hidden%253Dtrue.14b1e487-728d-4b99-badc-6993ec76f269" + } + }, + "status_code": 202 + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253Db8dd6dc4-4857-11f0-bc8c-0242ac140019%2526hidden%253Dtrue.14b1e487-728d-4b99-badc-6993ec76f269", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": { + "task_progress": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3Db8dd6dc4-4857-11f0-bc8c-0242ac140019%26hidden%3Dtrue.14b1e487-728d-4b99-badc-6993ec76f269", + "message": "Collecting files of 'sleeper'...", + "percent": 1.0 + }, + "done": false, + "started": "2025-06-13T13:31:09.141214+00:00" + }, + "error": null + } + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253Db8dd6dc4-4857-11f0-bc8c-0242ac140019%2526hidden%253Dtrue.14b1e487-728d-4b99-badc-6993ec76f269", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": { + "task_progress": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3Db8dd6dc4-4857-11f0-bc8c-0242ac140019%26hidden%3Dtrue.14b1e487-728d-4b99-badc-6993ec76f269", + "message": "finished", + "percent": 1.0 + }, + "done": true, + "started": "2025-06-13T13:31:09.141214+00:00" + }, + "error": null + } + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253Db8dd6dc4-4857-11f0-bc8c-0242ac140019%2526hidden%253Dtrue.14b1e487-728d-4b99-badc-6993ec76f269/result", + "description": "", 
+ "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}/result", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": { + "uuid": "aa679b18-485a-11f0-89d1-0242ac14050c", + "name": "sleeper (Copy)", + "description": "", + "thumbnail": "", + "type": "STANDARD", + "templateType": null, + "workbench": { + "ed78079d-66fa-54fb-aca4-8f37ec436383": { + "key": "simcore/services/comp/itis/sleeper", + "version": "2.2.1", + "label": "sleeper", + "progress": 0.0, + "inputs": { + "input_2": 2, + "input_3": false, + "input_4": 0, + "input_5": 0 + }, + "inputsRequired": [], + "inputNodes": [], + "state": { + "modified": true, + "dependencies": [], + "currentStatus": "NOT_STARTED", + "progress": null + } + } + }, + "prjOwner": "bisgaard@itis.swiss", + "accessRights": { + "4": { + "read": true, + "write": true, + "delete": true + } + }, + "creationDate": "2025-06-13T13:31:09.169Z", + "lastChangeDate": "2025-06-13T13:31:09.169Z", + "state": { + "shareState": { + "locked": false, + "status": "CLOSED", + "currentUserGroupids": [] + }, + "state": { + "value": "NOT_STARTED" + } + }, + "trashedAt": null, + "trashedBy": null, + "tags": [], + "classifiers": [], + "quality": { + "enabled": true, + "tsr_target": { + "r01": { + "level": 4, + "references": "" + }, + "r02": { + "level": 4, + "references": "" + }, + "r03": { + "level": 4, + "references": "" + }, + "r04": { + "level": 4, + "references": "" + }, + "r05": { + "level": 4, + "references": "" + }, + "r06": { + "level": 4, + "references": "" + }, + "r07": { + "level": 4, + "references": "" + }, + "r08": { + "level": 4, + "references": "" + }, + "r09": { + "level": 4, + "references": "" + }, + "r10": { + "level": 4, + "references": "" + }, + "r03b": { + "references": "" + }, + "r03c": { + "references": "" + }, + "r07b": { + "references": "" + }, + "r07c": { + "references": "" + }, + "r07d": { + "references": "" + }, + "r07e": { + "references": "" + }, + "r08b": { + "references": "" + }, + "r10b": { + "references": "" + } + }, + "tsr_current": { + "r01": { + "level": 0, + "references": "" + }, + "r02": { + "level": 0, + "references": "" + }, + "r03": { + "level": 0, + "references": "" + }, + "r04": { + "level": 0, + "references": "" + }, + "r05": { + "level": 0, + "references": "" + }, + "r06": { + "level": 0, + "references": "" + }, + "r07": { + "level": 0, + "references": "" + }, + "r08": { + "level": 0, + "references": "" + }, + "r09": { + "level": 0, + "references": "" + }, + "r10": { + "level": 0, + "references": "" + }, + "r03b": { + "references": "" + }, + "r03c": { + "references": "" + }, + "r07b": { + "references": "" + }, + "r07c": { + "references": "" + }, + "r07d": { + "references": "" + }, + "r07e": { + "references": "" + }, + "r08b": { + "references": "" + }, + "r10b": { + "references": "" + } + } + }, + "ui": { + "workbench": { + "ed78079d-66fa-54fb-aca4-8f37ec436383": { + "position": { + "x": 250, + "y": 100 + } + } + }, + "slideshow": {}, + "currentNodeId": "b00f9b90-4857-11f0-bc8c-0242ac140019", + "mode": "pipeline" + }, + "dev": {}, + "workspaceId": null, + "folderId": null + } + }, + "status_code": 201 + }, + { + "name": "PATCH /projects/aa679b18-485a-11f0-89d1-0242ac14050c", + "description": "", + "method": "PATCH", + "host": "webserver", + "path": { + "path": "/v0/projects/{project_id}", + "path_parameters": [ + { + "in": "path", + "name": 
"project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "projects" + } + ] + }, + "request_payload": { + "name": "studies/b8dd6dc4-4857-11f0-bc8c-0242ac140019/jobs/aa679b18-485a-11f0-89d1-0242ac14050c" + }, + "status_code": 204 + }, + { + "name": "GET /projects/aa679b18-485a-11f0-89d1-0242ac14050c/inputs", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/projects/{project_id}/inputs", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "projects" + } + ] + }, + "response_body": { + "data": {} + } + }, + { + "name": "POST /computations/aa679b18-485a-11f0-89d1-0242ac14050c:start", + "description": "", + "method": "POST", + "host": "webserver", + "path": { + "path": "/v0/computations/{project_id}:start", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "computations" + } + ] + }, + "request_payload": {}, + "response_body": { + "data": { + "pipeline_id": "aa679b18-485a-11f0-89d1-0242ac14050c" + } + }, + "status_code": 201 + }, + { + "name": "GET /v2/computations/aa679b18-485a-11f0-89d1-0242ac14050c", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v2/computations/{project_id}", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "computations" + } + ] + }, + "query": "user_id=1", + "response_body": { + "id": "aa679b18-485a-11f0-89d1-0242ac14050c", + "state": "STARTED", + "result": null, + "pipeline_details": { + "adjacency_list": { + "ed78079d-66fa-54fb-aca4-8f37ec436383": [] + }, + "progress": 0.05, + "node_states": { + "ed78079d-66fa-54fb-aca4-8f37ec436383": { + "modified": true, + "dependencies": [], + "currentStatus": "STARTED", + "progress": 0.05 + } + } + }, + "iteration": 1, + "started": "2025-06-13T13:31:17.841816Z", + "stopped": null, + "submitted": "2025-06-13T13:31:17.672682Z", + "url": "http://webserver:8080:30003/v2/computations/aa679b18-485a-11f0-89d1-0242ac14050c?user_id=1", + "stop_url": "http://webserver:8080:30003/v2/computations/aa679b18-485a-11f0-89d1-0242ac14050c:stop?user_id=1" + } + }, + { + "name": "POST /projects", + "description": "", + "method": "POST", + "host": "webserver", + "path": { + "path": "/v0/projects", + "path_parameters": [] + }, + "query": "from_study=b8dd6dc4-4857-11f0-bc8c-0242ac140019&hidden=true", + "response_body": { + "data": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3Db8dd6dc4-4857-11f0-bc8c-0242ac140019%26hidden%3Dtrue.4280a56d-58e7-4f2c-aa15-f35a137e60ca", + "task_name": "POST /v0/projects?from_study=b8dd6dc4-4857-11f0-bc8c-0242ac140019&hidden=true", + "status_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253Db8dd6dc4-4857-11f0-bc8c-0242ac140019%2526hidden%253Dtrue.4280a56d-58e7-4f2c-aa15-f35a137e60ca", + "result_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253Db8dd6dc4-4857-11f0-bc8c-0242ac140019%2526hidden%253Dtrue.4280a56d-58e7-4f2c-aa15-f35a137e60ca/result", + "abort_href": 
"http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253Db8dd6dc4-4857-11f0-bc8c-0242ac140019%2526hidden%253Dtrue.4280a56d-58e7-4f2c-aa15-f35a137e60ca" + } + }, + "status_code": 202 + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253Db8dd6dc4-4857-11f0-bc8c-0242ac140019%2526hidden%253Dtrue.4280a56d-58e7-4f2c-aa15-f35a137e60ca", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": { + "task_progress": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3Db8dd6dc4-4857-11f0-bc8c-0242ac140019%26hidden%3Dtrue.4280a56d-58e7-4f2c-aa15-f35a137e60ca", + "message": "finished", + "percent": 1.0 + }, + "done": true, + "started": "2025-06-13T13:34:33.232952+00:00" + }, + "error": null + } + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253Db8dd6dc4-4857-11f0-bc8c-0242ac140019%2526hidden%253Dtrue.4280a56d-58e7-4f2c-aa15-f35a137e60ca/result", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}/result", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": { + "uuid": "240cb4c6-485b-11f0-89d1-0242ac14050c", + "name": "sleeper (Copy)", + "description": "", + "thumbnail": "", + "type": "STANDARD", + "templateType": null, + "workbench": { + "982c5607-783c-5295-9a4f-26c8e3c21211": { + "key": "simcore/services/comp/itis/sleeper", + "version": "2.2.1", + "label": "sleeper", + "progress": 0.0, + "inputs": { + "input_2": 2, + "input_3": false, + "input_4": 0, + "input_5": 0 + }, + "inputsRequired": [], + "inputNodes": [], + "state": { + "modified": true, + "dependencies": [], + "currentStatus": "NOT_STARTED", + "progress": null + } + } + }, + "prjOwner": "bisgaard@itis.swiss", + "accessRights": { + "4": { + "read": true, + "write": true, + "delete": true + } + }, + "creationDate": "2025-06-13T13:34:33.254Z", + "lastChangeDate": "2025-06-13T13:34:33.254Z", + "state": { + "shareState": { + "locked": false, + "status": "CLOSED", + "currentUserGroupids": [] + }, + "state": { + "value": "NOT_STARTED" + } + }, + "trashedAt": null, + "trashedBy": null, + "tags": [], + "classifiers": [], + "quality": { + "enabled": true, + "tsr_target": { + "r01": { + "level": 4, + "references": "" + }, + "r02": { + "level": 4, + "references": "" + }, + "r03": { + "level": 4, + "references": "" + }, + "r04": { + "level": 4, + "references": "" + }, + "r05": { + "level": 4, + "references": "" + }, + "r06": { + "level": 4, + "references": "" + }, + "r07": { + "level": 4, + "references": "" + }, + "r08": { + "level": 4, + "references": "" + }, + "r09": { + "level": 4, + "references": "" + }, + "r10": { + "level": 4, + "references": "" + }, + "r03b": { + "references": "" + }, + "r03c": { + "references": "" + }, + "r07b": { + "references": "" + }, + "r07c": { + "references": "" + }, + "r07d": { + "references": "" + }, + "r07e": { + "references": "" + }, + "r08b": { + "references": "" + }, + "r10b": { + "references": "" + } + }, + "tsr_current": { + "r01": { + "level": 0, + "references": "" + }, + 
"r02": { + "level": 0, + "references": "" + }, + "r03": { + "level": 0, + "references": "" + }, + "r04": { + "level": 0, + "references": "" + }, + "r05": { + "level": 0, + "references": "" + }, + "r06": { + "level": 0, + "references": "" + }, + "r07": { + "level": 0, + "references": "" + }, + "r08": { + "level": 0, + "references": "" + }, + "r09": { + "level": 0, + "references": "" + }, + "r10": { + "level": 0, + "references": "" + }, + "r03b": { + "references": "" + }, + "r03c": { + "references": "" + }, + "r07b": { + "references": "" + }, + "r07c": { + "references": "" + }, + "r07d": { + "references": "" + }, + "r07e": { + "references": "" + }, + "r08b": { + "references": "" + }, + "r10b": { + "references": "" + } + } + }, + "ui": { + "workbench": { + "982c5607-783c-5295-9a4f-26c8e3c21211": { + "position": { + "x": 250, + "y": 100 + } + } + }, + "slideshow": {}, + "currentNodeId": "b00f9b90-4857-11f0-bc8c-0242ac140019", + "mode": "pipeline" + }, + "dev": {}, + "workspaceId": null, + "folderId": null + } + }, + "status_code": 201 + }, + { + "name": "PATCH /projects/240cb4c6-485b-11f0-89d1-0242ac14050c", + "description": "", + "method": "PATCH", + "host": "webserver", + "path": { + "path": "/v0/projects/{project_id}", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "projects" + } + ] + }, + "request_payload": { + "name": "studies/b8dd6dc4-4857-11f0-bc8c-0242ac140019/jobs/240cb4c6-485b-11f0-89d1-0242ac14050c" + }, + "status_code": 204 + }, + { + "name": "GET /projects/240cb4c6-485b-11f0-89d1-0242ac14050c/inputs", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/projects/{project_id}/inputs", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "projects" + } + ] + }, + "response_body": { + "data": {} + } + }, + { + "name": "POST /computations/240cb4c6-485b-11f0-89d1-0242ac14050c:start", + "description": "", + "method": "POST", + "host": "webserver", + "path": { + "path": "/v0/computations/{project_id}:start", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "computations" + } + ] + }, + "request_payload": {}, + "response_body": { + "data": { + "pipeline_id": "240cb4c6-485b-11f0-89d1-0242ac14050c" + } + }, + "status_code": 201 + }, + { + "name": "GET /v2/computations/240cb4c6-485b-11f0-89d1-0242ac14050c", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v2/computations/{project_id}", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "computations" + } + ] + }, + "query": "user_id=1", + "response_body": { + "id": "240cb4c6-485b-11f0-89d1-0242ac14050c", + "state": "STARTED", + "result": null, + "pipeline_details": { + "adjacency_list": { + "982c5607-783c-5295-9a4f-26c8e3c21211": [] + }, + "progress": 0.05, + "node_states": { + "982c5607-783c-5295-9a4f-26c8e3c21211": { + "modified": true, + "dependencies": [], + "currentStatus": "STARTED", + "progress": 0.05 + } + } + }, + "iteration": 1, + "started": "2025-06-13T13:34:40.872636Z", + "stopped": null, + "submitted": 
"2025-06-13T13:34:40.690820Z", + "url": "http://webserver:8080:30003/v2/computations/240cb4c6-485b-11f0-89d1-0242ac14050c?user_id=1", + "stop_url": "http://webserver:8080:30003/v2/computations/240cb4c6-485b-11f0-89d1-0242ac14050c:stop?user_id=1" + } + }, + { + "name": "POST /projects", + "description": "", + "method": "POST", + "host": "webserver", + "path": { + "path": "/v0/projects", + "path_parameters": [] + }, + "query": "from_study=b8dd6dc4-4857-11f0-bc8c-0242ac140019&hidden=true", + "response_body": { + "data": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3Db8dd6dc4-4857-11f0-bc8c-0242ac140019%26hidden%3Dtrue.e43e3bf5-5910-42ad-8abe-636f7aa87d7b", + "task_name": "POST /v0/projects?from_study=b8dd6dc4-4857-11f0-bc8c-0242ac140019&hidden=true", + "status_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253Db8dd6dc4-4857-11f0-bc8c-0242ac140019%2526hidden%253Dtrue.e43e3bf5-5910-42ad-8abe-636f7aa87d7b", + "result_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253Db8dd6dc4-4857-11f0-bc8c-0242ac140019%2526hidden%253Dtrue.e43e3bf5-5910-42ad-8abe-636f7aa87d7b/result", + "abort_href": "http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253Db8dd6dc4-4857-11f0-bc8c-0242ac140019%2526hidden%253Dtrue.e43e3bf5-5910-42ad-8abe-636f7aa87d7b" + } + }, + "status_code": 202 + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253Db8dd6dc4-4857-11f0-bc8c-0242ac140019%2526hidden%253Dtrue.e43e3bf5-5910-42ad-8abe-636f7aa87d7b", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": { + "task_progress": { + "task_id": "POST%20%2Fv0%2Fprojects%3Ffrom_study%3Db8dd6dc4-4857-11f0-bc8c-0242ac140019%26hidden%3Dtrue.e43e3bf5-5910-42ad-8abe-636f7aa87d7b", + "message": "finished", + "percent": 1.0 + }, + "done": true, + "started": "2025-06-13T13:48:21.407753+00:00" + }, + "error": null + } + }, + { + "name": "GET http://webserver:8080/v0/tasks-legacy/POST%2520%252Fv0%252Fprojects%253Ffrom_study%253Db8dd6dc4-4857-11f0-bc8c-0242ac140019%2526hidden%253Dtrue.e43e3bf5-5910-42ad-8abe-636f7aa87d7b/result", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/tasks-legacy/{task_id}/result", + "path_parameters": [ + { + "in": "path", + "name": "task_id", + "required": true, + "schema": { + "title": "Task Id", + "type": "str" + }, + "response_value": "tasks-legacy" + } + ] + }, + "response_body": { + "data": { + "uuid": "11ae9478-485d-11f0-bc8c-0242ac140019", + "name": "sleeper (Copy)", + "description": "", + "thumbnail": "", + "type": "STANDARD", + "templateType": null, + "workbench": { + "4a7d1258-dcfa-55ab-ab53-38d0fa99431a": { + "key": "simcore/services/comp/itis/sleeper", + "version": "2.2.1", + "label": "sleeper", + "progress": 0.0, + "inputs": { + "input_2": 2, + "input_3": false, + "input_4": 0, + "input_5": 0 + }, + "inputsRequired": [], + "inputNodes": [], + "state": { + "modified": true, + "dependencies": [], + "currentStatus": "NOT_STARTED", + "progress": null + } + } + }, + "prjOwner": "bisgaard@itis.swiss", + "accessRights": { + "4": { + "read": true, + "write": true, + "delete": true + } + }, + "creationDate": 
"2025-06-13T13:48:21.431Z", + "lastChangeDate": "2025-06-13T13:48:21.431Z", + "state": { + "shareState": { + "locked": false, + "status": "CLOSED", + "currentUserGroupids": [] + }, + "state": { + "value": "NOT_STARTED" + } + }, + "trashedAt": null, + "trashedBy": null, + "tags": [], + "classifiers": [], + "quality": { + "enabled": true, + "tsr_target": { + "r01": { + "level": 4, + "references": "" + }, + "r02": { + "level": 4, + "references": "" + }, + "r03": { + "level": 4, + "references": "" + }, + "r04": { + "level": 4, + "references": "" + }, + "r05": { + "level": 4, + "references": "" + }, + "r06": { + "level": 4, + "references": "" + }, + "r07": { + "level": 4, + "references": "" + }, + "r08": { + "level": 4, + "references": "" + }, + "r09": { + "level": 4, + "references": "" + }, + "r10": { + "level": 4, + "references": "" + }, + "r03b": { + "references": "" + }, + "r03c": { + "references": "" + }, + "r07b": { + "references": "" + }, + "r07c": { + "references": "" + }, + "r07d": { + "references": "" + }, + "r07e": { + "references": "" + }, + "r08b": { + "references": "" + }, + "r10b": { + "references": "" + } + }, + "tsr_current": { + "r01": { + "level": 0, + "references": "" + }, + "r02": { + "level": 0, + "references": "" + }, + "r03": { + "level": 0, + "references": "" + }, + "r04": { + "level": 0, + "references": "" + }, + "r05": { + "level": 0, + "references": "" + }, + "r06": { + "level": 0, + "references": "" + }, + "r07": { + "level": 0, + "references": "" + }, + "r08": { + "level": 0, + "references": "" + }, + "r09": { + "level": 0, + "references": "" + }, + "r10": { + "level": 0, + "references": "" + }, + "r03b": { + "references": "" + }, + "r03c": { + "references": "" + }, + "r07b": { + "references": "" + }, + "r07c": { + "references": "" + }, + "r07d": { + "references": "" + }, + "r07e": { + "references": "" + }, + "r08b": { + "references": "" + }, + "r10b": { + "references": "" + } + } + }, + "ui": { + "workbench": { + "4a7d1258-dcfa-55ab-ab53-38d0fa99431a": { + "position": { + "x": 250, + "y": 100 + } + } + }, + "slideshow": {}, + "currentNodeId": "b00f9b90-4857-11f0-bc8c-0242ac140019", + "mode": "pipeline" + }, + "dev": {}, + "workspaceId": null, + "folderId": null + } + }, + "status_code": 201 + }, + { + "name": "PATCH /projects/11ae9478-485d-11f0-bc8c-0242ac140019", + "description": "", + "method": "PATCH", + "host": "webserver", + "path": { + "path": "/v0/projects/{project_id}", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "projects" + } + ] + }, + "request_payload": { + "name": "studies/b8dd6dc4-4857-11f0-bc8c-0242ac140019/jobs/11ae9478-485d-11f0-bc8c-0242ac140019" + }, + "status_code": 204 + }, + { + "name": "GET /projects/11ae9478-485d-11f0-bc8c-0242ac140019/inputs", + "description": "", + "method": "GET", + "host": "webserver", + "path": { + "path": "/v0/projects/{project_id}/inputs", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "projects" + } + ] + }, + "response_body": { + "data": {} + } + }, + { + "name": "POST /computations/11ae9478-485d-11f0-bc8c-0242ac140019:start", + "description": "", + "method": "POST", + "host": "webserver", + "path": { + "path": "/v0/computations/{project_id}:start", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + 
"schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "computations" + } + ] + }, + "request_payload": {}, + "response_body": { + "data": { + "pipeline_id": "11ae9478-485d-11f0-bc8c-0242ac140019" + } + }, + "status_code": 201 + }, + { + "name": "GET /v2/computations/11ae9478-485d-11f0-bc8c-0242ac140019", + "description": "", + "method": "GET", + "host": "director-v2", + "path": { + "path": "/v2/computations/{project_id}", + "path_parameters": [ + { + "in": "path", + "name": "project_id", + "required": true, + "schema": { + "title": "Project Id", + "type": "str", + "format": "uuid" + }, + "response_value": "computations" + } + ] + }, + "query": "user_id=1", + "response_body": { + "id": "11ae9478-485d-11f0-bc8c-0242ac140019", + "state": "STARTED", + "result": null, + "pipeline_details": { + "adjacency_list": { + "4a7d1258-dcfa-55ab-ab53-38d0fa99431a": [] + }, + "progress": 0.05, + "node_states": { + "4a7d1258-dcfa-55ab-ab53-38d0fa99431a": { + "modified": true, + "dependencies": [], + "currentStatus": "STARTED", + "progress": 0.05 + } + } + }, + "iteration": 1, + "started": "2025-06-13T13:48:28.876507Z", + "stopped": null, + "submitted": "2025-06-13T13:48:28.689344Z", + "url": "http://director-v2:8000/v2/computations/11ae9478-485d-11f0-bc8c-0242ac140019?user_id=1", + "stop_url": "http://director-v2:8000/v2/computations/11ae9478-485d-11f0-bc8c-0242ac140019:stop?user_id=1" + } + } +] diff --git a/services/api-server/tests/mocks/run_study_workflow.json b/services/api-server/tests/mocks/run_study_workflow.json index 90216ee45ba0..9f1dfd48768d 100644 --- a/services/api-server/tests/mocks/run_study_workflow.json +++ b/services/api-server/tests/mocks/run_study_workflow.json @@ -513,9 +513,10 @@ "tags": [], "classifiers": [], "state": { - "locked": { - "value": false, - "status": "CLOSED" + "shareState": { + "locked": false, + "status": "CLOSED", + "currentUserGroupids": [] }, "state": { "value": "NOT_STARTED" diff --git a/services/api-server/tests/mocks/start_job_not_enough_credit.json b/services/api-server/tests/mocks/start_job_not_enough_credit.json index c942d8341ad3..c3381053cdd3 100644 --- a/services/api-server/tests/mocks/start_job_not_enough_credit.json +++ b/services/api-server/tests/mocks/start_job_not_enough_credit.json @@ -81,9 +81,10 @@ "tags": [], "classifiers": [], "state": { - "locked": { - "value": false, - "status": "CLOSED" + "shareState": { + "locked": false, + "status": "CLOSED", + "currentUserGroupids": [] }, "state": { "value": "NOT_STARTED" diff --git a/services/api-server/tests/mocks/start_job_with_payment.json b/services/api-server/tests/mocks/start_job_with_payment.json index 847dab9d0542..a66106d7a4d3 100644 --- a/services/api-server/tests/mocks/start_job_with_payment.json +++ b/services/api-server/tests/mocks/start_job_with_payment.json @@ -81,9 +81,10 @@ "tags": [], "classifiers": [], "state": { - "locked": { - "value": false, - "status": "CLOSED" + "shareState": { + "locked": false, + "status": "CLOSED", + "currentUserGroupids": [] }, "state": { "value": "NOT_STARTED" diff --git a/services/api-server/tests/mocks/test_get_and_update_study_job_metadata.json b/services/api-server/tests/mocks/test_get_and_update_study_job_metadata.json index b624e232ff97..b857b3693ee4 100644 --- a/services/api-server/tests/mocks/test_get_and_update_study_job_metadata.json +++ b/services/api-server/tests/mocks/test_get_and_update_study_job_metadata.json @@ -236,9 +236,10 @@ "tags": [], "classifiers": [], "state": { - "locked": { - "value": false, - 
"status": "CLOSED" + "shareState": { + "locked": false, + "status": "CLOSED", + "currentUserGroupids": [] }, "state": { "value": "UNKNOWN" diff --git a/services/api-server/tests/unit/_with_db/conftest.py b/services/api-server/tests/unit/_with_db/conftest.py index fd2441c879e5..90fcea5f8091 100644 --- a/services/api-server/tests/unit/_with_db/conftest.py +++ b/services/api-server/tests/unit/_with_db/conftest.py @@ -34,7 +34,7 @@ from simcore_postgres_database.models.products import products from simcore_postgres_database.models.users import users from simcore_service_api_server.clients.postgres import get_engine -from simcore_service_api_server.core.application import init_app +from simcore_service_api_server.core.application import create_app from simcore_service_api_server.core.settings import PostgresSettings from sqlalchemy.ext.asyncio import AsyncEngine @@ -187,7 +187,7 @@ def app(app_environment: EnvVarsDict, migrated_db: None) -> FastAPI: - it uses default environ as pg - db is started and initialized """ - return init_app() + return create_app() @pytest.fixture diff --git a/services/api-server/tests/unit/_with_db/data/docker-compose.yml b/services/api-server/tests/unit/_with_db/data/docker-compose.yml index ae76474af7cc..75eb2f482459 100644 --- a/services/api-server/tests/unit/_with_db/data/docker-compose.yml +++ b/services/api-server/tests/unit/_with_db/data/docker-compose.yml @@ -1,6 +1,6 @@ services: postgres: - image: postgres:14.5-alpine@sha256:db802f226b620fc0b8adbeca7859eb203c8d3c9ce5d84870fadee05dea8f50ce + image: "postgres:14.8-alpine@sha256:150dd39ccb7ae6c7ba6130c3582c39a30bb5d3d22cb08ad0ba37001e3f829abc" environment: - POSTGRES_USER=${POSTGRES_USER:-test} - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-test} diff --git a/services/api-server/tests/unit/_with_db/test_api_user.py b/services/api-server/tests/unit/_with_db/test_api_user.py index 5b29f72ef158..b255889b339a 100644 --- a/services/api-server/tests/unit/_with_db/test_api_user.py +++ b/services/api-server/tests/unit/_with_db/test_api_user.py @@ -9,7 +9,7 @@ import pytest import respx from fastapi import FastAPI -from models_library.api_schemas_webserver.users import MyProfileGet as WebProfileGet +from models_library.api_schemas_webserver.users import MyProfileRestGet as WebProfileGet from pytest_mock import MockType from respx import MockRouter from simcore_service_api_server._meta import API_VTAG diff --git a/services/api-server/tests/unit/api_functions/celery/conftest.py b/services/api-server/tests/unit/api_functions/celery/conftest.py new file mode 100644 index 000000000000..0a5c933a7281 --- /dev/null +++ b/services/api-server/tests/unit/api_functions/celery/conftest.py @@ -0,0 +1,146 @@ +# pylint: disable=unused-argument +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-positional-arguments +# pylint: disable=no-name-in-module + + +import datetime +from collections.abc import AsyncIterator, Callable +from typing import Any + +import pytest +from celery import Celery # pylint: disable=no-name-in-module +from celery.contrib.testing.worker import ( # pylint: disable=no-name-in-module + TestWorkController, + start_worker, +) +from celery.signals import ( # pylint: disable=no-name-in-module + worker_init, + worker_shutdown, +) +from celery.worker.worker import WorkController # pylint: disable=no-name-in-module +from celery_library.signals import on_worker_init, on_worker_shutdown +from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import delenvs_from_dict, setenvs_from_dict 
+from pytest_simcore.helpers.typing_env import EnvVarsDict +from servicelib.fastapi.celery.app_server import FastAPIAppServer +from settings_library.redis import RedisSettings +from simcore_service_api_server.celery_worker.worker_main import setup_worker_tasks +from simcore_service_api_server.clients import celery_task_manager +from simcore_service_api_server.core.application import create_app +from simcore_service_api_server.core.settings import ApplicationSettings + + +@pytest.fixture(scope="session") +def celery_config() -> dict[str, Any]: + return { + "broker_connection_retry_on_startup": True, + "broker_url": "memory://localhost//", + "result_backend": "cache+memory://localhost//", + "result_expires": datetime.timedelta(days=7), + "result_extended": True, + "pool": "threads", + "task_default_queue": "default", + "task_send_sent_event": True, + "task_track_started": True, + "worker_send_task_events": True, + } + + +@pytest.fixture +async def mocked_log_streamer_setup(mocker: MockerFixture) -> MockerFixture: + # mock the log streamer: it looks for non-existent queues; this should be solved more elegantly + from simcore_service_api_server.services_http import rabbitmq + + mock_log_streamer = mocker.patch.object(rabbitmq, "LogDistributor", spec=True) + return mock_log_streamer + + +@pytest.fixture +def mock_celery_app(mocker: MockerFixture, celery_config: dict[str, Any]) -> Celery: + celery_app = Celery(**celery_config) + + mocker.patch.object( + celery_task_manager, + celery_task_manager.create_app.__name__, + lambda settings: celery_app, + ) + + return celery_app + + +@pytest.fixture +def app_environment( + mock_celery_app: Celery, + mocked_log_streamer_setup: MockerFixture, + use_in_memory_redis: RedisSettings, + monkeypatch: pytest.MonkeyPatch, + app_environment: EnvVarsDict, + rabbit_env_vars_dict: EnvVarsDict, +) -> EnvVarsDict: + # do not init other services + delenvs_from_dict(monkeypatch, ["API_SERVER_RABBITMQ", "API_SERVER_CELERY"]) + env_vars_dict = setenvs_from_dict( + monkeypatch, + { + **rabbit_env_vars_dict, + "API_SERVER_POSTGRES": "null", + "API_SERVER_HEALTH_CHECK_TASK_PERIOD_SECONDS": "3", + "API_SERVER_HEALTH_CHECK_TASK_TIMEOUT_SECONDS": "1", + }, + ) + + settings = ApplicationSettings.create_from_envs() + assert settings.API_SERVER_CELERY is not None + + return env_vars_dict + + +@pytest.fixture +def register_celery_tasks() -> Callable[[Celery], None]: + """override this fixture if tasks need to be registered""" + + def _(celery_app: Celery) -> None: ...
+ + return _ + + +@pytest.fixture +def add_worker_tasks() -> bool: + "override to avoid adding the default worker tasks" + return True + + +@pytest.fixture +async def with_api_server_celery_worker( + app_environment: EnvVarsDict, + celery_app: Celery, + monkeypatch: pytest.MonkeyPatch, + register_celery_tasks: Callable[[Celery], None], + add_worker_tasks: bool, +) -> AsyncIterator[TestWorkController]: + # Signals must be explicitly connected + monkeypatch.setenv("API_SERVER_WORKER_MODE", "true") + app_settings = ApplicationSettings.create_from_envs() + + app_server = FastAPIAppServer(app=create_app(app_settings)) + + def _on_worker_init_wrapper(sender: WorkController, **kwargs): + return on_worker_init(sender, app_server=app_server, **kwargs) + + worker_init.connect(_on_worker_init_wrapper) + worker_shutdown.connect(on_worker_shutdown) + + if add_worker_tasks: + setup_worker_tasks(celery_app) + register_celery_tasks(celery_app) + + with start_worker( + celery_app, + pool="threads", + concurrency=1, + loglevel="info", + perform_ping_check=False, + queues="api_worker_queue", + ) as worker: + yield worker diff --git a/services/api-server/tests/unit/api_functions/celery/test_functions_celery.py b/services/api-server/tests/unit/api_functions/celery/test_functions_celery.py new file mode 100644 index 000000000000..2e105cd9c131 --- /dev/null +++ b/services/api-server/tests/unit/api_functions/celery/test_functions_celery.py @@ -0,0 +1,683 @@ +# pylint: disable=unused-argument +# pylint: disable=redefined-outer-name +# pylint: disable=no-name-in-module +# pylint: disable=too-many-positional-arguments +# pylint: disable=too-many-arguments + + +import datetime +import inspect +from collections.abc import Callable +from functools import partial +from pathlib import Path +from typing import Any + +import httpx +import pytest +import respx +from celery import Celery, Task # type: ignore # pylint: disable=no-name-in-module +from celery.contrib.testing.worker import TestWorkController # type: ignore +from celery_library.task import register_task +from celery_library.types import register_pydantic_types +from faker import Faker +from fastapi import FastAPI, status +from httpx import AsyncClient, BasicAuth, HTTPStatusError +from models_library.api_schemas_long_running_tasks.tasks import TaskResult, TaskStatus +from models_library.functions import ( + FunctionClass, + FunctionID, + FunctionJobCollection, + FunctionJobID, + FunctionUserAccessRights, + FunctionUserApiAccessRights, + RegisteredFunction, + RegisteredFunctionJob, + RegisteredFunctionJobCollection, + RegisteredProjectFunction, + RegisteredProjectFunctionJob, + RegisteredProjectFunctionJobPatch, +) +from models_library.projects import ProjectID +from models_library.users import UserID +from pytest_mock import MockerFixture, MockType +from pytest_simcore.helpers.httpx_calls_capture_models import HttpApiCallCaptureModel +from servicelib.celery.models import ExecutionMetadata, TaskID, TasksQueue +from servicelib.common_headers import ( + X_SIMCORE_PARENT_NODE_ID, + X_SIMCORE_PARENT_PROJECT_UUID, +) +from simcore_service_api_server._meta import API_VTAG +from simcore_service_api_server.api.dependencies.authentication import Identity +from simcore_service_api_server.api.dependencies.celery import ( + get_task_manager, +) +from simcore_service_api_server.celery_worker.worker_tasks.functions_tasks import ( + run_function as run_function_task, +) +from simcore_service_api_server.exceptions.backend_errors import BaseBackEndError +from 
simcore_service_api_server.models.api_resources import JobLinks +from simcore_service_api_server.models.domain.celery_models import ( + ApiServerOwnerMetadata, +) +from simcore_service_api_server.models.domain.functions import ( + PreRegisteredFunctionJobData, +) +from simcore_service_api_server.models.schemas.jobs import ( + JobPricingSpecification, + NodeID, +) +from tenacity import ( + AsyncRetrying, + retry_if_exception_type, + stop_after_delay, + wait_fixed, +) + +pytest_simcore_core_services_selection = ["postgres", "rabbit"] +pytest_simcore_ops_services_selection = ["adminer"] + +_faker = Faker() + + +async def wait_for_task_result( + client: AsyncClient, + auth: BasicAuth, + task_id: str, + timeout: float = 30.0, # noqa: ASYNC109 +) -> TaskResult: + + async for attempt in AsyncRetrying( + stop=stop_after_delay(timeout), + wait=wait_fixed(wait=datetime.timedelta(seconds=1.0)), + reraise=True, + retry=retry_if_exception_type(AssertionError), + ): + with attempt: + + response = await client.get(f"/{API_VTAG}/tasks/{task_id}", auth=auth) + response.raise_for_status() + status = TaskStatus.model_validate(response.json()) + assert status.done is True + + assert status.done is True + response = await client.get(f"/{API_VTAG}/tasks/{task_id}/result", auth=auth) + response.raise_for_status() + return TaskResult.model_validate(response.json()) + + +def _register_fake_run_function_task() -> Callable[[Celery], None]: + + async def run_function( + task: Task, + task_id: TaskID, + *, + user_identity: Identity, + function: RegisteredFunction, + pre_registered_function_job_data: PreRegisteredFunctionJobData, + pricing_spec: JobPricingSpecification | None, + job_links: JobLinks, + x_simcore_parent_project_uuid: NodeID | None, + x_simcore_parent_node_id: NodeID | None, + ) -> RegisteredFunctionJob: + return RegisteredProjectFunctionJob( + title=_faker.sentence(), + description=_faker.paragraph(), + function_uid=FunctionID(_faker.uuid4()), + inputs=pre_registered_function_job_data.job_inputs.values, + outputs=None, + function_class=FunctionClass.PROJECT, + uid=FunctionJobID(_faker.uuid4()), + created_at=_faker.date_time(), + project_job_id=ProjectID(_faker.uuid4()), + job_creation_task_id=None, + ) + + # check our mock task is correct + assert run_function_task.__name__ == run_function.__name__ + assert inspect.signature(run_function_task) == inspect.signature( + run_function + ), f"Signature mismatch: {inspect.signature(run_function_task)} != {inspect.signature(run_function)}" + + def _(celery_app: Celery) -> None: + register_pydantic_types(RegisteredProjectFunctionJob) + register_task(celery_app, run_function) + + return _ + + +async def _patch_registered_function_job_side_effect( + mock_registered_project_function_job: RegisteredFunctionJob, *args, **kwargs +): + registered_function_job_patch = kwargs["registered_function_job_patch"] + assert isinstance(registered_function_job_patch, RegisteredProjectFunctionJobPatch) + job_creation_task_id = registered_function_job_patch.job_creation_task_id + uid = kwargs["function_job_uuid"] + return mock_registered_project_function_job.model_copy( + update={"job_creation_task_id": job_creation_task_id, "uid": uid} + ) + + +@pytest.mark.parametrize("register_celery_tasks", [_register_fake_run_function_task()]) +@pytest.mark.parametrize("add_worker_tasks", [False]) +async def test_with_fake_run_function( + app: FastAPI, + client: AsyncClient, + auth: BasicAuth, + mocker: MockerFixture, + with_api_server_celery_worker: TestWorkController, + 
mock_handler_in_functions_rpc_interface: Callable[ + [str, Any, Exception | None, Callable | None], None + ], + mock_registered_project_function: RegisteredProjectFunction, + mock_registered_project_function_job: RegisteredFunctionJob, + user_id: UserID, +): + + body = { + "input_1": _faker.uuid4(), + "input_2": _faker.pyfloat(min_value=0, max_value=100), + "input_3": _faker.pyint(min_value=0, max_value=100), + "input_4": _faker.boolean(), + "input_5": _faker.sentence(), + "input_6": [ + _faker.pyfloat(min_value=0, max_value=100) + for _ in range(_faker.pyint(min_value=5, max_value=100)) + ], + } + + mock_handler_in_functions_rpc_interface( + "get_function_user_permissions", + FunctionUserAccessRights( + user_id=user_id, + execute=True, + read=True, + write=True, + ), + None, + None, + ) + mock_handler_in_functions_rpc_interface( + "get_functions_user_api_access_rights", + FunctionUserApiAccessRights( + user_id=user_id, + read_functions=True, + write_functions=True, + execute_functions=True, + read_function_jobs=True, + write_function_jobs=True, + execute_function_jobs=True, + read_function_job_collections=True, + write_function_job_collections=True, + execute_function_job_collections=True, + ), + None, + None, + ) + mock_handler_in_functions_rpc_interface( + "get_function", mock_registered_project_function, None, None + ) + mock_handler_in_functions_rpc_interface("find_cached_function_jobs", [], None, None) + mock_handler_in_functions_rpc_interface( + "register_function_job", mock_registered_project_function_job, None, None + ) + + mock_handler_in_functions_rpc_interface( + "patch_registered_function_job", + None, + None, + partial( + _patch_registered_function_job_side_effect, + mock_registered_project_function_job, + ), + ) + + headers = {} + headers[X_SIMCORE_PARENT_PROJECT_UUID] = "null" + headers[X_SIMCORE_PARENT_NODE_ID] = "null" + + response = await client.post( + f"/{API_VTAG}/functions/{_faker.uuid4()}:run", + auth=auth, + json=body, + headers=headers, + ) + + assert response.status_code == status.HTTP_200_OK + function_job = RegisteredProjectFunctionJob.model_validate(response.json()) + celery_task_id = function_job.job_creation_task_id + assert celery_task_id is not None + # Poll until task completion and get result + result = await wait_for_task_result(client, auth, celery_task_id) + RegisteredProjectFunctionJob.model_validate(result.result) + + +def _register_exception_task(exception: Exception) -> Callable[[Celery], None]: + + async def exception_task( + task: Task, + task_id: TaskID, + ): + raise exception + + def _(celery_app: Celery) -> None: + register_task(celery_app, exception_task) + + return _ + + +@pytest.mark.parametrize( + "register_celery_tasks", + [ + _register_exception_task(ValueError("Test error")), + _register_exception_task(Exception("Test error")), + _register_exception_task(BaseBackEndError()), + ], +) +@pytest.mark.parametrize("add_worker_tasks", [False]) +async def test_celery_error_propagation( + app: FastAPI, + client: AsyncClient, + auth: BasicAuth, + user_identity: Identity, + with_api_server_celery_worker: TestWorkController, +): + + owner_metadata = ApiServerOwnerMetadata( + user_id=user_identity.user_id, + product_name=user_identity.product_name, + ) + task_manager = get_task_manager(app=app) + task_uuid = await task_manager.submit_task( + execution_metadata=ExecutionMetadata( + name="exception_task", queue=TasksQueue.API_WORKER_QUEUE + ), + owner_metadata=owner_metadata, + ) + + with pytest.raises(HTTPStatusError) as exc_info: + await 
wait_for_task_result(client, auth, f"{task_uuid}") + + assert exc_info.value.response.status_code == status.HTTP_503_SERVICE_UNAVAILABLE + + +@pytest.mark.parametrize( + "parent_project_uuid, parent_node_uuid, expected_status_code", + [ + (None, None, status.HTTP_422_UNPROCESSABLE_ENTITY), + (f"{_faker.uuid4()}", None, status.HTTP_422_UNPROCESSABLE_ENTITY), + (None, f"{_faker.uuid4()}", status.HTTP_422_UNPROCESSABLE_ENTITY), + (f"{_faker.uuid4()}", f"{_faker.uuid4()}", status.HTTP_200_OK), + ("null", "null", status.HTTP_200_OK), + ], +) +@pytest.mark.parametrize("capture", ["run_study_function_parent_info.json"]) +@pytest.mark.parametrize("mocked_app_dependencies", [None]) +async def test_run_project_function_parent_info( + app: FastAPI, + with_api_server_celery_worker: TestWorkController, + client: AsyncClient, + mock_handler_in_functions_rpc_interface: Callable[ + [str, Any, Exception | None, Callable | None], None + ], + mock_registered_project_function: RegisteredProjectFunction, + mock_registered_project_function_job: RegisteredFunctionJob, + auth: httpx.BasicAuth, + user_id: UserID, + mocked_webserver_rest_api_base: respx.MockRouter, + mocked_directorv2_rest_api_base: respx.MockRouter, + mocked_webserver_rpc_api: dict[str, MockType], + create_respx_mock_from_capture, + project_tests_dir: Path, + parent_project_uuid: str | None, + parent_node_uuid: str | None, + expected_status_code: int, + capture: str, +) -> None: + def _default_side_effect( + request: httpx.Request, + path_params: dict[str, Any], + capture: HttpApiCallCaptureModel, + ) -> Any: + if request.method == "POST" and request.url.path.endswith("/projects"): + if parent_project_uuid and parent_project_uuid != "null": + _parent_uuid = request.headers.get(X_SIMCORE_PARENT_PROJECT_UUID) + assert _parent_uuid is not None + assert parent_project_uuid == _parent_uuid + if parent_node_uuid and parent_node_uuid != "null": + _parent_node_uuid = request.headers.get(X_SIMCORE_PARENT_NODE_ID) + assert _parent_node_uuid is not None + assert parent_node_uuid == _parent_node_uuid + return capture.response_body + + create_respx_mock_from_capture( + respx_mocks=[mocked_webserver_rest_api_base, mocked_directorv2_rest_api_base], + capture_path=project_tests_dir / "mocks" / capture, + side_effects_callbacks=[_default_side_effect] * 50, + ) + + mock_handler_in_functions_rpc_interface( + "get_function_user_permissions", + FunctionUserAccessRights( + user_id=user_id, + execute=True, + read=True, + write=True, + ), + None, + None, + ) + mock_handler_in_functions_rpc_interface( + "get_function", mock_registered_project_function, None, None + ) + mock_handler_in_functions_rpc_interface("find_cached_function_jobs", [], None, None) + mock_handler_in_functions_rpc_interface( + "register_function_job", mock_registered_project_function_job, None, None + ) + mock_handler_in_functions_rpc_interface( + "get_functions_user_api_access_rights", + FunctionUserApiAccessRights( + user_id=user_id, + execute_functions=True, + write_functions=True, + read_functions=True, + ), + None, + None, + ) + mock_handler_in_functions_rpc_interface( + "patch_registered_function_job", + None, + None, + partial( + _patch_registered_function_job_side_effect, + mock_registered_project_function_job, + ), + ) + + headers = {} + if parent_project_uuid: + headers[X_SIMCORE_PARENT_PROJECT_UUID] = parent_project_uuid + if parent_node_uuid: + headers[X_SIMCORE_PARENT_NODE_ID] = parent_node_uuid + + response = await client.post( + 
f"{API_VTAG}/functions/{mock_registered_project_function.uid}:run", + json={}, + auth=auth, + headers=headers, + ) + assert response.status_code == expected_status_code + if response.status_code == status.HTTP_200_OK: + function_job = RegisteredProjectFunctionJob.model_validate(response.json()) + celery_task_id = function_job.job_creation_task_id + assert celery_task_id is not None + # Poll until task completion and get result + result = await wait_for_task_result(client, auth, celery_task_id) + RegisteredProjectFunctionJob.model_validate(result.result) + + +@pytest.mark.parametrize( + "parent_project_uuid, parent_node_uuid, expected_status_code", + [ + (None, None, status.HTTP_422_UNPROCESSABLE_ENTITY), + (f"{_faker.uuid4()}", None, status.HTTP_422_UNPROCESSABLE_ENTITY), + (None, f"{_faker.uuid4()}", status.HTTP_422_UNPROCESSABLE_ENTITY), + (f"{_faker.uuid4()}", f"{_faker.uuid4()}", status.HTTP_200_OK), + ("null", "null", status.HTTP_200_OK), + ], +) +@pytest.mark.parametrize("capture", ["run_study_function_parent_info.json"]) +@pytest.mark.parametrize("mocked_app_dependencies", [None]) +async def test_map_function_parent_info( + app: FastAPI, + with_api_server_celery_worker: TestWorkController, + client: AsyncClient, + mock_handler_in_functions_rpc_interface: Callable[ + [str, Any, Exception | None, Callable | None], MockType + ], + mock_registered_project_function: RegisteredProjectFunction, + mock_registered_project_function_job: RegisteredFunctionJob, + auth: httpx.BasicAuth, + user_id: UserID, + mocked_webserver_rest_api_base: respx.MockRouter, + mocked_directorv2_rest_api_base: respx.MockRouter, + mocked_webserver_rpc_api: dict[str, MockType], + create_respx_mock_from_capture, + project_tests_dir: Path, + parent_project_uuid: str | None, + parent_node_uuid: str | None, + expected_status_code: int, + capture: str, +) -> None: + + side_effect_checks = {} + + def _default_side_effect( + side_effect_checks: dict, + request: httpx.Request, + path_params: dict[str, Any], + capture: HttpApiCallCaptureModel, + ) -> Any: + if request.method == "POST" and request.url.path.endswith("/projects"): + side_effect_checks["headers_checked"] = True + if parent_project_uuid and parent_project_uuid != "null": + _parent_uuid = request.headers.get(X_SIMCORE_PARENT_PROJECT_UUID) + assert _parent_uuid is not None + assert parent_project_uuid == _parent_uuid + if parent_node_uuid and parent_node_uuid != "null": + _parent_node_uuid = request.headers.get(X_SIMCORE_PARENT_NODE_ID) + assert _parent_node_uuid is not None + assert parent_node_uuid == _parent_node_uuid + return capture.response_body + + create_respx_mock_from_capture( + respx_mocks=[mocked_webserver_rest_api_base, mocked_directorv2_rest_api_base], + capture_path=project_tests_dir / "mocks" / capture, + side_effects_callbacks=[partial(_default_side_effect, side_effect_checks)] * 50, + ) + + mock_handler_in_functions_rpc_interface( + "get_function_user_permissions", + FunctionUserAccessRights( + user_id=user_id, + execute=True, + read=True, + write=True, + ), + None, + None, + ) + mock_handler_in_functions_rpc_interface( + "get_function", mock_registered_project_function, None, None + ) + mock_handler_in_functions_rpc_interface("find_cached_function_jobs", [], None, None) + mock_handler_in_functions_rpc_interface( + "register_function_job", mock_registered_project_function_job, None, None + ) + mock_handler_in_functions_rpc_interface( + "get_functions_user_api_access_rights", + FunctionUserApiAccessRights( + user_id=user_id, + 
execute_functions=True, + write_functions=True, + read_functions=True, + ), + None, + None, + ) + mock_handler_in_functions_rpc_interface( + "register_function_job_collection", + RegisteredFunctionJobCollection( + uid=FunctionJobID(_faker.uuid4()), + title="Test Collection", + description="A test function job collection", + job_ids=[], + created_at=datetime.datetime.now(datetime.UTC), + ), + None, + None, + ) + + patch_mock = mock_handler_in_functions_rpc_interface( + "patch_registered_function_job", + None, + None, + partial( + _patch_registered_function_job_side_effect, + mock_registered_project_function_job, + ), + ) + + headers = {} + if parent_project_uuid: + headers[X_SIMCORE_PARENT_PROJECT_UUID] = parent_project_uuid + if parent_node_uuid: + headers[X_SIMCORE_PARENT_NODE_ID] = parent_node_uuid + + response = await client.post( + f"{API_VTAG}/functions/{mock_registered_project_function.uid}:map", + json=[{}, {}], + auth=auth, + headers=headers, + ) + assert response.status_code == expected_status_code + + if expected_status_code == status.HTTP_200_OK: + FunctionJobCollection.model_validate(response.json()) + task_id = patch_mock.call_args.kwargs[ + "registered_function_job_patch" + ].job_creation_task_id + await wait_for_task_result(client, auth, f"{task_id}") + assert side_effect_checks["headers_checked"] is True + + +@pytest.mark.parametrize("mocked_app_dependencies", [None]) +async def test_map_function( + app: FastAPI, + with_api_server_celery_worker: TestWorkController, + client: AsyncClient, + mock_handler_in_functions_rpc_interface: Callable[ + [str, Any, Exception | None, Callable | None], MockType + ], + mock_registered_project_function: RegisteredProjectFunction, + mock_registered_project_function_job: RegisteredFunctionJob, + auth: httpx.BasicAuth, + user_id: UserID, + mocked_webserver_rest_api_base: respx.MockRouter, + mocked_directorv2_rest_api_base: respx.MockRouter, + mocked_webserver_rpc_api: dict[str, MockType], + create_respx_mock_from_capture, + project_tests_dir: Path, +) -> None: + + # arrange + _capture = "run_study_function_parent_info.json" + + def _default_side_effect( + request: httpx.Request, + path_params: dict[str, Any], + capture: HttpApiCallCaptureModel, + ) -> Any: + return capture.response_body + + create_respx_mock_from_capture( + respx_mocks=[mocked_webserver_rest_api_base, mocked_directorv2_rest_api_base], + capture_path=project_tests_dir / "mocks" / _capture, + side_effects_callbacks=[_default_side_effect] * 50, + ) + + mock_handler_in_functions_rpc_interface( + "get_function_user_permissions", + FunctionUserAccessRights( + user_id=user_id, + execute=True, + read=True, + write=True, + ), + None, + None, + ) + mock_handler_in_functions_rpc_interface( + "get_function", mock_registered_project_function, None, None + ) + mock_handler_in_functions_rpc_interface("find_cached_function_jobs", [], None, None) + + _generated_function_job_ids: list[FunctionJobID] = [] + + async def _register_function_job_side_effect( + generated_function_job_ids: list[FunctionJobID], *args, **kwargs + ): + uid = FunctionJobID(_faker.uuid4()) + generated_function_job_ids.append(uid) + return mock_registered_project_function_job.model_copy(update={"uid": uid}) + + mock_handler_in_functions_rpc_interface( + "register_function_job", + None, + None, + partial(_register_function_job_side_effect, _generated_function_job_ids), + ) + mock_handler_in_functions_rpc_interface( + "get_functions_user_api_access_rights", + FunctionUserApiAccessRights( + user_id=user_id, + 
execute_functions=True, + write_functions=True, + read_functions=True, + ), + None, + None, + ) + + async def _register_function_job_collection_side_effect(*args, **kwargs): + job_collection = kwargs["function_job_collection"] + return RegisteredFunctionJobCollection( + uid=FunctionJobID(_faker.uuid4()), + title="Test Collection", + description="A test function job collection", + job_ids=job_collection.job_ids, + created_at=datetime.datetime.now(datetime.UTC), + ) + + mock_handler_in_functions_rpc_interface( + "register_function_job_collection", + None, + None, + _register_function_job_collection_side_effect, + ) + + patch_mock = mock_handler_in_functions_rpc_interface( + "patch_registered_function_job", + None, + None, + partial( + _patch_registered_function_job_side_effect, + mock_registered_project_function_job, + ), + ) + + # act + _inputs = [{}, {}] + response = await client.post( + f"{API_VTAG}/functions/{mock_registered_project_function.uid}:map", + json=_inputs, + auth=auth, + headers={ + X_SIMCORE_PARENT_PROJECT_UUID: "null", + X_SIMCORE_PARENT_NODE_ID: "null", + }, + ) + + # assert + assert response.status_code == status.HTTP_200_OK + job_collection = FunctionJobCollection.model_validate(response.json()) + assert ( + job_collection.job_ids == _generated_function_job_ids + ), "Job IDs did not preserve order or were incorrectly propagated" + celery_task_ids = { + elm.kwargs["registered_function_job_patch"].job_creation_task_id + for elm in patch_mock.call_args_list + } + assert len(celery_task_ids) == len(_inputs) + for task_id in celery_task_ids: + await wait_for_task_result(client, auth, f"{task_id}") diff --git a/services/api-server/tests/unit/api_functions/conftest.py b/services/api-server/tests/unit/api_functions/conftest.py index 891fdc533f47..34c9cacd368d 100644 --- a/services/api-server/tests/unit/api_functions/conftest.py +++ b/services/api-server/tests/unit/api_functions/conftest.py @@ -26,13 +26,20 @@ RegisteredProjectFunction, RegisteredProjectFunctionJob, ) -from models_library.functions import RegisteredFunctionJobCollection +from models_library.functions import ( + RegisteredFunctionJobCollection, + RegisteredSolverFunction, + RegisteredSolverFunctionJob, + SolverFunctionJob, +) from models_library.functions_errors import FunctionIDNotFoundError from models_library.projects import ProjectID -from pytest_mock import MockerFixture +from pytest_mock import MockerFixture, MockType from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient +from simcore_service_api_server.api.dependencies import services +from simcore_service_api_server.api.dependencies.services import get_rabbitmq_rpc_client from simcore_service_api_server.api.routes.functions_routes import get_wb_api_rpc_client from simcore_service_api_server.services_rpc.wb_api_server import WbApiRpcClient @@ -66,6 +73,25 @@ async def request(self, namespace: str, method_name: str, **kwargs): return {"mocked_response": True} + +@pytest.fixture +async def mock_rabbitmq_rpc_client( + app: FastAPI, mocker: MockerFixture +) -> MockerFixture: + def _(): + return DummyRpcClient() + + app.dependency_overrides[get_rabbitmq_rpc_client] = _ + return mocker + + +@pytest.fixture +async def mock_celery_task_manager(app: FastAPI, mocker: MockerFixture) -> MockType: + def _(app: FastAPI): + return None + + return mocker.patch.object(services, services.get_task_manager.__name__, _) + + +@pytest.fixture +async 
def mock_wb_api_server_rpc(app: FastAPI, mocker: MockerFixture) -> MockerFixture: @@ -119,38 +145,92 @@ def mock_function( @pytest.fixture -def mock_registered_function(mock_function: Function) -> RegisteredFunction: +def mock_registered_project_function(mock_function: Function) -> RegisteredFunction: return RegisteredProjectFunction( **{ - **mock_function.dict(), - "uid": str(uuid4()), + **mock_function.model_dump(), + "uid": f"{uuid4()}", "created_at": datetime.datetime.now(datetime.UTC), + "modified_at": datetime.datetime.now(datetime.UTC), } ) @pytest.fixture -def mock_function_job(mock_registered_function: RegisteredFunction) -> FunctionJob: +def mock_registered_solver_function( + mock_function: Function, + sample_input_schema: JSONFunctionInputSchema, + sample_output_schema: JSONFunctionOutputSchema, +) -> RegisteredFunction: + return RegisteredSolverFunction( + title="test_function", + function_class=FunctionClass.SOLVER, + description="A test function", + input_schema=sample_input_schema, + output_schema=sample_output_schema, + default_inputs=None, + uid=uuid4(), + created_at=datetime.datetime.now(datetime.UTC), + modified_at=datetime.datetime.now(datetime.UTC), + solver_key="simcore/services/comp/ans-model", + solver_version="1.0.1", + ) + + +@pytest.fixture +def mock_project_function_job( + mock_registered_project_function: RegisteredFunction, +) -> FunctionJob: mock_function_job = { - "function_uid": mock_registered_function.uid, + "function_uid": mock_registered_project_function.uid, "title": "Test Function Job", "description": "A test function job", "inputs": {"key": "value"}, "outputs": None, - "project_job_id": str(uuid4()), + "project_job_id": f"{uuid4()}", "function_class": FunctionClass.PROJECT, + "job_creation_task_id": None, } return ProjectFunctionJob(**mock_function_job) @pytest.fixture -def mock_registered_function_job( - mock_function_job: FunctionJob, +def mock_registered_project_function_job( + mock_project_function_job: FunctionJob, ) -> RegisteredFunctionJob: return RegisteredProjectFunctionJob( **{ - **mock_function_job.dict(), - "uid": str(uuid4()), + **mock_project_function_job.model_dump(), + "uid": f"{uuid4()}", + "created_at": datetime.datetime.now(datetime.UTC), + } + ) + + +@pytest.fixture +def mock_solver_function_job( + mock_registered_solver_function: RegisteredFunction, +) -> FunctionJob: + return SolverFunctionJob( + title="Test Function Job", + description="A test function job", + function_uid=mock_registered_solver_function.uid, + inputs={"key": "value"}, + outputs=None, + function_class=FunctionClass.SOLVER, + solver_job_id=ProjectID(f"{uuid4()}"), + job_creation_task_id=None, + ) + + +@pytest.fixture +def mock_registered_solver_function_job( + mock_solver_function_job: FunctionJob, +) -> RegisteredFunctionJob: + return RegisteredSolverFunctionJob( + **{ + **mock_solver_function_job.model_dump(), + "uid": f"{uuid4()}", "created_at": datetime.datetime.now(datetime.UTC), } ) @@ -158,15 +238,17 @@ def mock_registered_function_job( @pytest.fixture def mock_function_job_collection( - mock_registered_function_job: RegisteredFunctionJob, + mock_registered_project_function_job: RegisteredFunctionJob, ) -> FunctionJobCollection: mock_function_job_collection = { "title": "Test Function Job Collection", "description": "A test function job collection", - "function_uid": mock_registered_function_job.function_uid, + "function_uid": mock_registered_project_function_job.function_uid, "function_class": FunctionClass.PROJECT, - "project_id": str(uuid4()), - 
"function_job_ids": [mock_registered_function_job.uid for _ in range(5)], + "project_id": f"{uuid4()}", + "function_job_ids": [ + mock_registered_project_function_job.uid for _ in range(5) + ], } return FunctionJobCollection(**mock_function_job_collection) @@ -178,7 +260,7 @@ def mock_registered_function_job_collection( return RegisteredFunctionJobCollection( **{ **mock_function_job_collection.model_dump(), - "uid": str(uuid4()), + "uid": f"{uuid4()}", "created_at": datetime.datetime.now(datetime.UTC), } ) @@ -187,20 +269,44 @@ def mock_registered_function_job_collection( @pytest.fixture() def mock_handler_in_functions_rpc_interface( mock_wb_api_server_rpc: MockerFixture, -) -> Callable[[str, Any, Exception | None], None]: +) -> Callable[[str, Any, Exception | None, Callable | None], MockType]: def _mock( handler_name: str = "", return_value: Any = None, exception: Exception | None = None, - ) -> None: + side_effect: Callable | None = None, + ) -> MockType: from servicelib.rabbitmq.rpc_interfaces.webserver.functions import ( functions_rpc_interface, ) - mock_wb_api_server_rpc.patch.object( + assert exception is None or side_effect is None + + return mock_wb_api_server_rpc.patch.object( functions_rpc_interface, handler_name, return_value=return_value, + side_effect=exception or side_effect, + ) + + return _mock + + +@pytest.fixture() +def mock_method_in_jobs_service( + mock_wb_api_server_rpc: MockerFixture, +) -> Callable[[str, Any, Exception | None], None]: + def _mock( + method_name: str = "", + return_value: Any = None, + exception: Exception | None = None, + ) -> None: + from simcore_service_api_server._service_jobs import JobService + + mock_wb_api_server_rpc.patch.object( + JobService, + method_name, + return_value=return_value, side_effect=exception, ) diff --git a/services/api-server/tests/unit/api_functions/test_api_routers_function_job_collections.py b/services/api-server/tests/unit/api_functions/test_api_routers_function_job_collections.py new file mode 100644 index 000000000000..1ae7a5820408 --- /dev/null +++ b/services/api-server/tests/unit/api_functions/test_api_routers_function_job_collections.py @@ -0,0 +1,173 @@ +# pylint: disable=unused-argument +import datetime +from collections.abc import Callable +from typing import Any +from uuid import uuid4 + +import httpx +import pytest +from httpx import AsyncClient +from models_library.api_schemas_webserver.functions import ( + RegisteredFunctionJobCollection, + RegisteredProjectFunction, + RegisteredProjectFunctionJob, +) +from models_library.rest_pagination import PageMetaInfoLimitOffset +from pytest_mock import MockerFixture +from servicelib.aiohttp import status +from simcore_service_api_server._meta import API_VTAG + + +async def test_get_function_job_collection( + client: AsyncClient, + mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], + auth: httpx.BasicAuth, +) -> None: + mock_registered_function_job_collection = ( + RegisteredFunctionJobCollection.model_validate( + { + "uid": str(uuid4()), + "title": "Test Collection", + "description": "A test function job collection", + "job_ids": [str(uuid4()), str(uuid4())], + "created_at": datetime.datetime.now(datetime.UTC), + } + ) + ) + + mock_handler_in_functions_rpc_interface( + "get_function_job_collection", mock_registered_function_job_collection + ) + + response = await client.get( + f"{API_VTAG}/function_job_collections/{mock_registered_function_job_collection.uid}", + auth=auth, + ) + assert response.status_code == status.HTTP_200_OK + assert ( + 
RegisteredFunctionJobCollection.model_validate(response.json())
+        == mock_registered_function_job_collection
+    )
+
+
+async def test_list_function_job_collections(
+    client: AsyncClient,
+    mock_handler_in_functions_rpc_interface: Callable[[str, Any], None],
+    auth: httpx.BasicAuth,
+) -> None:
+    mock_registered_function_job_collection = (
+        RegisteredFunctionJobCollection.model_validate(
+            {
+                "uid": str(uuid4()),
+                "title": "Test Collection",
+                "description": "A test function job collection",
+                "job_ids": [str(uuid4()), str(uuid4())],
+                "created_at": datetime.datetime.now(datetime.UTC),
+            }
+        )
+    )
+
+    mock_handler_in_functions_rpc_interface(
+        "list_function_job_collections",
+        (
+            [mock_registered_function_job_collection for _ in range(5)],
+            PageMetaInfoLimitOffset(total=5, count=5, limit=10, offset=0),
+        ),
+    )
+
+    response = await client.get(f"{API_VTAG}/function_job_collections", auth=auth)
+    assert response.status_code == status.HTTP_200_OK
+    data = response.json()["items"]
+    assert len(data) == 5
+    assert (
+        RegisteredFunctionJobCollection.model_validate(data[0])
+        == mock_registered_function_job_collection
+    )
+
+
+async def test_delete_function_job_collection(
+    client: AsyncClient,
+    mock_handler_in_functions_rpc_interface: Callable[[str, Any], None],
+    mock_registered_function_job_collection: RegisteredFunctionJobCollection,
+    auth: httpx.BasicAuth,
+) -> None:
+
+    mock_handler_in_functions_rpc_interface("delete_function_job_collection", None)
+
+    # Now, delete the function job collection
+    response = await client.delete(
+        f"{API_VTAG}/function_job_collections/{mock_registered_function_job_collection.uid}",
+        auth=auth,
+    )
+    assert response.status_code == status.HTTP_200_OK
+    data = response.json()
+    assert data is None
+
+
+@pytest.mark.parametrize("response_type", ["page", "list"])
+async def test_get_function_job_collection_jobs(
+    client: AsyncClient,
+    mock_rabbitmq_rpc_client: MockerFixture,
+    mock_handler_in_functions_rpc_interface: Callable[[str, Any], None],
+    mock_registered_function_job_collection: RegisteredFunctionJobCollection,
+    mock_registered_project_function_job: RegisteredProjectFunctionJob,
+    auth: httpx.BasicAuth,
+    response_type: str | None,
+) -> None:
+    mock_handler_in_functions_rpc_interface(
+        "list_function_jobs",
+        (
+            [mock_registered_project_function_job for _ in range(5)],
+            PageMetaInfoLimitOffset(total=5, count=5, limit=10, offset=0),
+        ),
+    )
+    query = {"limit": 10, "offset": 0} if response_type == "page" else None
+    response = await client.get(
+        f"{API_VTAG}/function_job_collections/{mock_registered_function_job_collection.uid}/function_jobs{'/page' if response_type == 'page' else ''}",
+        params=query,
+        auth=auth,
+    )
+    assert response.status_code == status.HTTP_200_OK
+    data = response.json()
+    if response_type == "list":
+        assert isinstance(data, list)
+        assert len(data) == 5
+    elif response_type == "page":
+        assert isinstance(data, dict)
+        assert data["total"] == 5
+        assert data["limit"] == 10
+        assert data["offset"] == 0
+        assert len(data["items"]) == 5
+
+
+async def test_list_function_job_collections_with_function_filter(
+    client: AsyncClient,
+    mock_handler_in_functions_rpc_interface: Callable[[str, Any], None],
+    mock_registered_function_job_collection: RegisteredFunctionJobCollection,
+    mock_registered_project_function: RegisteredProjectFunction,
+    auth: httpx.BasicAuth,
+) -> None:
+
+    mock_handler_in_functions_rpc_interface(
+        "list_function_job_collections",
+        (
+            [mock_registered_function_job_collection for _ in range(2)],
+ PageMetaInfoLimitOffset(total=5, count=2, limit=2, offset=1), + ), + ) + + response = await client.get( + f"{API_VTAG}/function_job_collections?function_id={mock_registered_project_function.uid}&limit=2&offset=1", + auth=auth, + ) + assert response.status_code == status.HTTP_200_OK + data = response.json() + + assert data["total"] == 5 + assert data["limit"] == 2 + assert data["offset"] == 1 + assert len(data["items"]) == 2 + assert ( + RegisteredFunctionJobCollection.model_validate(data["items"][0]) + == mock_registered_function_job_collection + ) diff --git a/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py b/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py new file mode 100644 index 000000000000..5d25b8d8a407 --- /dev/null +++ b/services/api-server/tests/unit/api_functions/test_api_routers_function_jobs.py @@ -0,0 +1,447 @@ +# pylint: disable=unused-argument +# pylint: disable=too-many-arguments +# pylint: disable=too-many-positional-arguments + +import random +import uuid +from collections.abc import Callable +from datetime import datetime +from typing import Any +from unittest.mock import ANY + +import httpx +import pytest +from celery_library.task_manager import CeleryTaskManager +from faker import Faker +from fastapi import FastAPI, status +from httpx import AsyncClient +from models_library.api_schemas_webserver.functions import ( + ProjectFunctionJob, + RegisteredProjectFunctionJob, +) +from models_library.functions import ( + FunctionJobStatus, + RegisteredProjectFunction, + RegisteredProjectFunctionJobWithStatus, + TaskID, +) +from models_library.products import ProductName +from models_library.progress_bar import ProgressReport, ProgressStructuredMessage +from models_library.projects import ProjectID +from models_library.projects_state import RunningState +from models_library.rest_pagination import PageMetaInfoLimitOffset +from models_library.users import UserID +from models_library.utils.json_schema import GenerateResolvedJsonSchema +from pytest_mock import MockerFixture, MockType +from servicelib.celery.models import OwnerMetadata, TaskState, TaskStatus, TaskUUID +from simcore_service_api_server._meta import API_VTAG +from simcore_service_api_server._service_function_jobs_task_client import ( + FunctionJobTaskClientService, +) +from simcore_service_api_server.api.dependencies import services as service_dependencies +from simcore_service_api_server.models.schemas.functions import ( + FunctionJobCreationTaskStatus, +) +from simcore_service_api_server.models.schemas.jobs import JobStatus + +_faker = Faker() + + +async def test_delete_function_job( + client: AsyncClient, + mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], + mock_registered_project_function_job: RegisteredProjectFunctionJob, + auth: httpx.BasicAuth, +) -> None: + + mock_handler_in_functions_rpc_interface("delete_function_job", None) + + response = await client.delete( + f"{API_VTAG}/function_jobs/{mock_registered_project_function_job.uid}", + auth=auth, + ) + assert response.status_code == status.HTTP_200_OK + + +async def test_register_function_job( + client: AsyncClient, + mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], + mock_project_function_job: ProjectFunctionJob, + mock_registered_project_function_job: RegisteredProjectFunctionJob, + auth: httpx.BasicAuth, +) -> None: + """Test the register_function_job endpoint.""" + + mock_handler_in_functions_rpc_interface( + "register_function_job", 
mock_registered_project_function_job + ) + + response = await client.post( + f"{API_VTAG}/function_jobs", + json=mock_project_function_job.model_dump(mode="json"), + auth=auth, + ) + + assert response.status_code == status.HTTP_200_OK + assert ( + RegisteredProjectFunctionJob.model_validate(response.json()) + == mock_registered_project_function_job + ) + + +async def test_get_function_job( + client: AsyncClient, + mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], + mock_registered_project_function_job: RegisteredProjectFunctionJob, + auth: httpx.BasicAuth, +) -> None: + + mock_handler_in_functions_rpc_interface( + "get_function_job", mock_registered_project_function_job + ) + + # Now, get the function job + response = await client.get( + f"{API_VTAG}/function_jobs/{mock_registered_project_function_job.uid}", + auth=auth, + ) + assert response.status_code == status.HTTP_200_OK + assert ( + RegisteredProjectFunctionJob.model_validate(response.json()) + == mock_registered_project_function_job + ) + + +async def test_list_function_jobs( + client: AsyncClient, + mock_rabbitmq_rpc_client: MockerFixture, + mock_celery_task_manager: MockType, + mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], + mock_registered_project_function_job: RegisteredProjectFunctionJob, + auth: httpx.BasicAuth, +) -> None: + + mock_handler_in_functions_rpc_interface( + "list_function_jobs", + ( + [mock_registered_project_function_job for _ in range(5)], + PageMetaInfoLimitOffset(total=5, count=5, limit=10, offset=0), + ), + ) + response = await client.get(f"{API_VTAG}/function_jobs", auth=auth) + assert response.status_code == status.HTTP_200_OK + data = response.json()["items"] + assert len(data) == 5 + assert ( + RegisteredProjectFunctionJob.model_validate(data[0]) + == mock_registered_project_function_job + ) + + +@pytest.mark.parametrize("status_str", ["SUCCESS", "FAILED"]) +async def test_list_function_jobs_with_status( + client: AsyncClient, + mock_rabbitmq_rpc_client: MockerFixture, + mock_celery_task_manager: MockType, + mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], + mock_registered_project_function: RegisteredProjectFunction, + mock_registered_project_function_job: RegisteredProjectFunctionJob, + auth: httpx.BasicAuth, + mocker: MockerFixture, + status_str: str, +) -> None: + mock_status = FunctionJobStatus(status=status_str) + mock_outputs = {"X+Y": 42, "X-Y": 10} + mock_registered_project_function_job_with_status = ( + RegisteredProjectFunctionJobWithStatus( + **{ + **mock_registered_project_function_job.model_dump(), + "status": mock_status, + } + ) + ) + mock_handler_in_functions_rpc_interface( + "list_function_jobs_with_status", + ( + [mock_registered_project_function_job_with_status for _ in range(5)], + PageMetaInfoLimitOffset(total=5, count=5, limit=10, offset=0), + ), + ) + mock_handler_in_functions_rpc_interface( + "get_function", mock_registered_project_function + ) + + mock_function_job_outputs = mocker.patch.object( + FunctionJobTaskClientService, "function_job_outputs", return_value=mock_outputs + ) + mock_handler_in_functions_rpc_interface("get_function_job_status", mock_status) + response = await client.get( + f"{API_VTAG}/function_jobs?include_status=true", auth=auth + ) + assert response.status_code == status.HTTP_200_OK + data = response.json()["items"] + assert len(data) == 5 + returned_function_job = RegisteredProjectFunctionJobWithStatus.model_validate( + data[0] + ) + if status_str == "SUCCESS": + 
mock_function_job_outputs.assert_called() + assert returned_function_job.outputs == mock_outputs + else: + mock_function_job_outputs.assert_not_called() + assert returned_function_job.outputs is None + + assert returned_function_job == mock_registered_project_function_job_with_status + + +async def test_list_function_jobs_with_job_id_filter( + client: AsyncClient, + mock_celery_task_manager: MockType, + mock_rabbitmq_rpc_client: MockerFixture, + mock_handler_in_functions_rpc_interface: Callable[[str], MockType], + mock_registered_project_function_job: RegisteredProjectFunctionJob, + user_id: UserID, + product_name: ProductName, + auth: httpx.BasicAuth, +) -> None: + + PAGE_SIZE = 3 + TOTAL_SIZE = 10 + + def mocked_list_function_jobs(offset: int, limit: int): + start = offset + end = offset + limit + items = [ + mock_registered_project_function_job + for _ in range(start, min(end, TOTAL_SIZE)) + ] + return items, PageMetaInfoLimitOffset( + total=TOTAL_SIZE, count=len(items), limit=limit, offset=offset + ) + + mock_list_function_jobs = mock_handler_in_functions_rpc_interface( + "list_function_jobs", + ) + + mock_list_function_jobs.side_effect = lambda *args, **kwargs: ( # noqa: ARG005 + mocked_list_function_jobs( + kwargs.get("pagination_offset", 0), + kwargs.get("pagination_limit", PAGE_SIZE), + ) + ) + for page in range((TOTAL_SIZE + PAGE_SIZE - 1) // PAGE_SIZE): + offset = page * PAGE_SIZE + response = await client.get( + f"{API_VTAG}/function_jobs", + params={ + "function_job_ids": [str(mock_registered_project_function_job.uid)], + "limit": PAGE_SIZE, + "offset": offset, + }, + auth=auth, + ) + mock_list_function_jobs.assert_called_with( + ANY, # Dummy rpc client + filter_by_function_job_ids=[mock_registered_project_function_job.uid], + filter_by_function_job_collection_id=None, + filter_by_function_id=None, + pagination_offset=offset, + pagination_limit=PAGE_SIZE, + product_name=product_name, + user_id=user_id, + ) + assert response.status_code == status.HTTP_200_OK + data = response.json()["items"] + assert len(data) == min(PAGE_SIZE, TOTAL_SIZE - offset) + assert ( + RegisteredProjectFunctionJob.model_validate(data[0]) + == mock_registered_project_function_job + ) + + +@pytest.mark.parametrize("job_status", ["SUCCESS", "FAILED", "STARTED"]) +@pytest.mark.parametrize( + "project_job_id, job_creation_task_id, celery_task_state", + [ + ( + ProjectID(_faker.uuid4()), + TaskID(_faker.uuid4()), + random.choice(list(TaskState)), # noqa: S311 + ), + (None, None, random.choice(list(TaskState))), # noqa: S311 + (None, TaskID(_faker.uuid4()), random.choice(list(TaskState))), # noqa: S311 + ], +) +async def test_get_function_job_status( + app: FastAPI, + mocked_app_dependencies: None, + client: AsyncClient, + mocker: MockerFixture, + mock_handler_in_functions_rpc_interface: Callable[ + [str, Any, Exception | None, Callable | None], MockType + ], + mock_registered_project_function_job: RegisteredProjectFunctionJob, + mock_registered_project_function: RegisteredProjectFunction, + mock_method_in_jobs_service: Callable[[str, Any], None], + auth: httpx.BasicAuth, + job_status: str, + project_job_id: ProjectID, + job_creation_task_id: TaskID | None, + celery_task_state: TaskState, +) -> None: + + _expected_return_status = status.HTTP_200_OK + + def _mock_task_manager(*args, **kwargs) -> CeleryTaskManager: + async def _get_task_status( + task_uuid: TaskUUID, owner_metadata: OwnerMetadata + ) -> TaskStatus: + assert f"{task_uuid}" == job_creation_task_id + return TaskStatus( + task_uuid=task_uuid, + 
task_state=celery_task_state, + progress_report=ProgressReport( + actual_value=0.5, + total=1.0, + attempt=1, + unit=None, + message=ProgressStructuredMessage.model_validate( + ProgressStructuredMessage.model_json_schema( + schema_generator=GenerateResolvedJsonSchema + )["examples"][0] + ), + ), + ) + + obj = mocker.Mock(spec=CeleryTaskManager) + obj.get_task_status = _get_task_status + return obj + + mocker.patch.object(service_dependencies, "get_task_manager", _mock_task_manager) + + mock_handler_in_functions_rpc_interface( + "get_function_job", + mock_registered_project_function_job.model_copy( + update={ + "user_id": ANY, + "project_job_id": project_job_id, + "job_creation_task_id": job_creation_task_id, + } + ), + None, + None, + ) + mock_handler_in_functions_rpc_interface( + "get_function", mock_registered_project_function, None, None + ) + mock_handler_in_functions_rpc_interface( + "get_function_job_status", FunctionJobStatus(status=job_status), None, None + ) + mock_method_in_jobs_service( + "inspect_study_job", + JobStatus( + job_id=uuid.uuid4(), + submitted_at=datetime.fromisoformat("2023-01-01T00:00:00"), + started_at=datetime.fromisoformat("2023-01-01T01:00:00"), + stopped_at=datetime.fromisoformat("2023-01-01T02:00:00"), + state=RunningState(value=job_status), + ), + ) + + async def _update_function_job_status_side_effect(*args, **kwargs): + return kwargs["job_status"] + + mock_handler_in_functions_rpc_interface( + "update_function_job_status", + None, + None, + _update_function_job_status_side_effect, + ) + + response = await client.get( + f"{API_VTAG}/function_jobs/{mock_registered_project_function_job.uid}/status", + auth=auth, + ) + assert response.status_code == _expected_return_status + data = response.json() + if (project_job_id is not None and job_creation_task_id is not None) or ( + job_status in ("SUCCESS", "FAILED") + ): + assert data["status"] == job_status + elif project_job_id is None and job_creation_task_id is None: + assert data["status"] == FunctionJobCreationTaskStatus.NOT_YET_SCHEDULED + elif project_job_id is None and job_creation_task_id is not None: + assert data["status"] == FunctionJobCreationTaskStatus[celery_task_state.name] + else: + pytest.fail("Unexpected combination of parameters") + + +@pytest.mark.parametrize( + "job_outputs, project_job_id, job_status, expected_output, use_db_cache", + [ + (None, None, "created", None, True), + ( + {"X+Y": 42, "X-Y": 10}, + ProjectID(_faker.uuid4()), + RunningState.FAILED, + None, + False, + ), + ( + {"X+Y": 42, "X-Y": 10}, + ProjectID(_faker.uuid4()), + RunningState.SUCCESS, + {"X+Y": 42, "X-Y": 10}, + True, + ), + ], +) +async def test_get_function_job_outputs( + client: AsyncClient, + mock_celery_task_manager: MockType, + mock_rabbitmq_rpc_client: MockerFixture, + mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], + mock_registered_project_function_job: RegisteredProjectFunctionJob, + mock_registered_project_function: RegisteredProjectFunction, + mocked_webserver_rpc_api: dict[str, MockType], + auth: httpx.BasicAuth, + job_outputs: dict[str, Any] | None, + project_job_id: ProjectID | None, + job_status: str, + expected_output: dict[str, Any] | None, + use_db_cache: bool, +) -> None: + + mock_handler_in_functions_rpc_interface( + "get_function_job", + mock_registered_project_function_job.model_copy( + update={ + "user_id": ANY, + "project_job_id": project_job_id, + "job_creation_task_id": None, + } + ), + ) + mock_handler_in_functions_rpc_interface( + "get_function", 
mock_registered_project_function + ) + mock_handler_in_functions_rpc_interface( + "update_function_job_status", FunctionJobStatus(status="SUCCESS") + ) + if use_db_cache: + mock_handler_in_functions_rpc_interface("get_function_job_outputs", job_outputs) + else: + mock_handler_in_functions_rpc_interface("get_function_job_outputs", None) + + mock_handler_in_functions_rpc_interface( + "get_function_job_status", + FunctionJobStatus(status=job_status), + ) + + response = await client.get( + f"{API_VTAG}/function_jobs/{mock_registered_project_function_job.uid}/outputs", + auth=auth, + ) + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert data == expected_output diff --git a/services/api-server/tests/unit/api_functions/test_api_routers_functions.py b/services/api-server/tests/unit/api_functions/test_api_routers_functions.py index dcac6487618b..25401868ad3d 100644 --- a/services/api-server/tests/unit/api_functions/test_api_routers_functions.py +++ b/services/api-server/tests/unit/api_functions/test_api_routers_functions.py @@ -1,26 +1,60 @@ # pylint: disable=unused-argument +# pylint: disable=too-many-arguments +# pylint: disable=too-many-positional-arguments # pylint: disable=redefined-outer-name -import datetime from collections.abc import Callable +from pathlib import Path from typing import Any +from unittest.mock import MagicMock from uuid import uuid4 import httpx import pytest +import respx +from celery import Task # pylint: disable=no-name-in-module +from celery_library.task_manager import CeleryTaskManager +from faker import Faker +from fastapi import FastAPI from httpx import AsyncClient -from models_library.api_schemas_webserver.functions import ( - FunctionJobCollection, +from models_library.api_schemas_long_running_tasks.tasks import TaskGet +from models_library.functions import ( + FunctionUserAccessRights, + FunctionUserApiAccessRights, ProjectFunction, - ProjectFunctionJob, - RegisteredFunctionJobCollection, + RegisteredFunction, + RegisteredFunctionJob, RegisteredProjectFunction, RegisteredProjectFunctionJob, ) -from models_library.functions_errors import FunctionIDNotFoundError +from models_library.functions_errors import ( + FunctionIDNotFoundError, + FunctionReadAccessDeniedError, +) from models_library.rest_pagination import PageMetaInfoLimitOffset +from models_library.users import UserID +from pydantic import EmailStr +from pytest_mock import MockerFixture, MockType +from pytest_simcore.helpers.httpx_calls_capture_models import HttpApiCallCaptureModel from servicelib.aiohttp import status +from servicelib.celery.app_server import BaseAppServer +from servicelib.celery.models import TaskID +from servicelib.common_headers import ( + X_SIMCORE_PARENT_NODE_ID, + X_SIMCORE_PARENT_PROJECT_UUID, +) +from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient from simcore_service_api_server._meta import API_VTAG +from simcore_service_api_server.api.dependencies.authentication import Identity +from simcore_service_api_server.celery_worker.worker_tasks import functions_tasks +from simcore_service_api_server.models.api_resources import JobLinks +from simcore_service_api_server.models.domain.functions import ( + PreRegisteredFunctionJobData, +) +from simcore_service_api_server.models.schemas.jobs import JobInputs +from simcore_service_api_server.services_rpc.wb_api_server import WbApiRpcClient + +_faker = Faker() async def test_register_function( @@ -28,10 +62,10 @@ async def test_register_function( mock_handler_in_functions_rpc_interface: 
Callable[[str, Any], None], mock_function: ProjectFunction, auth: httpx.BasicAuth, - mock_registered_function: RegisteredProjectFunction, + mock_registered_project_function: RegisteredProjectFunction, ) -> None: mock_handler_in_functions_rpc_interface( - "register_function", mock_registered_function + "register_function", mock_registered_project_function ) response = await client.post( f"{API_VTAG}/functions", json=mock_function.model_dump(mode="json"), auth=auth @@ -40,7 +74,7 @@ async def test_register_function( data = response.json() returned_function = RegisteredProjectFunction.model_validate(data) assert returned_function.uid is not None - assert returned_function == mock_registered_function + assert returned_function == mock_registered_project_function async def test_register_function_invalid( @@ -66,16 +100,18 @@ async def test_register_function_invalid( async def test_get_function( client: AsyncClient, mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], - mock_registered_function: RegisteredProjectFunction, + mock_registered_project_function: RegisteredProjectFunction, auth: httpx.BasicAuth, ) -> None: function_id = str(uuid4()) - mock_handler_in_functions_rpc_interface("get_function", mock_registered_function) + mock_handler_in_functions_rpc_interface( + "get_function", mock_registered_project_function + ) response = await client.get(f"{API_VTAG}/functions/{function_id}", auth=auth) assert response.status_code == status.HTTP_200_OK returned_function = RegisteredProjectFunction.model_validate(response.json()) - assert returned_function == mock_registered_function + assert returned_function == mock_registered_project_function async def test_get_function_not_found( @@ -92,21 +128,48 @@ async def test_get_function_not_found( None, FunctionIDNotFoundError(function_id=non_existent_function_id), ) - with pytest.raises(FunctionIDNotFoundError): - await client.get(f"{API_VTAG}/functions/{non_existent_function_id}", auth=auth) + response = await client.get( + f"{API_VTAG}/functions/{non_existent_function_id}", auth=auth + ) + assert response.status_code == status.HTTP_404_NOT_FOUND + + +async def test_get_function_read_access_denied( + client: AsyncClient, + mock_handler_in_functions_rpc_interface: Callable[ + [str, Any, Exception | None], None + ], + mock_registered_project_function: RegisteredProjectFunction, + auth: httpx.BasicAuth, +) -> None: + unauthorized_user_id = "unauthorized user" + mock_handler_in_functions_rpc_interface( + "get_function", + None, + FunctionReadAccessDeniedError( + function_id=mock_registered_project_function.uid, + user_id=unauthorized_user_id, + ), + ) + response = await client.get( + f"{API_VTAG}/functions/{mock_registered_project_function.uid}", auth=auth + ) + assert response.status_code == status.HTTP_403_FORBIDDEN + assert response.json()["errors"][0] == ( + f"Function {mock_registered_project_function.uid} read access denied for user {unauthorized_user_id}" + ) async def test_list_functions( client: AsyncClient, mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], - mock_registered_function: RegisteredProjectFunction, + mock_registered_project_function: RegisteredProjectFunction, auth: httpx.BasicAuth, ) -> None: - mock_handler_in_functions_rpc_interface( "list_functions", ( - [mock_registered_function for _ in range(5)], + [mock_registered_project_function for _ in range(5)], PageMetaInfoLimitOffset(total=5, count=5, limit=10, offset=0), ), ) @@ -117,21 +180,20 @@ async def test_list_functions( assert 
response.status_code == status.HTTP_200_OK data = response.json()["items"] assert len(data) == 5 - assert data[0]["title"] == mock_registered_function.title + assert data[0]["title"] == mock_registered_project_function.title async def test_update_function_title( client: AsyncClient, mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], - mock_registered_function: RegisteredProjectFunction, + mock_registered_project_function: RegisteredProjectFunction, auth: httpx.BasicAuth, ) -> None: - mock_handler_in_functions_rpc_interface( "update_function_title", RegisteredProjectFunction( **{ - **mock_registered_function.model_dump(), + **mock_registered_project_function.model_dump(), "title": "updated_example_function", } ), @@ -140,7 +202,7 @@ async def test_update_function_title( # Update the function title updated_title = {"title": "updated_example_function"} response = await client.patch( - f"{API_VTAG}/functions/{mock_registered_function.uid}/title", + f"{API_VTAG}/functions/{mock_registered_project_function.uid}/title", params=updated_title, auth=auth, ) @@ -152,14 +214,14 @@ async def test_update_function_title( async def test_update_function_description( client: AsyncClient, mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], - mock_registered_function: RegisteredProjectFunction, + mock_registered_project_function: RegisteredProjectFunction, auth: httpx.BasicAuth, ) -> None: mock_handler_in_functions_rpc_interface( "update_function_description", RegisteredProjectFunction( **{ - **mock_registered_function.model_dump(), + **mock_registered_project_function.model_dump(), "description": "updated_example_function", } ), @@ -168,7 +230,7 @@ async def test_update_function_description( # Update the function description updated_description = {"description": "updated_example_function"} response = await client.patch( - f"{API_VTAG}/functions/{mock_registered_function.uid}/description", + f"{API_VTAG}/functions/{mock_registered_project_function.uid}/description", params=updated_description, auth=auth, ) @@ -180,56 +242,64 @@ async def test_update_function_description( async def test_get_function_input_schema( client: AsyncClient, mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], - mock_registered_function: RegisteredProjectFunction, + mock_registered_project_function: RegisteredProjectFunction, auth: httpx.BasicAuth, ) -> None: - - mock_handler_in_functions_rpc_interface("get_function", mock_registered_function) + mock_handler_in_functions_rpc_interface( + "get_function", mock_registered_project_function + ) response = await client.get( - f"{API_VTAG}/functions/{mock_registered_function.uid}/input_schema", auth=auth + f"{API_VTAG}/functions/{mock_registered_project_function.uid}/input_schema", + auth=auth, ) assert response.status_code == status.HTTP_200_OK data = response.json() assert ( - data["schema_content"] == mock_registered_function.input_schema.schema_content + data["schema_content"] + == mock_registered_project_function.input_schema.schema_content ) async def test_get_function_output_schema( client: AsyncClient, mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], - mock_registered_function: RegisteredProjectFunction, + mock_registered_project_function: RegisteredProjectFunction, auth: httpx.BasicAuth, ) -> None: - - mock_handler_in_functions_rpc_interface("get_function", mock_registered_function) + mock_handler_in_functions_rpc_interface( + "get_function", mock_registered_project_function + ) response = await client.get( - 
f"{API_VTAG}/functions/{mock_registered_function.uid}/output_schema", auth=auth + f"{API_VTAG}/functions/{mock_registered_project_function.uid}/output_schema", + auth=auth, ) assert response.status_code == status.HTTP_200_OK data = response.json() assert ( - data["schema_content"] == mock_registered_function.output_schema.schema_content + data["schema_content"] + == mock_registered_project_function.output_schema.schema_content ) async def test_validate_function_inputs( client: AsyncClient, + mock_rabbitmq_rpc_client: MockerFixture, mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], - mock_registered_function: RegisteredProjectFunction, + mock_registered_project_function: RegisteredProjectFunction, auth: httpx.BasicAuth, ) -> None: - - mock_handler_in_functions_rpc_interface("get_function", mock_registered_function) + mock_handler_in_functions_rpc_interface( + "get_function", mock_registered_project_function + ) # Validate inputs validate_payload = {"input1": 10} response = await client.post( - f"{API_VTAG}/functions/{mock_registered_function.uid}:validate_inputs", + f"{API_VTAG}/functions/{mock_registered_project_function.uid}:validate_inputs", json=validate_payload, auth=auth, ) @@ -241,313 +311,243 @@ async def test_validate_function_inputs( async def test_delete_function( client: AsyncClient, mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], - mock_registered_function: RegisteredProjectFunction, + mock_registered_project_function: RegisteredProjectFunction, auth: httpx.BasicAuth, ) -> None: mock_handler_in_functions_rpc_interface("delete_function", None) # Delete the function response = await client.delete( - f"{API_VTAG}/functions/{mock_registered_function.uid}", auth=auth + f"{API_VTAG}/functions/{mock_registered_project_function.uid}", auth=auth ) assert response.status_code == status.HTTP_200_OK -async def test_register_function_job( +@pytest.mark.parametrize("user_has_execute_right", [False, True]) +@pytest.mark.parametrize( + "funcapi_endpoint,endpoint_inputs", [("run", {}), ("map", [{}, {}])] +) +async def test_run_map_function_not_allowed( client: AsyncClient, + mocker: MockerFixture, + mock_celery_task_manager: MockType, mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], - mock_function_job: ProjectFunctionJob, - mock_registered_function_job: RegisteredProjectFunctionJob, + mock_registered_project_function: RegisteredProjectFunction, auth: httpx.BasicAuth, + user_id: UserID, + mocked_webserver_rest_api_base: respx.MockRouter, + mocked_webserver_rpc_api: dict[str, MockType], + user_has_execute_right: bool, + funcapi_endpoint: str, + endpoint_inputs: dict | list[dict], ) -> None: - """Test the register_function_job endpoint.""" + """Test that running a function is not allowed.""" - mock_handler_in_functions_rpc_interface( - "register_function_job", mock_registered_function_job + mocker.patch( + "simcore_service_api_server.api.dependencies.services.get_task_manager", + return_value=mocker.MagicMock(spec=CeleryTaskManager), ) - response = await client.post( - f"{API_VTAG}/function_jobs", - json=mock_function_job.model_dump(mode="json"), - auth=auth, - ) - - assert response.status_code == status.HTTP_200_OK - assert ( - RegisteredProjectFunctionJob.model_validate(response.json()) - == mock_registered_function_job - ) - - -async def test_get_function_job( - client: AsyncClient, - mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], - mock_registered_function_job: RegisteredProjectFunctionJob, - auth: 
httpx.BasicAuth, -) -> None: - mock_handler_in_functions_rpc_interface( - "get_function_job", mock_registered_function_job - ) - - # Now, get the function job - response = await client.get( - f"{API_VTAG}/function_jobs/{mock_registered_function_job.uid}", auth=auth - ) - assert response.status_code == status.HTTP_200_OK - assert ( - RegisteredProjectFunctionJob.model_validate(response.json()) - == mock_registered_function_job + "get_function_user_permissions", + FunctionUserAccessRights( + user_id=user_id, + execute=False, + read=True, + write=True, + ), ) - - -async def test_list_function_jobs( - client: AsyncClient, - mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], - mock_registered_function_job: RegisteredProjectFunctionJob, - auth: httpx.BasicAuth, -) -> None: - mock_handler_in_functions_rpc_interface( - "list_function_jobs", - ( - [mock_registered_function_job for _ in range(5)], - PageMetaInfoLimitOffset(total=5, count=5, limit=10, offset=0), + "get_functions_user_api_access_rights", + FunctionUserApiAccessRights( + user_id=user_id, + execute_functions=user_has_execute_right, + write_functions=True, + read_functions=True, ), ) - # Now, list function jobs - response = await client.get(f"{API_VTAG}/function_jobs", auth=auth) - assert response.status_code == status.HTTP_200_OK - data = response.json()["items"] - assert len(data) == 5 - assert ( - RegisteredProjectFunctionJob.model_validate(data[0]) - == mock_registered_function_job - ) - - -async def test_list_function_jobs_with_function_filter( - client: AsyncClient, - mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], - mock_registered_function_job: RegisteredProjectFunctionJob, - mock_registered_function: RegisteredProjectFunction, - auth: httpx.BasicAuth, -) -> None: - mock_handler_in_functions_rpc_interface( - "list_function_jobs", - ( - [mock_registered_function_job for _ in range(5)], - PageMetaInfoLimitOffset(total=5, count=5, limit=10, offset=0), - ), + "get_function", + mock_registered_project_function, ) - # Now, list function jobs with a filter - response = await client.get( - f"{API_VTAG}/functions/{mock_registered_function.uid}/jobs", auth=auth - ) + # Monkeypatching MagicMock because otherwise it refuse to be used in an await statement + async def async_magic(): + pass - assert response.status_code == status.HTTP_200_OK - data = response.json()["items"] - assert len(data) == 5 - assert ( - RegisteredProjectFunctionJob.model_validate(data[0]) - == mock_registered_function_job - ) + MagicMock.__await__ = lambda _: async_magic().__await__() + + response = await client.post( + f"{API_VTAG}/functions/{mock_registered_project_function.uid}:{funcapi_endpoint}", + json=endpoint_inputs, + auth=auth, + headers={ + X_SIMCORE_PARENT_PROJECT_UUID: "null", + X_SIMCORE_PARENT_NODE_ID: "null", + }, + ) + if user_has_execute_right: + assert response.status_code == status.HTTP_403_FORBIDDEN + assert response.json()["errors"][0] == ( + f"Function {mock_registered_project_function.uid} execute access denied for user {user_id}" + ) + else: + assert response.status_code == status.HTTP_403_FORBIDDEN + assert response.json()["errors"][0] == ( + f"User {user_id} does not have the permission to execute functions" + ) -async def test_delete_function_job( +@pytest.mark.parametrize("capture", ["run_study_function_parent_info.json"]) +async def test_run_project_function( + mocker: MockerFixture, + mocked_webserver_rpc_api: dict[str, MockType], + app: FastAPI, client: AsyncClient, 
mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], - mock_registered_function_job: RegisteredProjectFunctionJob, + mock_registered_project_function: RegisteredProjectFunction, + mock_registered_project_function_job: RegisteredFunctionJob, auth: httpx.BasicAuth, + user_identity: Identity, + user_email: EmailStr, + job_links: JobLinks, + mocked_webserver_rest_api_base: respx.MockRouter, + mocked_directorv2_rest_api_base: respx.MockRouter, + create_respx_mock_from_capture, + project_tests_dir: Path, + capture: str, ) -> None: - mock_handler_in_functions_rpc_interface("delete_function_job", None) + def _get_app_server(celery_app: Any) -> FastAPI: + app_server = mocker.Mock(spec=BaseAppServer) + app_server.app = app + return app_server - # Now, delete the function job - response = await client.delete( - f"{API_VTAG}/function_jobs/{mock_registered_function_job.uid}", auth=auth - ) - assert response.status_code == status.HTTP_200_OK + mocker.patch.object(functions_tasks, "get_app_server", _get_app_server) + def _get_rabbitmq_rpc_client(app: FastAPI) -> RabbitMQRPCClient: + return mocker.MagicMock(spec=RabbitMQRPCClient) -async def test_register_function_job_collection( - client: AsyncClient, - mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], - auth: httpx.BasicAuth, -) -> None: - mock_function_job_collection = FunctionJobCollection.model_validate( - { - "title": "Test Collection", - "description": "A test function job collection", - "job_ids": [str(uuid4()), str(uuid4())], - } - ) - - mock_registered_function_job_collection = ( - RegisteredFunctionJobCollection.model_validate( - { - **mock_function_job_collection.model_dump(), - "uid": str(uuid4()), - "created_at": datetime.datetime.now(datetime.UTC), - } - ) + mocker.patch.object( + functions_tasks, "get_rabbitmq_rpc_client", _get_rabbitmq_rpc_client ) - mock_handler_in_functions_rpc_interface( - "register_function_job_collection", mock_registered_function_job_collection - ) + async def _get_wb_api_rpc_client(app: FastAPI) -> WbApiRpcClient: + return WbApiRpcClient(_client=mocker.MagicMock(spec=RabbitMQRPCClient)) - response = await client.post( - f"{API_VTAG}/function_job_collections", - json=mock_function_job_collection.model_dump(mode="json"), - auth=auth, - ) - - # Assert - assert response.status_code == status.HTTP_200_OK - assert ( - RegisteredFunctionJobCollection.model_validate(response.json()) - == mock_registered_function_job_collection + mocker.patch.object( + functions_tasks, "get_wb_api_rpc_client", _get_wb_api_rpc_client ) + def _default_side_effect( + request: httpx.Request, + path_params: dict[str, Any], + capture: HttpApiCallCaptureModel, + ) -> Any: + return capture.response_body -async def test_get_function_job_collection( - client: AsyncClient, - mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], - auth: httpx.BasicAuth, -) -> None: - mock_registered_function_job_collection = ( - RegisteredFunctionJobCollection.model_validate( - { - "uid": str(uuid4()), - "title": "Test Collection", - "description": "A test function job collection", - "job_ids": [str(uuid4()), str(uuid4())], - "created_at": datetime.datetime.now(datetime.UTC), - } - ) + create_respx_mock_from_capture( + respx_mocks=[mocked_webserver_rest_api_base, mocked_directorv2_rest_api_base], + capture_path=project_tests_dir / "mocks" / capture, + side_effects_callbacks=[_default_side_effect] * 50, ) mock_handler_in_functions_rpc_interface( - "get_function_job_collection", mock_registered_function_job_collection - ) 
- - response = await client.get( - f"{API_VTAG}/function_job_collections/{mock_registered_function_job_collection.uid}", - auth=auth, + "get_function_user_permissions", + FunctionUserAccessRights( + user_id=user_identity.user_id, + execute=True, + read=True, + write=True, + ), ) - assert response.status_code == status.HTTP_200_OK - assert ( - RegisteredFunctionJobCollection.model_validate(response.json()) - == mock_registered_function_job_collection + mock_handler_in_functions_rpc_interface( + "get_function", mock_registered_project_function ) - - -async def test_list_function_job_collections( - client: AsyncClient, - mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], - auth: httpx.BasicAuth, -) -> None: - mock_registered_function_job_collection = ( - RegisteredFunctionJobCollection.model_validate( - { - "uid": str(uuid4()), - "title": "Test Collection", - "description": "A test function job collection", - "job_ids": [str(uuid4()), str(uuid4())], - "created_at": datetime.datetime.now(datetime.UTC), - } - ) + mock_handler_in_functions_rpc_interface("find_cached_function_jobs", []) + mock_handler_in_functions_rpc_interface( + "register_function_job", mock_registered_project_function_job ) - mock_handler_in_functions_rpc_interface( - "list_function_job_collections", - ( - [mock_registered_function_job_collection for _ in range(5)], - PageMetaInfoLimitOffset(total=5, count=5, limit=10, offset=0), + "get_functions_user_api_access_rights", + FunctionUserApiAccessRights( + user_id=user_identity.user_id, + execute_functions=True, + write_functions=True, + read_functions=True, ), ) - - response = await client.get(f"{API_VTAG}/function_job_collections", auth=auth) - assert response.status_code == status.HTTP_200_OK - data = response.json()["items"] - assert len(data) == 5 - assert ( - RegisteredFunctionJobCollection.model_validate(data[0]) - == mock_registered_function_job_collection + mock_handler_in_functions_rpc_interface( + "patch_registered_function_job", mock_registered_project_function_job ) + pre_registered_function_job_data = PreRegisteredFunctionJobData( + job_inputs=JobInputs(values={}), + function_job_id=mock_registered_project_function.uid, + ) -async def test_delete_function_job_collection( - client: AsyncClient, - mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], - mock_registered_function_job_collection: RegisteredFunctionJobCollection, - auth: httpx.BasicAuth, -) -> None: - - mock_handler_in_functions_rpc_interface("delete_function_job_collection", None) - - # Now, delete the function job collection - response = await client.delete( - f"{API_VTAG}/function_job_collections/{mock_registered_function_job_collection.uid}", - auth=auth, + job = await functions_tasks.run_function( + task=MagicMock(spec=Task), + task_id=TaskID(_faker.uuid4()), + user_identity=user_identity, + function=mock_registered_project_function, + pre_registered_function_job_data=pre_registered_function_job_data, + pricing_spec=None, + job_links=job_links, + x_simcore_parent_project_uuid=None, + x_simcore_parent_node_id=None, ) - assert response.status_code == status.HTTP_200_OK - data = response.json() - assert data is None + assert isinstance(job, RegisteredProjectFunctionJob) -async def test_get_function_job_collection_jobs( +async def test_export_logs_project_function_job( client: AsyncClient, mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], - mock_registered_function_job_collection: RegisteredFunctionJobCollection, + mock_registered_project_function: 
RegisteredFunction, + mock_registered_project_function_job: RegisteredFunctionJob, + mocked_directorv2_rpc_api: dict[str, MockType], + mocked_storage_rpc_api: dict[str, MockType], auth: httpx.BasicAuth, -) -> None: - + user_id: UserID, +): mock_handler_in_functions_rpc_interface( - "get_function_job_collection", mock_registered_function_job_collection + "get_function", mock_registered_project_function + ) + mock_handler_in_functions_rpc_interface( + "get_function_job", mock_registered_project_function_job ) - response = await client.get( - f"{API_VTAG}/function_job_collections/{mock_registered_function_job_collection.uid}/function_jobs", + response = await client.post( + f"{API_VTAG}/function_jobs/{mock_registered_project_function_job.uid}/log", auth=auth, ) + assert response.status_code == status.HTTP_200_OK - data = response.json() - assert len(data) == len(mock_registered_function_job_collection.job_ids) + TaskGet.model_validate(response.json()) -async def test_list_function_job_collections_with_function_filter( +async def test_export_logs_solver_function_job( client: AsyncClient, mock_handler_in_functions_rpc_interface: Callable[[str, Any], None], - mock_registered_function_job_collection: RegisteredFunctionJobCollection, - mock_registered_function: RegisteredProjectFunction, + mock_registered_solver_function: RegisteredFunction, + mock_registered_solver_function_job: RegisteredFunctionJob, + mocked_directorv2_rpc_api: dict[str, MockType], + mocked_storage_rpc_api: dict[str, MockType], auth: httpx.BasicAuth, -) -> None: - + user_id: UserID, +): mock_handler_in_functions_rpc_interface( - "list_function_job_collections", - ( - [mock_registered_function_job_collection for _ in range(2)], - PageMetaInfoLimitOffset(total=5, count=2, limit=2, offset=1), - ), + "get_function", mock_registered_solver_function + ) + mock_handler_in_functions_rpc_interface( + "get_function_job", mock_registered_solver_function_job ) - response = await client.get( - f"{API_VTAG}/function_job_collections?function_id={mock_registered_function.uid}&limit=2&offset=1", + response = await client.post( + f"{API_VTAG}/function_jobs/{mock_registered_solver_function_job.uid}/log", auth=auth, ) - assert response.status_code == status.HTTP_200_OK - data = response.json() - assert data["total"] == 5 - assert data["limit"] == 2 - assert data["offset"] == 1 - assert len(data["items"]) == 2 - assert ( - RegisteredFunctionJobCollection.model_validate(data["items"][0]) - == mock_registered_function_job_collection - ) + assert response.status_code == status.HTTP_200_OK + TaskGet.model_validate(response.json()) diff --git a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_delete.py b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_delete.py index 899a18553dcb..edb8531a289b 100644 --- a/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_delete.py +++ b/services/api-server/tests/unit/api_solvers/test_api_routers_solvers_jobs_delete.py @@ -227,3 +227,64 @@ def create_project_side_effect(request: httpx.Request): ) assert resp.status_code == status.HTTP_201_CREATED job = Job.model_validate(resp.json()) + + +@pytest.fixture +def mocked_backend_services_apis_for_delete_job_assets( + mocked_webserver_rest_api: MockRouter, + mocked_webserver_rpc_api: dict[str, MockType], + mocked_storage_rest_api_base: MockRouter, +) -> dict[str, MockRouter | dict[str, MockType]]: + + # Patch PATCH /projects/{project_id} + def _patch_project(request: httpx.Request, **kwargs): + # Accept 
any patch, return 204 No Content
+        return httpx.Response(status_code=status.HTTP_204_NO_CONTENT)
+
+    mocked_webserver_rest_api.patch(
+        path__regex=r"/projects/(?P<project_id>[\w-]+)$",
+        name="patch_project",
+    ).mock(side_effect=_patch_project)
+
+    # Mock storage REST delete_project_s3_assets
+    def _delete_project_s3_assets(request: httpx.Request, **kwargs):
+        return httpx.Response(status_code=status.HTTP_204_NO_CONTENT)
+
+    mocked_storage_rest_api_base.delete(
+        path__regex=r"/simcore-s3/folders/(?P<folder_id>[\w-]+)$",
+        name="delete_project_s3_assets",
+    ).mock(side_effect=_delete_project_s3_assets)
+
+    return {
+        "webserver_rest": mocked_webserver_rest_api,
+        "webserver_rpc": mocked_webserver_rpc_api,
+        "storage_rest": mocked_storage_rest_api_base,
+    }
+
+
+@pytest.mark.acceptance_test("Test delete_job_assets endpoint")
+async def test_delete_job_assets_endpoint(
+    auth: httpx.BasicAuth,
+    client: httpx.AsyncClient,
+    solver_key: str,
+    solver_version: str,
+    mocked_backend_services_apis_for_delete_job_assets: dict[
+        str, MockRouter | dict[str, MockType]
+    ],
+):
+    job_id = "123e4567-e89b-12d3-a456-426614174000"
+    url = f"/{API_VTAG}/solvers/{solver_key}/releases/{solver_version}/jobs/{job_id}/assets"
+
+    resp = await client.delete(url, auth=auth)
+    assert resp.status_code == status.HTTP_204_NO_CONTENT
+
+    webserver_rest = mocked_backend_services_apis_for_delete_job_assets[
+        "webserver_rest"
+    ]
+    assert webserver_rest["patch_project"].called
+
+    storage_rest = mocked_backend_services_apis_for_delete_job_assets["storage_rest"]
+    assert storage_rest["delete_project_s3_assets"].called
+
+    webserver_rpc = mocked_backend_services_apis_for_delete_job_assets["webserver_rpc"]
+    assert webserver_rpc["mark_project_as_job"].called
diff --git a/services/api-server/tests/unit/api_studies/test_api_routes_studies.py b/services/api-server/tests/unit/api_studies/test_api_routes_studies.py
index 6c289763d7bb..e4349aaab119 100644
--- a/services/api-server/tests/unit/api_studies/test_api_routes_studies.py
+++ b/services/api-server/tests/unit/api_studies/test_api_routes_studies.py
@@ -1,8 +1,11 @@
 # pylint: disable=redefined-outer-name
 # pylint: disable=unused-argument
 # pylint: disable=unused-variable
+# pylint: disable=too-many-arguments
+# pylint: disable=too-many-statements
 
+import json
 from collections.abc import Callable
 from pathlib import Path
 from typing import Any, TypedDict
@@ -12,6 +15,8 @@
 import pytest
 from faker import Faker
 from fastapi import status
+from models_library.api_schemas_webserver.projects import ProjectGet
+from models_library.generics import Envelope
 from pydantic import TypeAdapter
 from pytest_mock import MockType
 from pytest_simcore.helpers.httpx_calls_capture_models import HttpApiCallCaptureModel
@@ -188,6 +193,7 @@ def clone_project_side_effect(request: httpx.Request):
         _headers[X_SIMCORE_PARENT_PROJECT_UUID] = f"{parent_project_id}"
     if parent_node_id is not None:
         _headers[X_SIMCORE_PARENT_NODE_ID] = f"{parent_node_id}"
+
     resp = await client.post(
         f"/{API_VTAG}/studies/{study_id}:clone", headers=_headers, auth=auth
     )
@@ -197,6 +203,118 @@ def clone_project_side_effect(request: httpx.Request):
     assert resp.status_code == status.HTTP_201_CREATED
 
 
+# string length limits: https://github.com/ITISFoundation/osparc-simcore/blob/master/packages/models-library/src/models_library/api_schemas_webserver/projects.py#L242
+@pytest.mark.parametrize("hidden", [True, False, None])
+@pytest.mark.parametrize(
+    "title, description, expected_status_code",
+    [
+        (
+            _faker.text(max_nb_chars=600),
_faker.text(max_nb_chars=65536), + status.HTTP_201_CREATED, + ), + ("a" * 999, "b" * 99999, status.HTTP_201_CREATED), + (None, None, status.HTTP_201_CREATED), + ], + ids=[ + "valid_title_and_description", + "very_long_title_and_description", + "no_title_or_description", + ], +) +async def test_clone_study_with_title( + client: httpx.AsyncClient, + auth: httpx.BasicAuth, + study_id: StudyID, + mocked_webserver_rest_api_base: MockRouter, + patch_webserver_long_running_project_tasks: Callable[[MockRouter], MockRouter], + mock_webserver_patch_project: Callable[[MockRouter], MockRouter], + mock_webserver_get_project: Callable[[MockRouter], MockRouter], + hidden: bool | None, + title: str | None, + description: str | None, + expected_status_code: int, +): + # Mocks /projects + patch_webserver_long_running_project_tasks(mocked_webserver_rest_api_base) + mock_webserver_patch_project(mocked_webserver_rest_api_base) + mock_webserver_get_project(mocked_webserver_rest_api_base) + + create_callback = mocked_webserver_rest_api_base["create_projects"].side_effect + assert create_callback is not None + patch_callback = mocked_webserver_rest_api_base["project_patch"].side_effect + assert patch_callback is not None + get_callback = mocked_webserver_rest_api_base["project_get"].side_effect + assert get_callback is not None + + def clone_project_side_effect(request: httpx.Request): + if hidden is not None: + _hidden = request.url.params.get("hidden") + assert _hidden == str(hidden).lower() + return create_callback(request) + + def patch_project_side_effect(request: httpx.Request, *args, **kwargs): + body = json.loads(request.content.decode("utf-8")) + if title is not None: + _name = body.get("name") + assert _name is not None and _name in title + if description is not None: + _description = body.get("description") + assert _description is not None and _description in description + return patch_callback(request, *args, **kwargs) + + def get_project_side_effect(request: httpx.Request, *args, **kwargs): + # this is needed to return the patched project + _project_id = kwargs.get("project_id") + assert _project_id is not None + result = Envelope[ProjectGet].model_validate( + {"data": ProjectGet.model_json_schema()["examples"][0]} + ) + assert result.data is not None + if title is not None: + result.data.name = title + if description is not None: + result.data.description = description + result.data.uuid = UUID(_project_id) + return httpx.Response(status.HTTP_200_OK, content=result.model_dump_json()) + + mocked_webserver_rest_api_base["create_projects"].side_effect = ( + clone_project_side_effect + ) + mocked_webserver_rest_api_base["project_patch"].side_effect = ( + patch_project_side_effect + ) + mocked_webserver_rest_api_base["project_get"].side_effect = get_project_side_effect + + query = dict() + if hidden is not None: + query["hidden"] = str(hidden).lower() + + body = dict() + if hidden is not None: + body["hidden"] = hidden + if title is not None: + body["title"] = title + if description is not None: + body["description"] = description + + resp = await client.post( + f"/{API_VTAG}/studies/{study_id}:clone", auth=auth, json=body, params=query + ) + + assert mocked_webserver_rest_api_base["create_projects"].called + if title or description: + assert mocked_webserver_rest_api_base["project_patch"].called + assert mocked_webserver_rest_api_base["project_get"].called + + assert resp.status_code == expected_status_code + study = Study.model_validate(resp.json()) + if title is not None: + assert study.title == 
title + if description is not None: + assert study.description == description + + async def test_clone_study_not_found( client: httpx.AsyncClient, auth: httpx.BasicAuth, diff --git a/services/api-server/tests/unit/conftest.py b/services/api-server/tests/unit/conftest.py index 174bf1bd6013..64c2adeac9e9 100644 --- a/services/api-server/tests/unit/conftest.py +++ b/services/api-server/tests/unit/conftest.py @@ -5,12 +5,14 @@ # pylint: disable=broad-exception-caught import json +import re import subprocess from collections.abc import AsyncIterator, Callable, Iterator from copy import deepcopy from pathlib import Path from typing import Any from unittest.mock import MagicMock +from urllib.parse import urlparse, urlunparse import aiohttp.test_utils import httpx @@ -21,33 +23,44 @@ from faker import Faker from fastapi import FastAPI, status from fastapi.encoders import jsonable_encoder -from httpx import ASGITransport +from httpx import ASGITransport, Request, Response from models_library.api_schemas_long_running_tasks.tasks import ( TaskGet, TaskProgress, TaskStatus, ) -from models_library.api_schemas_storage.storage_schemas import HealthCheck +from models_library.api_schemas_storage.storage_schemas import ( + FileUploadCompleteFutureResponse, + FileUploadCompleteResponse, + FileUploadCompleteState, + FileUploadSchema, + HealthCheck, +) from models_library.api_schemas_webserver.projects import ProjectGet from models_library.app_diagnostics import AppStatusCheck from models_library.generics import Envelope from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import BaseFileLink, SimcoreS3FileID +from models_library.rpc.webserver.projects import ProjectJobRpcGet from models_library.users import UserID from moto.server import ThreadedMotoServer from packaging.version import Version from pydantic import EmailStr, HttpUrl, TypeAdapter from pytest_mock import MockerFixture, MockType from pytest_simcore.helpers.catalog_rpc_server import CatalogRpcSideEffects +from pytest_simcore.helpers.director_v2_rpc_server import DirectorV2SideEffects from pytest_simcore.helpers.host import get_localhost_ip from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.storage_rpc_server import StorageSideEffects from pytest_simcore.helpers.webserver_rpc_server import WebserverRpcSideEffects from pytest_simcore.simcore_webserver_projects_rest_api import GET_PROJECT from requests.auth import HTTPBasicAuth from respx import MockRouter -from simcore_service_api_server.core.application import init_app +from simcore_service_api_server.api.dependencies.authentication import Identity +from simcore_service_api_server.core.application import create_app from simcore_service_api_server.core.settings import ApplicationSettings +from simcore_service_api_server.models.api_resources import JobLinks from simcore_service_api_server.repository.api_keys import UserAndProductTuple from simcore_service_api_server.services_http.solver_job_outputs import ResultsTypes from simcore_service_api_server.services_rpc.wb_api_server import WbApiRpcClient @@ -58,6 +71,19 @@ def product_name() -> ProductName: return "osparc" +@pytest.fixture +def user_identity( + user_id: UserID, + user_email: EmailStr, + product_name: ProductName, +) -> Identity: + return Identity( + user_id=user_id, + product_name=product_name, + email=user_email, + ) + + @pytest.fixture def app_environment( monkeypatch: pytest.MonkeyPatch, @@ -103,7 
+129,6 @@ def mock_missing_plugins(app_environment: EnvVarsDict, mocker: MockerFixture): "setup_prometheus_instrumentation", autospec=True, ) - return app_environment @@ -123,7 +148,7 @@ def app( patch_lrt_response_urls() - return init_app() + return create_app() MAX_TIME_FOR_APP_TO_STARTUP = 10 @@ -174,7 +199,7 @@ def auth( # mock engine if db was not init if app.state.settings.API_SERVER_POSTGRES is None: engine = mocker.MagicMock() - engine.minsize = 1 + engine.minsize = 2 engine.size = 10 engine.freesize = 3 engine.maxsize = 10 @@ -422,6 +447,77 @@ def mocked_storage_rest_api_base( ).model_dump(mode="json"), ) + assert ( + openapi["paths"]["/v0/locations/{location_id}/files/{file_id}"]["put"][ + "operationId" + ] + == "upload_file_v0_locations__location_id__files__file_id__put" + ) + respx_mock.put( + re.compile(r"^http://[a-z\-_]*storage:[0-9]+/v0/locations/[0-9]+/files.+$"), + name="upload_file_v0_locations__location_id__files__file_id__put", + ).respond( + status.HTTP_200_OK, + json=Envelope[FileUploadSchema]( + data=FileUploadSchema.model_json_schema()["examples"][0] + ).model_dump(mode="json"), + ) + + # Add mocks for completion and abort endpoints + def generate_future_link(request: Request, **kwargs): + parsed_url = urlparse(f"{request.url}") + stripped_url = urlunparse( + (parsed_url.scheme, parsed_url.netloc, parsed_url.path, "", "", "") + ) + + payload = FileUploadCompleteResponse.model_validate( + { + "links": { + "state": stripped_url + + ":complete/futures/" + + str(faker.uuid4()) + }, + }, + ) + return Response( + status_code=status.HTTP_200_OK, + json=jsonable_encoder( + Envelope[FileUploadCompleteResponse](data=payload) + ), + ) + + respx_mock.post( + re.compile( + r"^http://[a-z\-_]*storage:[0-9]+/v0/locations/[0-9]+/files/.+complete(?:\?.*)?$" + ), + name="complete_upload_file_v0_locations__location_id__files__file_id__complete_post", + ).side_effect = generate_future_link + + respx_mock.post( + re.compile( + r"^http://[a-z\-_]*storage:[0-9]+/v0/locations/[0-9]+/files/.+complete/futures/.+" + ) + ).respond( + status_code=status.HTTP_200_OK, + json=jsonable_encoder( + Envelope[FileUploadCompleteFutureResponse]( + data=FileUploadCompleteFutureResponse( + state=FileUploadCompleteState.OK, + e_tag="07d1c1a4-b073-4be7-b022-f405d90e99aa", + ) + ) + ), + ) + + respx_mock.post( + re.compile( + r"^http://[a-z\-_]*storage:[0-9]+/v0/locations/[0-9]+/files/.+:abort(?:\?.*)?$" + ), + name="abort_upload_file_v0_locations__location_id__files__file_id__abort_post", + ).respond( + status.HTTP_204_NO_CONTENT, + ) + # SEE https://github.com/pcrespov/sandbox-python/blob/f650aad57aced304aac9d0ad56c00723d2274ad0/respx-lib/test_disable_mock.py if not services_mocks_enabled: respx_mock.stop() @@ -462,9 +558,23 @@ def mocked_catalog_rest_api_base( yield respx_mock +@pytest.fixture +def project_job_rpc_get() -> ProjectJobRpcGet: + example = ProjectJobRpcGet.model_json_schema()["examples"][0] + return ProjectJobRpcGet.model_validate(example) + + +@pytest.fixture +def job_links() -> JobLinks: + example = JobLinks.model_json_schema()["examples"][0] + return JobLinks.model_validate(example) + + @pytest.fixture def mocked_webserver_rpc_api( - mocked_app_dependencies: None, mocker: MockerFixture + mocked_app_dependencies: None, + mocker: MockerFixture, + project_job_rpc_get: ProjectJobRpcGet, ) -> dict[str, MockType]: """ Mocks the webserver's simcore service RPC API for testing purposes. 
@@ -473,7 +583,7 @@ def mocked_webserver_rpc_api( projects as projects_rpc, # keep import here ) - side_effects = WebserverRpcSideEffects() + side_effects = WebserverRpcSideEffects(project_job_rpc_get=project_job_rpc_get) return { "mark_project_as_job": mocker.patch.object( @@ -482,6 +592,12 @@ def mocked_webserver_rpc_api( autospec=True, side_effect=side_effects.mark_project_as_job, ), + "get_project_marked_as_job": mocker.patch.object( + projects_rpc, + "get_project_marked_as_job", + autospec=True, + side_effect=side_effects.get_project_marked_as_job, + ), "list_projects_marked_as_jobs": mocker.patch.object( projects_rpc, "list_projects_marked_as_jobs", @@ -532,6 +648,92 @@ def mocked_catalog_rpc_api( return mocks +@pytest.fixture +def directorv2_rpc_side_effects(request) -> Any: + if "param" in dir(request) and request.param is not None: + return request.param + return DirectorV2SideEffects() + + +@pytest.fixture +def mocked_directorv2_rpc_api( + mocked_app_dependencies: None, + mocker: MockerFixture, + directorv2_rpc_side_effects: Any, +) -> dict[str, MockType]: + """ + Mocks the director-v2's simcore service RPC API for testing purposes. + """ + from servicelib.rabbitmq.rpc_interfaces.director_v2 import ( + computations_tasks as directorv2_rpc, # keep import here + ) + + mocks = {} + + # Get all callable methods from the side effects class that are not built-ins + side_effect_methods = [ + method_name + for method_name in dir(directorv2_rpc_side_effects) + if not method_name.startswith("_") + and callable(getattr(directorv2_rpc_side_effects, method_name)) + ] + + # Create mocks for each method in directorv2_rpc that has a corresponding side effect + for method_name in side_effect_methods: + if hasattr(directorv2_rpc, method_name): + mocks[method_name] = mocker.patch.object( + directorv2_rpc, + method_name, + autospec=True, + side_effect=getattr(directorv2_rpc_side_effects, method_name), + ) + + return mocks + + +@pytest.fixture +def storage_rpc_side_effects(request) -> Any: + if "param" in dir(request) and request.param is not None: + return request.param + return StorageSideEffects() + + +@pytest.fixture +def mocked_storage_rpc_api( + mocked_app_dependencies: None, + mocker: MockerFixture, + storage_rpc_side_effects: Any, +) -> dict[str, MockType]: + """ + Mocks the storage's simcore service RPC API for testing purposes. 
+ """ + from servicelib.rabbitmq.rpc_interfaces.storage import ( + simcore_s3 as storage_rpc, # keep import here + ) + + mocks = {} + + # Get all callable methods from the side effects class that are not built-ins + side_effect_methods = [ + method_name + for method_name in dir(storage_rpc_side_effects) + if not method_name.startswith("_") + and callable(getattr(storage_rpc_side_effects, method_name)) + ] + + # Create mocks for each method in storage_rpc that has a corresponding side effect + for method_name in side_effect_methods: + if hasattr(storage_rpc, method_name): + mocks[method_name] = mocker.patch.object( + storage_rpc, + method_name, + autospec=True, + side_effect=getattr(storage_rpc_side_effects, method_name), + ) + + return mocks + + # # Other Mocks # @@ -550,7 +752,7 @@ def mocked_solver_job_outputs(mocker) -> None: eTag=None, ) mocker.patch( - "simcore_service_api_server.api.routes.solvers_jobs_read.get_solver_output_results", + "simcore_service_api_server._service_jobs.get_solver_output_results", autospec=True, return_value=result, ) @@ -715,6 +917,51 @@ def _mock(webserver_mock_router: MockRouter) -> MockRouter: return _mock +@pytest.fixture +def mock_webserver_patch_project( + app: FastAPI, services_mocks_enabled: bool +) -> Callable[[MockRouter], MockRouter]: + settings: ApplicationSettings = app.state.settings + assert settings.API_SERVER_WEBSERVER is not None + + def _mock(webserver_mock_router: MockRouter) -> MockRouter: + def _patch_project(request: httpx.Request, *args, **kwargs): + return httpx.Response(status.HTTP_200_OK) + + if services_mocks_enabled: + webserver_mock_router.patch( + path__regex=r"/projects/(?P[\w-]+)$", + name="project_patch", + ).mock(side_effect=_patch_project) + return webserver_mock_router + + return _mock + + +@pytest.fixture +def mock_webserver_get_project( + app: FastAPI, services_mocks_enabled: bool +) -> Callable[[MockRouter], MockRouter]: + settings: ApplicationSettings = app.state.settings + assert settings.API_SERVER_WEBSERVER is not None + + def _mock(webserver_mock_router: MockRouter) -> MockRouter: + def _get_project(request: httpx.Request, *args, **kwargs): + result = Envelope[ProjectGet].model_validate( + {"data": ProjectGet.model_json_schema()["examples"][0]} + ) + return httpx.Response(status.HTTP_200_OK, json=result.model_dump()) + + if services_mocks_enabled: + webserver_mock_router.get( + path__regex=r"/projects/(?P[\w-]+)$", + name="project_get", + ).mock(side_effect=_get_project) + return webserver_mock_router + + return _mock + + @pytest.fixture def openapi_dev_specs(project_slug_dir: Path) -> dict[str, Any]: openapi_file = (project_slug_dir / "openapi-dev.json").resolve() diff --git a/services/api-server/tests/unit/service/conftest.py b/services/api-server/tests/unit/service/conftest.py index 542f234d69e2..fb04ff773d83 100644 --- a/services/api-server/tests/unit/service/conftest.py +++ b/services/api-server/tests/unit/service/conftest.py @@ -19,10 +19,14 @@ from simcore_service_api_server._service_jobs import JobService from simcore_service_api_server._service_programs import ProgramService from simcore_service_api_server._service_solvers import SolverService -from simcore_service_api_server._service_studies import StudyService +from simcore_service_api_server.services_http.director_v2 import DirectorV2Api +from simcore_service_api_server.services_http.storage import StorageApi from simcore_service_api_server.services_http.webserver import AuthSession from simcore_service_api_server.services_rpc.catalog import 
CatalogService +from simcore_service_api_server.services_rpc.director_v2 import DirectorV2Service +from simcore_service_api_server.services_rpc.storage import StorageService from simcore_service_api_server.services_rpc.wb_api_server import WbApiRpcClient +from sqlalchemy.ext.asyncio import AsyncEngine async def catalog_rpc_side_effect(): @@ -71,6 +75,24 @@ def wb_api_rpc_client( return WbApiRpcClient(_client=mocked_rpc_client) +@pytest.fixture +def director_v2_rpc_client( + mocked_rpc_client: MockType, +) -> DirectorV2Service: + return DirectorV2Service(_rpc_client=mocked_rpc_client) + + +@pytest.fixture +def storage_rpc_client( + mocked_rpc_client: MockType, + user_id: UserID, + product_name: ProductName, +) -> StorageService: + return StorageService( + _rpc_client=mocked_rpc_client, _user_id=user_id, _product_name=product_name + ) + + @pytest.fixture def auth_session( mocker: MockerFixture, @@ -89,18 +111,15 @@ async def _create_project(project: ProjectCreateNew, **kwargs): @pytest.fixture -def job_service( - auth_session: AuthSession, - wb_api_rpc_client: WbApiRpcClient, - product_name: ProductName, - user_id: UserID, -) -> JobService: - return JobService( - _web_rest_client=auth_session, - _web_rpc_client=wb_api_rpc_client, - user_id=user_id, - product_name=product_name, - ) +def director2_api(mocker: MockerFixture) -> DirectorV2Api: + return mocker.AsyncMock(spec=DirectorV2Api) + + +@pytest.fixture +def storage_rest_client( + mocker: MockerFixture, +) -> StorageApi: + return mocker.AsyncMock(spec=StorageApi) @pytest.fixture @@ -117,34 +136,49 @@ def catalog_service( @pytest.fixture def solver_service( catalog_service: CatalogService, - job_service: JobService, product_name: ProductName, user_id: UserID, ) -> SolverService: return SolverService( catalog_service=catalog_service, - job_service=job_service, user_id=user_id, product_name=product_name, ) @pytest.fixture -def study_service( - job_service: JobService, +def program_service( + catalog_service: CatalogService, +) -> ProgramService: + return ProgramService(catalog_service=catalog_service) + + +@pytest.fixture +def async_pg_engine(mocker: MockerFixture) -> AsyncEngine: + return mocker.MagicMock(spec=AsyncEngine) + + +@pytest.fixture +def job_service( + auth_session: AuthSession, + director_v2_rpc_client: DirectorV2Service, + storage_rpc_client: StorageService, + wb_api_rpc_client: WbApiRpcClient, + director2_api: DirectorV2Api, + storage_rest_client: StorageApi, product_name: ProductName, user_id: UserID, -) -> StudyService: - - return StudyService( - job_service=job_service, + solver_service: SolverService, + async_pg_engine: AsyncEngine, +) -> JobService: + return JobService( + _web_rest_client=auth_session, + _web_rpc_client=wb_api_rpc_client, + _storage_rpc_client=storage_rpc_client, + _storage_rest_client=storage_rest_client, + _directorv2_rpc_client=director_v2_rpc_client, + _director2_api=director2_api, + _solver_service=solver_service, user_id=user_id, product_name=product_name, ) - - -@pytest.fixture -def program_service( - catalog_service: CatalogService, -) -> ProgramService: - return ProgramService(catalog_service=catalog_service) diff --git a/services/api-server/tests/unit/service/test_service_function_jobs_task_client.py b/services/api-server/tests/unit/service/test_service_function_jobs_task_client.py new file mode 100644 index 000000000000..0a5adc695ea8 --- /dev/null +++ b/services/api-server/tests/unit/service/test_service_function_jobs_task_client.py @@ -0,0 +1,83 @@ +# pylint: disable=redefined-outer-name + 
+from collections.abc import Callable + +import pytest +from celery_library.errors import TaskNotFoundError +from faker import Faker +from models_library.products import ProductName +from models_library.progress_bar import ProgressReport +from models_library.users import UserID +from pytest_mock import MockerFixture, MockType +from servicelib.celery.models import TaskState, TaskStatus, TaskUUID +from servicelib.celery.task_manager import TaskManager +from simcore_service_api_server._service_function_jobs_task_client import ( + _celery_task_status, +) +from simcore_service_api_server.models.schemas.functions import ( + FunctionJobCreationTaskStatus, +) + +_faker = Faker() + + +@pytest.fixture +async def create_mock_task_manager( + mocker: MockerFixture, +) -> Callable[[TaskStatus | Exception], MockType]: + + def _(status_or_exception: TaskStatus | Exception) -> MockType: + mock_task_manager = mocker.Mock(spec=TaskManager) + if isinstance(status_or_exception, Exception): + + async def _raise(*args, **kwargs): + raise status_or_exception + + mock_task_manager.get_task_status.side_effect = _raise + else: + mock_task_manager.get_task_status.return_value = status_or_exception + return mock_task_manager + + return _ + + +@pytest.mark.parametrize( + "status_or_exception", + [ + TaskStatus( + task_uuid=TaskUUID(_faker.uuid4()), + task_state=state, + progress_report=ProgressReport(actual_value=3.14), + ) + for state in list(TaskState) + ] + + [TaskNotFoundError(task_id=_faker.uuid4())], +) +@pytest.mark.parametrize("job_creation_task_id", [_faker.uuid4(), None]) +async def test_celery_status_conversion( + status_or_exception: TaskStatus | Exception, + job_creation_task_id: str | None, + create_mock_task_manager: Callable[[TaskStatus | Exception], MockType], + user_id: UserID, + product_name: ProductName, +): + + mock_task_manager = create_mock_task_manager(status_or_exception) + + status = await _celery_task_status( + job_creation_task_id=job_creation_task_id, + task_manager=mock_task_manager, + user_id=user_id, + product_name=product_name, + ) + + if job_creation_task_id is None: + assert status == FunctionJobCreationTaskStatus.NOT_YET_SCHEDULED + elif isinstance(status_or_exception, TaskNotFoundError): + assert status == FunctionJobCreationTaskStatus.ERROR + elif isinstance(status_or_exception, TaskStatus): + assert ( + status == FunctionJobCreationTaskStatus[status_or_exception.task_state.name] + ) + else: + pytest.fail("Unexpected test input") diff --git a/services/api-server/tests/unit/service/test_service_jobs.py b/services/api-server/tests/unit/service/test_service_jobs.py index d6829339507f..b8686819ed06 100644 --- a/services/api-server/tests/unit/service/test_service_jobs.py +++ b/services/api-server/tests/unit/service/test_service_jobs.py @@ -4,18 +4,22 @@ # pylint: disable=unused-variable +from faker import Faker from pytest_mock import MockType from simcore_service_api_server._service_jobs import JobService +from simcore_service_api_server.models.api_resources import JobLinks from simcore_service_api_server.models.schemas.jobs import Job, JobInputs from simcore_service_api_server.models.schemas.solvers import Solver +_faker = Faker() + async def test_list_jobs_by_resource_prefix( mocked_rpc_client: MockType, job_service: JobService, ): # Test with default pagination parameters - jobs, page_meta = await job_service.list_jobs( + jobs, page_meta = await job_service._list_jobs( job_parent_resource_name="solvers/some-solver" ) @@ -53,13 +57,19 @@ async def test_create_job( def 
mock_url_for(*args, **kwargs): return "https://example.com/api/v1/jobs/test-job" + job_rest_interface_links = JobLinks( + url_template=_faker.url() + "/{job_id}", + runner_url_template=_faker.url(), + outputs_url_template=_faker.url() + "/{job_id}", + ) + # Test job creation - job, project = await job_service.create_job( + job, project = await job_service.create_project_marked_as_job( solver_or_program=solver, inputs=inputs, parent_project_uuid=None, parent_node_id=None, - url_for=mock_url_for, + job_links=job_rest_interface_links, hidden=False, project_name="Test Job Project", description="Test description", diff --git a/services/api-server/tests/unit/service/test_service_solvers.py b/services/api-server/tests/unit/service/test_service_solvers.py index a32d3b82f6f7..191eb50969ea 100644 --- a/services/api-server/tests/unit/service/test_service_solvers.py +++ b/services/api-server/tests/unit/service/test_service_solvers.py @@ -5,15 +5,25 @@ import pytest from models_library.products import ProductName +from models_library.projects import ProjectID +from models_library.rpc.webserver.projects import ProjectJobRpcGet from models_library.users import UserID from pytest_mock import MockType from simcore_service_api_server._service_jobs import JobService from simcore_service_api_server._service_solvers import SolverService +from simcore_service_api_server.exceptions.backend_errors import ( + JobForbiddenAccessError, + JobNotFoundError, +) from simcore_service_api_server.exceptions.custom_errors import ( ServiceConfigurationError, ) from simcore_service_api_server.models.schemas.solvers import Solver from simcore_service_api_server.services_rpc.catalog import CatalogService +from simcore_service_api_server.services_rpc.wb_api_server import ( + ProjectForbiddenRpcError, + ProjectNotFoundRpcError, +) async def test_get_solver( @@ -36,10 +46,10 @@ async def test_get_solver( async def test_list_jobs( mocked_rpc_client: MockType, - solver_service: SolverService, + job_service: JobService, ): # Test default parameters - jobs, page_meta = await solver_service.list_jobs() + jobs, page_meta = await job_service.list_solver_jobs() assert jobs assert len(jobs) == page_meta.count @@ -67,9 +77,56 @@ async def test_solver_service_init_raises_configuration_error( with pytest.raises(ServiceConfigurationError, match="SolverService"): SolverService( catalog_service=catalog_service, - job_service=job_service, user_id=user_id, product_name=invalid_product_name, ) # Verify the RPC call was made to check consistency assert not mocked_rpc_client.request.called + + +async def test_job_service_get_job_success( + mocked_rpc_client: MockType, + job_service: JobService, +): + job_parent_resource_name = "solver-resource" + job_id = ProjectID("123e4567-e89b-12d3-a456-426614174000") + + # Act + result = await job_service.get_job(job_parent_resource_name, job_id) + + # Assert + assert isinstance(result, ProjectJobRpcGet) + assert result.job_parent_resource_name.startswith(job_parent_resource_name) + assert mocked_rpc_client.request.called + assert mocked_rpc_client.request.call_args.args == ( + "webserver", + "get_project_marked_as_job", + ) + assert ( + mocked_rpc_client.request.call_args.kwargs["job_parent_resource_name"] + == job_parent_resource_name + ) + assert mocked_rpc_client.request.call_args.kwargs["project_uuid"] == job_id + + +@pytest.mark.parametrize( + "client_exception_type,api_exception_type", + [ + (ProjectForbiddenRpcError, JobForbiddenAccessError), + (ProjectNotFoundRpcError, JobNotFoundError), + ], +) +async 
def test_job_service_get_job_exceptions( + mocker, + job_service: JobService, + client_exception_type: type[Exception], + api_exception_type: type[Exception], +): + job_parent_resource_name = "solver-resource" + job_id = ProjectID("123e4567-e89b-12d3-a456-426614174000") + # Patch the actual RPC interface method + patch_path = "servicelib.rabbitmq.rpc_interfaces.webserver.projects.get_project_marked_as_job" + mocker.patch(patch_path, side_effect=client_exception_type()) + + with pytest.raises(api_exception_type): + await job_service.get_job(job_parent_resource_name, job_id) diff --git a/services/api-server/tests/unit/service/test_service_studies.py b/services/api-server/tests/unit/service/test_service_studies.py index fa9b9921866e..5cf292340869 100644 --- a/services/api-server/tests/unit/service/test_service_studies.py +++ b/services/api-server/tests/unit/service/test_service_studies.py @@ -4,16 +4,15 @@ # pylint: disable=unused-variable from pytest_mock import MockType -from simcore_service_api_server._service_studies import StudyService +from simcore_service_api_server._service_jobs import JobService from simcore_service_api_server.models.schemas.studies import StudyID async def test_list_jobs_no_study_id( - mocked_rpc_client: MockType, - study_service: StudyService, + mocked_rpc_client: MockType, job_service: JobService ): # Test with default parameters - jobs, page_meta = await study_service.list_jobs() + jobs, page_meta = await job_service.list_study_jobs() assert isinstance(jobs, list) assert mocked_rpc_client.request.call_args.args == ( @@ -41,11 +40,11 @@ async def test_list_jobs_no_study_id( async def test_list_jobs_with_study_id( mocked_rpc_client: MockType, - study_service: StudyService, + job_service: JobService, ): # Test with a specific study ID study_id = StudyID("914c7c33-8fb6-4164-9787-7b88b5c148bf") - jobs, page_meta = await study_service.list_jobs(filter_by_study_id=study_id) + jobs, page_meta = await job_service.list_study_jobs(filter_by_study_id=study_id) assert isinstance(jobs, list) diff --git a/services/api-server/tests/unit/test_api_files.py b/services/api-server/tests/unit/test_api_files.py index 78a150c6c616..54f3577a33d6 100644 --- a/services/api-server/tests/unit/test_api_files.py +++ b/services/api-server/tests/unit/test_api_files.py @@ -5,6 +5,7 @@ # pylint: disable=unused-variable import datetime +import re from pathlib import Path from typing import Any from uuid import UUID @@ -13,7 +14,6 @@ import pytest import respx import yarl -from aioresponses import aioresponses as AioResponsesMock from faker import Faker from fastapi import status from fastapi.encoders import jsonable_encoder @@ -25,6 +25,7 @@ ) from models_library.basic_types import SHA256Str from pydantic import TypeAdapter +from pytest_mock import MockerFixture from pytest_simcore.helpers.httpx_calls_capture_models import ( CreateRespxMockCallback, HttpApiCallCaptureModel, @@ -233,12 +234,10 @@ async def test_get_upload_links( follow_up_request: str, client: AsyncClient, auth: httpx.BasicAuth, - storage_v0_service_mock: AioResponsesMock, + mocked_storage_rest_api_base: MockRouter, ): """Test that we can get data needed for performing multipart upload directly to S3""" - assert storage_v0_service_mock # nosec - msg = { "filename": DummyFileData.file().filename, "filesize": DummyFileData.file_size(), @@ -282,6 +281,28 @@ async def test_get_upload_links( raise AssertionError +async def test_get_upload_links_timeout( + client: AsyncClient, + auth: httpx.BasicAuth, + mocked_storage_rest_api_base: 
MockRouter, + mocker: MockerFixture, +): + mocked_endpoint = mocked_storage_rest_api_base.put( + re.compile(r"^http://[a-z\-_]*storage:[0-9]+/v0/locations/[0-9]+/files.+$"), + ).mock(side_effect=httpx.ReadTimeout("Mocked timeout error")) + + msg = { + "filename": DummyFileData.file().filename, + "filesize": DummyFileData.file_size(), + "sha256_checksum": DummyFileData.checksum(), + } + + response = await client.post(f"{API_VTAG}/files/content", json=msg, auth=auth) + + assert mocked_endpoint.called + assert response.status_code == status.HTTP_504_GATEWAY_TIMEOUT + + @pytest.mark.parametrize( "query", [ diff --git a/services/api-server/tests/unit/test_api_solver_jobs.py b/services/api-server/tests/unit/test_api_solver_jobs.py index 518d183603dc..8d2129a7b459 100644 --- a/services/api-server/tests/unit/test_api_solver_jobs.py +++ b/services/api-server/tests/unit/test_api_solver_jobs.py @@ -3,10 +3,11 @@ # pylint: disable=unused-variable # pylint: disable=too-many-arguments +from datetime import datetime from decimal import Decimal from pathlib import Path from typing import Any, Final -from uuid import UUID +from uuid import UUID, uuid4 import httpx import pytest @@ -15,7 +16,10 @@ from fastapi.encoders import jsonable_encoder from httpx import AsyncClient from models_library.generics import Envelope +from models_library.projects_nodes import Node +from models_library.rpc.webserver.projects import ProjectJobRpcGet from pydantic import TypeAdapter +from pytest_mock import MockType from pytest_simcore.helpers.httpx_calls_capture_models import ( CreateRespxMockCallback, HttpApiCallCaptureModel, @@ -202,9 +206,11 @@ def _get_pricing_unit_side_effect( ], ) async def test_start_solver_job_pricing_unit_with_payment( + mocked_app_dependencies: None, client: AsyncClient, mocked_webserver_rest_api_base: MockRouter, mocked_directorv2_rest_api_base: MockRouter, + mocked_webserver_rpc_api: dict[str, MockType], create_respx_mock_from_capture: CreateRespxMockCallback, auth: httpx.BasicAuth, project_tests_dir: Path, @@ -277,9 +283,11 @@ def _put_pricing_plan_and_unit_side_effect( async def test_get_solver_job_pricing_unit_no_payment( + mocked_app_dependencies: None, client: AsyncClient, mocked_webserver_rest_api_base: MockRouter, mocked_directorv2_rest_api_base: MockRouter, + mocked_webserver_rpc_api: dict[str, MockType], create_respx_mock_from_capture: CreateRespxMockCallback, auth: httpx.BasicAuth, project_tests_dir: Path, @@ -310,9 +318,11 @@ async def test_get_solver_job_pricing_unit_no_payment( async def test_start_solver_job_conflict( + mocked_app_dependencies: None, client: AsyncClient, mocked_webserver_rest_api_base: MockRouter, mocked_directorv2_rest_api_base: MockRouter, + mocked_webserver_rpc_api: dict[str, MockType], create_respx_mock_from_capture: CreateRespxMockCallback, auth: httpx.BasicAuth, project_tests_dir: Path, @@ -343,6 +353,57 @@ async def test_start_solver_job_conflict( assert f"{job_status.job_id}" == _job_id +@pytest.mark.parametrize( + "project_job_rpc_get", + [ + pytest.param( + ProjectJobRpcGet( + uuid=UUID("00000000-1234-5678-1234-123456789012"), + name="A study job", + description="A description of a study job with many node", + workbench={}, + created_at=datetime.fromisoformat("2023-02-01T00:00:00Z"), + modified_at=datetime.fromisoformat("2023-02-01T00:00:00Z"), + job_parent_resource_name="studies/96642f2a-a72c-11ef-8776-02420a00087d", + storage_assets_deleted=True, + ), + id="storage_assets_deleted", + ) + ], +) +async def test_start_solver_job_storage_data_missing( + 
client: AsyncClient, + mocked_webserver_rest_api_base: MockRouter, + mocked_directorv2_rest_api_base: MockRouter, + mocked_webserver_rpc_api: dict[str, MockType], + create_respx_mock_from_capture: CreateRespxMockCallback, + auth: httpx.BasicAuth, + project_tests_dir: Path, +): + _solver_key: str = "simcore/services/comp/itis/sleeper" + _version: str = "2.0.2" + _job_id: str = "b9faf8d8-4928-4e50-af40-3690712c5481" + + create_respx_mock_from_capture( + respx_mocks=[ + mocked_directorv2_rest_api_base, + mocked_webserver_rest_api_base, + ], + capture_path=project_tests_dir / "mocks" / "start_solver_job.json", + side_effects_callbacks=[ + _start_job_side_effect, + _get_inspect_job_side_effect(job_id=_job_id), + ], + ) + + response = await client.post( + f"{API_VTAG}/solvers/{_solver_key}/releases/{_version}/jobs/{_job_id}:start", + auth=auth, + ) + + assert response.status_code == status.HTTP_409_CONFLICT + + async def test_stop_job( client: AsyncClient, mocked_directorv2_rest_api_base: MockRouter, @@ -393,6 +454,7 @@ async def test_get_solver_job_outputs( mocked_webserver_rest_api_base: MockRouter, mocked_storage_rest_api_base: MockRouter, mocked_solver_job_outputs: None, + mocked_webserver_rpc_api: dict[str, MockType], create_respx_mock_from_capture: CreateRespxMockCallback, auth: httpx.BasicAuth, project_tests_dir: Path, @@ -442,3 +504,59 @@ def _wallet_side_effect( ) assert response.status_code == expected_status_code + + +@pytest.mark.parametrize( + "project_job_rpc_get", + [ + ProjectJobRpcGet( + uuid=UUID("12345678-1234-5678-1234-123456789012"), + name="A solver job", + description="A description of a solver job with a single node", + workbench={ + f"{uuid4()}": Node.model_validate( + Node.model_json_schema()["examples"][0] + ) + }, + created_at=datetime.fromisoformat("2023-01-01T00:00:00Z"), + modified_at=datetime.fromisoformat("2023-01-01T00:00:00Z"), + job_parent_resource_name="solvers/simcore%2Fservices%2Fcomp%2Fitis%2Fsleeper/releases/2.0.2", + storage_assets_deleted=True, + ) + ], +) +async def test_get_solver_job_outputs_assets_deleted( + client: AsyncClient, + mocked_webserver_rest_api_base: MockRouter, + mocked_storage_rest_api_base: MockRouter, + mocked_solver_job_outputs: None, + mocked_webserver_rpc_api: dict[str, MockType], + create_respx_mock_from_capture: CreateRespxMockCallback, + auth: httpx.BasicAuth, + project_tests_dir: Path, +): + def _sf( + request: httpx.Request, + path_params: dict[str, Any], + capture: HttpApiCallCaptureModel, + ) -> Any: + return capture.response_body + + create_respx_mock_from_capture( + respx_mocks=[ + mocked_webserver_rest_api_base, + mocked_storage_rest_api_base, + ], + capture_path=project_tests_dir / "mocks" / "get_solver_outputs.json", + side_effects_callbacks=[_sf, _sf, _sf, _sf, _sf], + ) + + _solver_key: Final[str] = "simcore/services/comp/isolve" + _version: Final[str] = "2.1.24" + _job_id: Final[str] = "1eefc09b-5d08-4022-bc18-33dedbbd7d0f" + response = await client.get( + f"{API_VTAG}/solvers/{_solver_key}/releases/{_version}/jobs/{_job_id}/outputs", + auth=auth, + ) + + assert response.status_code == status.HTTP_409_CONFLICT diff --git a/services/api-server/tests/unit/test_cli.py b/services/api-server/tests/unit/test_cli.py index febeca14b1f3..f96be2ffb2ff 100644 --- a/services/api-server/tests/unit/test_cli.py +++ b/services/api-server/tests/unit/test_cli.py @@ -34,7 +34,7 @@ def test_cli_list_settings(cli_runner: CliRunner, app_environment: EnvVarsDict): def test_main(app_environment: EnvVarsDict): - from 
simcore_service_api_server.main import the_app + from simcore_service_api_server.main import app_factory - assert the_app + the_app = app_factory() assert isinstance(the_app, FastAPI) diff --git a/services/api-server/tests/unit/test_core_settings.py b/services/api-server/tests/unit/test_core_settings.py index feb5052ab0fb..a2301b4951d5 100644 --- a/services/api-server/tests/unit/test_core_settings.py +++ b/services/api-server/tests/unit/test_core_settings.py @@ -3,38 +3,16 @@ # pylint: disable=redefined-outer-name -import pytest from pytest_simcore.helpers.monkeypatch_envs import ( EnvVarsDict, - delenvs_from_dict, - setenvs_from_dict, ) from simcore_service_api_server.core.settings import ApplicationSettings -@pytest.fixture -def app_environment( - monkeypatch: pytest.MonkeyPatch, - app_environment: EnvVarsDict, - external_envfile_dict: EnvVarsDict, -) -> EnvVarsDict: - """ - NOTE: To run against repo.config in osparc-config repo - - ln -s /path/to/osparc-config/deployments/mydeploy.com/repo.config .secrets - pytest --external-envfile=.secrets tests/unit/test_core_settings.py - - """ - if external_envfile_dict: - delenvs_from_dict(monkeypatch, app_environment, raising=False) - return setenvs_from_dict( - monkeypatch, - {**external_envfile_dict}, - ) - return app_environment +def test_valid_application_settings(app_environment: EnvVarsDict): + assert app_environment + settings = ApplicationSettings() # type: ignore + assert settings -def test_unit_app_environment(app_environment: EnvVarsDict): - assert app_environment - settings = ApplicationSettings.create_from_envs() - print("captured settings: \n", settings.model_dump_json(indent=2)) + assert settings == ApplicationSettings.create_from_envs() diff --git a/services/api-server/tests/unit/test_services_solver_job_models_converters.py b/services/api-server/tests/unit/test_services_solver_job_models_converters.py index 5be97bdca2ae..97a777387a7c 100644 --- a/services/api-server/tests/unit/test_services_solver_job_models_converters.py +++ b/services/api-server/tests/unit/test_services_solver_job_models_converters.py @@ -6,7 +6,8 @@ from faker import Faker from models_library.projects import Project from models_library.projects_nodes import InputsDict, InputTypes, SimCoreFileLink -from pydantic import HttpUrl, RootModel, TypeAdapter, create_model +from pydantic import RootModel, TypeAdapter, create_model +from simcore_service_api_server.models.api_resources import JobLinks from simcore_service_api_server.models.schemas.files import File from simcore_service_api_server.models.schemas.jobs import ArgumentTypes, Job, JobInputs from simcore_service_api_server.models.schemas.solvers import Solver @@ -182,9 +183,10 @@ def test_create_job_from_project(faker: Faker): "quality": {}, "tags": [], "state": { - "locked": { - "value": False, + "share_state": { "status": "CLOSED", + "locked": False, + "current_user_groupids": [], }, "state": {"value": "SUCCESS"}, }, @@ -207,8 +209,11 @@ def test_create_job_from_project(faker: Faker): solver_key = "simcore/services/comp/itis/sleeper" solver_version = "2.0.2" - def fake_url_for(*args, **kwargs) -> HttpUrl: - return HttpUrl(faker.url()) + fake_job_links = JobLinks( + url_template=faker.url() + "/{job_id}", + runner_url_template=faker.url(), + outputs_url_template=faker.url() + "/{job_id}", + ) solver = Solver( id=solver_key, @@ -220,13 +225,17 @@ def fake_url_for(*args, **kwargs) -> HttpUrl: ) job = create_job_from_project( - solver_or_program=solver, project=project, url_for=fake_url_for + 
solver_or_program=solver, + project=project, + job_links=fake_job_links, ) assert job.id == project.uuid + field_names = Job.model_fields.keys() + non_propagated_fields = { - name for name in job.model_fields if name.endswith("url") + name for name in field_names if name.endswith("url") }.union({"name"}) assert all(getattr(job, _) for _ in non_propagated_fields) diff --git a/services/api-server/tests/unit/test_tasks.py b/services/api-server/tests/unit/test_tasks.py new file mode 100644 index 000000000000..02af824cc87d --- /dev/null +++ b/services/api-server/tests/unit/test_tasks.py @@ -0,0 +1,168 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument + + +from typing import Literal + +import pytest +from celery.exceptions import CeleryError # pylint: disable=no-name-in-module +from faker import Faker +from fastapi import status +from httpx import AsyncClient, BasicAuth +from models_library.api_schemas_long_running_tasks.tasks import TaskGet, TaskStatus +from models_library.progress_bar import ProgressReport, ProgressStructuredMessage +from models_library.utils.json_schema import GenerateResolvedJsonSchema +from pytest_mock import MockerFixture, MockType +from servicelib.celery.models import TaskState +from servicelib.celery.models import TaskStatus as CeleryTaskStatus +from servicelib.celery.models import TaskUUID +from simcore_service_api_server.api.routes import tasks as task_routes +from simcore_service_api_server.models.schemas.base import ApiServerEnvelope + +pytest_simcore_core_services_selection = ["postgres", "rabbit"] +pytest_plugins = [ + "pytest_simcore.celery_library_mocks", +] + +_faker = Faker() + + +@pytest.fixture +def mock_task_manager( + mocker: MockerFixture, mock_task_manager_object: MockType +) -> MockType: + + def _get_task_manager(app): + return mock_task_manager_object + + mocker.patch.object(task_routes, "get_task_manager", _get_task_manager) + return mock_task_manager_object + + +async def test_list_celery_tasks( + mock_task_manager: MockType, + client: AsyncClient, + auth: BasicAuth, +): + + response = await client.get("/v0/tasks", auth=auth) + assert mock_task_manager.list_tasks.called + assert response.status_code == status.HTTP_200_OK + + result = ApiServerEnvelope[list[TaskGet]].model_validate_json(response.text) + assert len(result.data) > 0 + assert all(isinstance(task, TaskGet) for task in result.data) + task = result.data[0] + assert task.abort_href == f"/v0/tasks/{task.task_id}:cancel" + assert task.result_href == f"/v0/tasks/{task.task_id}/result" + assert task.status_href == f"/v0/tasks/{task.task_id}" + + +async def test_get_task_status( + mock_task_manager: MockType, + client: AsyncClient, + auth: BasicAuth, +): + task_id = f"{_faker.uuid4()}" + response = await client.get(f"/v0/tasks/{task_id}", auth=auth) + assert mock_task_manager.get_task_status.called + assert response.status_code == status.HTTP_200_OK + TaskStatus.model_validate_json(response.text) + + +async def test_cancel_task( + mock_task_manager: MockType, + client: AsyncClient, + auth: BasicAuth, +): + task_id = f"{_faker.uuid4()}" + response = await client.post(f"/v0/tasks/{task_id}:cancel", auth=auth) + assert mock_task_manager.cancel_task.called + assert response.status_code == status.HTTP_204_NO_CONTENT + + +async def test_get_task_result( + mock_task_manager: MockType, + client: AsyncClient, + auth: BasicAuth, +): + task_id = f"{_faker.uuid4()}" + response = await client.get(f"/v0/tasks/{task_id}/result", auth=auth) + assert response.status_code == 
status.HTTP_200_OK + assert mock_task_manager.get_task_result.called + assert f"{mock_task_manager.get_task_result.call_args[1]['task_uuid']}" == task_id + + +@pytest.mark.parametrize( + "method, url, list_tasks_return_value, get_task_status_return_value, cancel_task_return_value, get_task_result_return_value, expected_status_code", + [ + ( + "GET", + "/v0/tasks", + CeleryError(), + None, + None, + None, + status.HTTP_503_SERVICE_UNAVAILABLE, + ), + ( + "GET", + f"/v0/tasks/{_faker.uuid4()}", + None, + CeleryError(), + None, + None, + status.HTTP_503_SERVICE_UNAVAILABLE, + ), + ( + "POST", + f"/v0/tasks/{_faker.uuid4()}:cancel", + None, + None, + CeleryError(), + None, + status.HTTP_503_SERVICE_UNAVAILABLE, + ), + ( + "GET", + f"/v0/tasks/{_faker.uuid4()}/result", + None, + CeleryError(), + None, + None, + status.HTTP_503_SERVICE_UNAVAILABLE, + ), + ( + "GET", + f"/v0/tasks/{_faker.uuid4()}/result", + None, + CeleryTaskStatus( + task_uuid=TaskUUID("123e4567-e89b-12d3-a456-426614174000"), + task_state=TaskState.STARTED, + progress_report=ProgressReport( + actual_value=0.5, + total=1.0, + unit="Byte", + message=ProgressStructuredMessage.model_validate( + ProgressStructuredMessage.model_json_schema( + schema_generator=GenerateResolvedJsonSchema + )["examples"][0] + ), + ), + ), + None, + None, + status.HTTP_404_NOT_FOUND, + ), + ], +) +async def test_celery_error_propagation( + mock_task_manager: MockType, + client: AsyncClient, + auth: BasicAuth, + method: Literal["GET", "POST"], + url: str, + expected_status_code: int, +): + response = await client.request(method=method, url=url, auth=auth) + assert response.status_code == expected_status_code diff --git a/services/autoscaling/Dockerfile b/services/autoscaling/Dockerfile index 2f854f730281..cd1a088ec083 100644 --- a/services/autoscaling/Dockerfile +++ b/services/autoscaling/Dockerfile @@ -2,7 +2,7 @@ # Define arguments in the global scope ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build FROM python:${PYTHON_VERSION}-slim-bookworm AS base-arm64 @@ -34,6 +34,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=private \ set -eux; \ apt-get update; \ apt-get install -y --no-install-recommends \ + fd-find \ gosu \ ca-certificates \ curl \ @@ -109,10 +110,7 @@ RUN uv venv "${VIRTUAL_ENV}" -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools + WORKDIR /build @@ -125,6 +123,9 @@ WORKDIR /build FROM build AS prod-only-deps ENV SC_BUILD_TARGET=prod-only-deps +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy WORKDIR /build/services/autoscaling @@ -150,8 +151,6 @@ ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production ENV PYTHONOPTIMIZE=TRUE -# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode -ENV UV_COMPILE_BYTECODE=1 WORKDIR /home/scu # ensure home folder is read/writable for user scu diff --git a/services/autoscaling/docker/boot.sh b/services/autoscaling/docker/boot.sh index c78cf322c2d1..8a5d3adb7092 100755 --- a/services/autoscaling/docker/boot.sh +++ b/services/autoscaling/docker/boot.sh @@ -24,7 +24,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/autoscaling - uv pip --quiet sync requirements/dev.txt + uv pip --quiet sync --link-mode=copy requirements/dev.txt cd - uv pip list fi @@ -32,7 +32,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT 
pre-installs debugpy if command -v uv >/dev/null 2>&1; then - uv pip install debugpy + uv pip install --link-mode=copy debugpy else pip install debugpy fi @@ -47,19 +47,22 @@ SERVER_LOG_LEVEL=$(echo "${APP_LOG_LEVEL}" | tr '[:upper:]' '[:lower:]') echo "$INFO" "Log-level app/server: $APP_LOG_LEVEL/$SERVER_LOG_LEVEL" if [ "${SC_BOOT_MODE}" = "debug" ]; then - reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path "*/src/*" ! -path "*.*" -exec echo '--reload-dir {} \' \;) + reload_dir_packages=$(fdfind src /devel/packages --exec echo '--reload-dir {} ' | tr '\n' ' ') exec sh -c " cd services/autoscaling/src/simcore_service_autoscaling && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${AUTOSCALING_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${AUTOSCALING_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ - $reload_dir_packages + $reload_dir_packages \ --reload-dir . \ --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_autoscaling.main:the_app \ + exec uvicorn \ + --factory simcore_service_autoscaling.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/autoscaling/docker/entrypoint.sh b/services/autoscaling/docker/entrypoint.sh index ad982fd8d5cc..651a1ea875f0 100755 --- a/services/autoscaling/docker/entrypoint.sh +++ b/services/autoscaling/docker/entrypoint.sh @@ -26,6 +26,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE @@ -63,10 +64,9 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; - # change user property of files already around + fdfind --owner ":$SC_USER_ID" --exclude proc --exec-batch chgrp --no-dereference "$CONT_GROUPNAME" . '/' echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fdfind --owner "$SC_USER_ID:" --exclude proc --exec-batch chown --no-dereference "$SC_USER_NAME" . 
'/' fi fi diff --git a/services/autoscaling/requirements/_base.txt b/services/autoscaling/requirements/_base.txt index adbb78fa0f4a..d892e757f8b0 100644 --- a/services/autoscaling/requirements/_base.txt +++ b/services/autoscaling/requirements/_base.txt @@ -28,7 +28,7 @@ aiofiles==24.1.0 # aioboto3 aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -80,6 +80,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi attrs==25.3.0 @@ -125,9 +127,10 @@ certifi==2025.4.26 # httpcore # httpx # requests + # sentry-sdk charset-normalizer==3.4.2 # via requests -click==8.1.8 +click==8.2.1 # via # -c requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # dask @@ -145,12 +148,6 @@ dask==2025.5.0 # -c requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # -r requirements/_base.in # distributed -deprecated==1.2.18 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions distributed==2025.5.0 # via # -c requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt @@ -165,12 +162,14 @@ exceptiongroup==1.3.0 # via aio-pika fast-depends==2.4.12 # via faststream -fastapi==0.115.12 +fastapi==0.116.1 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi-lifespan-manager -fastapi-cli==0.0.7 +fastapi-cli==0.0.8 # via fastapi +fastapi-cloud-cli==0.1.5 + # via fastapi-cli fastapi-lifespan-manager==0.1.4 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in faststream==0.5.41 @@ -231,6 +230,7 @@ httpx==0.28.1 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi + # fastapi-cloud-cli hyperframe==6.1.0 # via h2 idna==3.10 @@ -279,6 +279,12 @@ jmespath==1.0.1 # aiobotocore # boto3 # botocore +jsonref==1.1.0 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.23.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in @@ -309,7 +315,7 @@ multidict==6.4.4 # aiobotocore # aiohttp # yarl -opentelemetry-api==1.33.1 +opentelemetry-api==1.34.1 # via # -r 
requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in @@ -318,6 +324,7 @@ opentelemetry-api==1.33.1 # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-botocore # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx @@ -327,76 +334,82 @@ opentelemetry-api==1.33.1 # opentelemetry-propagator-aws-xray # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.33.1 +opentelemetry-exporter-otlp==1.34.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.33.1 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.33.1 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.33.1 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.54b1 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-botocore # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.54b1 +opentelemetry-instrumentation-aio-pika==0.55b1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-asgi==0.54b1 +opentelemetry-instrumentation-asgi==0.55b1 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-botocore==0.54b1 +opentelemetry-instrumentation-asyncpg==0.55b1 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-botocore==0.55b1 # via -r requirements/../../../packages/aws-library/requirements/_base.in -opentelemetry-instrumentation-fastapi==0.54b1 +opentelemetry-instrumentation-fastapi==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.54b1 +opentelemetry-instrumentation-httpx==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-logging==0.54b1 +opentelemetry-instrumentation-logging==0.55b1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.54b1 +opentelemetry-instrumentation-redis==0.55b1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r 
requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.54b1 +opentelemetry-instrumentation-requests==0.55b1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in opentelemetry-propagator-aws-xray==1.0.2 # via opentelemetry-instrumentation-botocore -opentelemetry-proto==1.33.1 +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.33.1 +opentelemetry-sdk==1.34.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.54b1 +opentelemetry-semantic-conventions==0.55b1 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-botocore # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.54b1 +opentelemetry-util-http==0.55b1 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi @@ -463,7 +476,7 @@ propcache==0.3.1 # via # aiohttp # yarl -protobuf==5.29.4 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto @@ -475,7 +488,7 @@ psutil==7.0.0 # distributed pycryptodome==3.23.0 # via stream-zip -pydantic==2.11.4 +pydantic==2.11.7 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -526,11 +539,12 @@ pydantic==2.11.4 # -r requirements/../../../packages/settings-library/requirements/_base.in # fast-depends # fastapi + # fastapi-cloud-cli # pydantic-extra-types # pydantic-settings pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.4 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -687,9 +701,9 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via opentelemetry-exporter-otlp-proto-http -rich==14.0.0 +rich==14.1.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -697,14 +711,20 @@ rich==14.0.0 # -r requirements/../../../packages/settings-library/requirements/_base.in # rich-toolkit # typer -rich-toolkit==0.14.7 - # via fastapi-cli +rich-toolkit==0.15.0 + # via + # fastapi-cli + # fastapi-cloud-cli +rignore==0.6.4 + # via 
fastapi-cloud-cli rpds-py==0.25.0 # via # jsonschema # referencing s3transfer==0.11.3 # via boto3 +sentry-sdk==2.35.0 + # via fastapi-cloud-cli sh==2.2.2 # via -r requirements/../../../packages/aws-library/requirements/_base.in shellingham==1.5.4 @@ -712,12 +732,14 @@ shellingham==1.5.4 six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via anyio + # via + # anyio + # asgi-lifespan sortedcontainers==2.4.0 # via # -c requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # distributed -starlette==0.46.2 +starlette==0.47.2 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -772,13 +794,14 @@ tqdm==4.67.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.15.4 +typer==0.16.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # fastapi-cli + # fastapi-cloud-cli types-aiobotocore==2.22.0 # via -r requirements/../../../packages/aws-library/requirements/_base.in types-aiobotocore-ec2==2.22.0 @@ -791,18 +814,23 @@ types-awscrt==0.27.2 # via botocore-stubs types-python-dateutil==2.9.0.20250516 # via arrow -typing-extensions==4.13.2 +typing-extensions==4.14.1 # via # aiodebug # anyio # exceptiongroup # fastapi # faststream + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pydantic # pydantic-core # pydantic-extra-types # rich-toolkit + # starlette # typer # types-aiobotocore # types-aiobotocore-ec2 @@ -811,7 +839,7 @@ typing-extensions==4.13.2 # typing-inspection typing-inspection==0.4.0 # via pydantic -urllib3==2.4.0 +urllib3==2.5.0 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -841,10 +869,12 @@ urllib3==2.4.0 # botocore # distributed # requests + # sentry-sdk uvicorn==0.34.2 # via # fastapi # fastapi-cli + # fastapi-cloud-cli uvloop==0.21.0 # via uvicorn watchfiles==1.0.5 @@ -854,7 +884,6 @@ websockets==15.0.1 wrapt==1.17.2 # via # aiobotocore - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-httpx diff --git a/services/autoscaling/requirements/_test.txt b/services/autoscaling/requirements/_test.txt index 669083ffa99e..fa2a061ebfda 100644 --- a/services/autoscaling/requirements/_test.txt +++ b/services/autoscaling/requirements/_test.txt @@ -9,7 +9,9 @@ anyio==4.9.0 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # 
via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in attrs==25.3.0 # via # -c requirements/_base.txt @@ -52,7 +54,7 @@ charset-normalizer==3.4.2 # via # -c requirements/_base.txt # requests -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # flask @@ -81,7 +83,7 @@ flask==3.1.1 # via # flask-cors # moto -flask-cors==6.0.0 +flask-cors==6.0.1 # via moto graphql-core==3.2.6 # via moto @@ -173,7 +175,9 @@ packaging==25.0 pathable==0.4.4 # via jsonschema-path pluggy==1.6.0 - # via pytest + # via + # pytest + # pytest-cov ply==3.11 # via jsonpath-ng pprintpp==0.4.0 @@ -186,7 +190,7 @@ py-partiql-parser==0.6.1 # via moto pycparser==2.22 # via cffi -pydantic==2.11.4 +pydantic==2.11.7 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -195,9 +199,13 @@ pydantic-core==2.33.2 # via # -c requirements/_base.txt # pydantic +pygments==2.19.1 + # via + # -c requirements/_base.txt + # pytest pyparsing==3.2.3 # via moto -pytest==8.3.5 +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio @@ -205,13 +213,13 @@ pytest==8.3.5 # pytest-icdiff # pytest-mock # pytest-sugar -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in -pytest-cov==6.1.1 +pytest-cov==6.2.1 # via -r requirements/_test.in pytest-icdiff==0.9 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -248,7 +256,7 @@ referencing==0.35.1 # jsonschema-specifications regex==2024.11.6 # via cfn-lint -requests==2.32.3 +requests==2.32.4 # via # -c requirements/_base.txt # docker @@ -270,7 +278,7 @@ s3transfer==0.11.3 # via # -c requirements/_base.txt # boto3 -setuptools==80.7.1 +setuptools==80.9.0 # via moto six==1.17.0 # via @@ -314,7 +322,7 @@ types-awscrt==0.27.2 # botocore-stubs types-pyyaml==6.0.12.20250516 # via -r requirements/_test.in -typing-extensions==4.13.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # anyio @@ -334,7 +342,7 @@ typing-inspection==0.4.0 # pydantic tzdata==2025.2 # via faker -urllib3==2.4.0 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/services/autoscaling/requirements/_tools.txt b/services/autoscaling/requirements/_tools.txt index c76d3992bbee..cebc096a0295 100644 --- a/services/autoscaling/requirements/_tools.txt +++ b/services/autoscaling/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -28,7 +28,7 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.1.0 # via @@ -43,7 +43,9 @@ packaging==25.0 # black # build pathspec==0.12.1 - # via black + # via + # black + # mypy pip==25.1.1 # via pip-tools pip-tools==7.4.1 @@ -70,13 +72,13 @@ pyyaml==6.0.2 # watchdog ruff==0.11.10 # via -r requirements/../../../requirements/devenv.txt -setuptools==80.7.1 +setuptools==80.9.0 # via # -c requirements/_test.txt # pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.13.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/services/autoscaling/src/simcore_service_autoscaling/_meta.py b/services/autoscaling/src/simcore_service_autoscaling/_meta.py index c421cfae966a..645f463f72ea 
100644 --- a/services/autoscaling/src/simcore_service_autoscaling/_meta.py +++ b/services/autoscaling/src/simcore_service_autoscaling/_meta.py @@ -8,7 +8,7 @@ info: Final = PackageInfo(package_name="simcore-service-autoscaling") __version__: Final[VersionStr] = info.__version__ -APP_NAME: Final[str] = info.project_name +APP_NAME: Final[str] = info.app_name API_VERSION: Final[VersionStr] = info.__version__ VERSION: Final[Version] = info.version API_VTAG: Final[VersionTag] = TypeAdapter(VersionTag).validate_python( diff --git a/services/autoscaling/src/simcore_service_autoscaling/cli.py b/services/autoscaling/src/simcore_service_autoscaling/cli.py index c02e20e348b1..c280b67368af 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/cli.py +++ b/services/autoscaling/src/simcore_service_autoscaling/cli.py @@ -19,6 +19,6 @@ def run(): """Runs application""" typer.secho("Sorry, this entrypoint is intentionally disabled. Use instead") typer.secho( - "$ uvicorn simcore_service_autoscaling.main:the_app", + "$ uvicorn --factory simcore_service_autoscaling.main:app_factory", fg=typer.colors.BLUE, ) diff --git a/services/autoscaling/src/simcore_service_autoscaling/core/application.py b/services/autoscaling/src/simcore_service_autoscaling/core/application.py index 95fcff3b4b76..ba833512565e 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/core/application.py +++ b/services/autoscaling/src/simcore_service_autoscaling/core/application.py @@ -17,8 +17,12 @@ APP_STARTED_DYNAMIC_BANNER_MSG, ) from ..api.routes import setup_api_routes -from ..modules.auto_scaling_task import setup as setup_auto_scaler_background_task -from ..modules.buffer_machines_pool_task import setup as setup_buffer_machines_pool_task +from ..modules.cluster_scaling.auto_scaling_task import ( + setup as setup_auto_scaler_background_task, +) +from ..modules.cluster_scaling.warm_buffer_machines_pool_task import ( + setup as setup_warm_buffer_machines_pool_task, +) from ..modules.docker import setup as setup_docker from ..modules.ec2 import setup as setup_ec2 from ..modules.instrumentation import setup as setup_instrumentation @@ -27,28 +31,10 @@ from ..modules.ssm import setup as setup_ssm from .settings import ApplicationSettings -_LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR -_NOISY_LOGGERS = ( - "aiobotocore", - "aio_pika", - "aiormq", - "botocore", - "werkzeug", -) - logger = logging.getLogger(__name__) def create_app(settings: ApplicationSettings) -> FastAPI: - # keep mostly quiet noisy loggers - quiet_level: int = max( - min(logging.root.level + _LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING - ) - for name in _NOISY_LOGGERS: - logging.getLogger(name).setLevel(quiet_level) - - logger.info("app settings: %s", settings.model_dump_json(indent=1)) - app = FastAPI( debug=settings.AUTOSCALING_DEBUG, title=APP_NAME, @@ -78,7 +64,7 @@ def create_app(settings: ApplicationSettings) -> FastAPI: initialize_fastapi_app_tracing(app) setup_auto_scaler_background_task(app) - setup_buffer_machines_pool_task(app) + setup_warm_buffer_machines_pool_task(app) # ERROR HANDLERS diff --git a/services/autoscaling/src/simcore_service_autoscaling/core/settings.py b/services/autoscaling/src/simcore_service_autoscaling/core/settings.py index ff67aeeaab14..c240dcc68c32 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/core/settings.py +++ b/services/autoscaling/src/simcore_service_autoscaling/core/settings.py @@ -3,6 +3,7 @@ from typing import Annotated, Final, Self, cast from aws_library.ec2 import 
EC2InstanceBootSpecific, EC2Tags +from common_library.logging.logging_utils_filtering import LoggerName, MessageSubstring from fastapi import FastAPI from models_library.basic_types import LogLevel, PortInt, VersionTag from models_library.clusters import ClusterAuthentication @@ -17,7 +18,7 @@ model_validator, ) from pydantic_settings import SettingsConfigDict -from servicelib.logging_utils_filtering import LoggerName, MessageSubstring +from servicelib.logging_utils import LogLevelInt from settings_library.application import BaseApplicationSettings from settings_library.base import BaseCustomSettings from settings_library.docker_registry import RegistrySettings @@ -34,8 +35,7 @@ AUTOSCALING_ENV_PREFIX: Final[str] = "AUTOSCALING_" -class AutoscalingSSMSettings(SSMSettings): - ... +class AutoscalingSSMSettings(SSMSettings): ... class AutoscalingEC2Settings(EC2Settings): @@ -112,13 +112,14 @@ class EC2InstancesSettings(BaseCustomSettings): " this is required to start a new EC2 instance", ), ] - EC2_INSTANCES_SUBNET_ID: Annotated[ - str, + EC2_INSTANCES_SUBNET_IDS: Annotated[ + list[str], Field( min_length=1, description="A subnet is a range of IP addresses in your VPC " " (https://docs.aws.amazon.com/vpc/latest/userguide/configure-subnets.html), " - "this is required to start a new EC2 instance", + "this is required to start a new EC2 instance. The subnets are used in the given order " + "until the capacity is used up.", ), ] EC2_INSTANCES_TIME_BEFORE_DRAINING: Annotated[ @@ -361,8 +362,8 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ] = False @cached_property - def LOG_LEVEL(self): # noqa: N802 - return self.AUTOSCALING_LOGLEVEL + def log_level(self) -> LogLevelInt: + return cast(LogLevelInt, self.AUTOSCALING_LOGLEVEL) @field_validator("AUTOSCALING_LOGLEVEL", mode="before") @classmethod diff --git a/services/autoscaling/src/simcore_service_autoscaling/main.py b/services/autoscaling/src/simcore_service_autoscaling/main.py index 102258cac707..b1f7055d75df 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/main.py +++ b/services/autoscaling/src/simcore_service_autoscaling/main.py @@ -1,22 +1,39 @@ -"""Main application to be deployed by uvicorn (or equivalent) server - -""" +"""Main application to be deployed by uvicorn (or equivalent) server""" import logging +from typing import Final +from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.logging_utils import config_all_loggers +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from simcore_service_autoscaling.core.application import create_app from simcore_service_autoscaling.core.settings import ApplicationSettings -the_settings = ApplicationSettings.create_from_envs() -logging.basicConfig(level=the_settings.log_level) -logging.root.setLevel(the_settings.log_level) -config_all_loggers( - log_format_local_dev_enabled=the_settings.AUTOSCALING_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=the_settings.AUTOSCALING_LOG_FILTER_MAPPING, - tracing_settings=the_settings.AUTOSCALING_TRACING, +_logger = logging.getLogger(__name__) + +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aiobotocore", + "aio_pika", + "aiormq", + "botocore", + "werkzeug", ) -# SINGLETON FastAPI app -the_app: FastAPI = create_app(the_settings) + +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = create_logging_shutdown_event( + 
log_format_local_dev_enabled=app_settings.AUTOSCALING_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.AUTOSCALING_LOG_FILTER_MAPPING, + tracing_settings=app_settings.AUTOSCALING_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) + + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app diff --git a/services/autoscaling/src/simcore_service_autoscaling/models.py b/services/autoscaling/src/simcore_service_autoscaling/models.py index c77f9fe349cc..7645b300e8de 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/models.py +++ b/services/autoscaling/src/simcore_service_autoscaling/models.py @@ -47,8 +47,7 @@ class AssociatedInstance(_BaseInstance): @dataclass(frozen=True, kw_only=True, slots=True) -class NonAssociatedInstance(_BaseInstance): - ... +class NonAssociatedInstance(_BaseInstance): ... @dataclass(frozen=True, kw_only=True, slots=True) @@ -68,9 +67,9 @@ class Cluster: # pylint: disable=too-many-instance-attributes "description": "This is a EC2-backed docker node which is drained (cannot accept tasks)" } ) - buffer_drained_nodes: list[AssociatedInstance] = field( + hot_buffer_drained_nodes: list[AssociatedInstance] = field( metadata={ - "description": "This is a EC2-backed docker node which is drained in the reserve if this is enabled (with no tasks)" + "description": "This is a EC2-backed docker node which is drained in the reserve if this is enabled (with no tasks, a.k.a. hot buffer)" } ) pending_ec2s: list[NonAssociatedInstance] = field( @@ -83,9 +82,9 @@ class Cluster: # pylint: disable=too-many-instance-attributes "description": "This is an existing EC2 instance that never properly joined the cluster and is deemed as broken and will be terminated" } ) - buffer_ec2s: list[NonAssociatedInstance] = field( + warm_buffer_ec2s: list[NonAssociatedInstance] = field( metadata={ - "description": "This is a prepared stopped EC2 instance, not yet associated to a docker node, ready to be used" + "description": "This is a prepared stopped EC2 instance, not yet associated to a docker node, ready to be used (a.k.a. 
warm buffer)" } ) disconnected_nodes: list[Node] = field( @@ -121,7 +120,7 @@ def total_number_of_machines(self) -> int: len(self.active_nodes) + len(self.pending_nodes) + len(self.drained_nodes) - + len(self.buffer_drained_nodes) + + len(self.hot_buffer_drained_nodes) + len(self.pending_ec2s) + len(self.broken_ec2s) + len(self.terminating_nodes) @@ -138,10 +137,10 @@ def _get_instance_ids( f"Cluster(active-nodes: count={len(self.active_nodes)} {_get_instance_ids(self.active_nodes)}, " f"pending-nodes: count={len(self.pending_nodes)} {_get_instance_ids(self.pending_nodes)}, " f"drained-nodes: count={len(self.drained_nodes)} {_get_instance_ids(self.drained_nodes)}, " - f"reserve-drained-nodes: count={len(self.buffer_drained_nodes)} {_get_instance_ids(self.buffer_drained_nodes)}, " + f"hot-buffer-drained-nodes: count={len(self.hot_buffer_drained_nodes)} {_get_instance_ids(self.hot_buffer_drained_nodes)}, " f"pending-ec2s: count={len(self.pending_ec2s)} {_get_instance_ids(self.pending_ec2s)}, " f"broken-ec2s: count={len(self.broken_ec2s)} {_get_instance_ids(self.broken_ec2s)}, " - f"buffer-ec2s: count={len(self.buffer_ec2s)} {_get_instance_ids(self.buffer_ec2s)}, " + f"warm-buffer-ec2s: count={len(self.warm_buffer_ec2s)} {_get_instance_ids(self.warm_buffer_ec2s)}, " f"disconnected-nodes: count={len(self.disconnected_nodes)}, " f"terminating-nodes: count={len(self.terminating_nodes)} {_get_instance_ids(self.terminating_nodes)}, " f"retired-nodes: count={len(self.retired_nodes)} {_get_instance_ids(self.retired_nodes)}, " @@ -159,7 +158,7 @@ class DaskTask: @dataclass(kw_only=True, slots=True) -class BufferPool: +class WarmBufferPool: ready_instances: set[EC2InstanceData] = field(default_factory=set) pending_instances: set[EC2InstanceData] = field(default_factory=set) waiting_to_pull_instances: set[EC2InstanceData] = field(default_factory=set) @@ -170,7 +169,7 @@ class BufferPool: def __repr__(self) -> str: return ( - f"BufferPool(ready-count={len(self.ready_instances)}, " + f"WarmBufferPool(ready-count={len(self.ready_instances)}, " f"pending-count={len(self.pending_instances)}, " f"waiting-to-pull-count={len(self.waiting_to_pull_instances)}, " f"waiting-to-stop-count={len(self.waiting_to_stop_instances)}, " @@ -213,20 +212,20 @@ def remove_instance(self, instance: EC2InstanceData) -> None: @dataclass -class BufferPoolManager: - buffer_pools: dict[InstanceTypeType, BufferPool] = field( - default_factory=lambda: defaultdict(BufferPool) +class WarmBufferPoolManager: + buffer_pools: dict[InstanceTypeType, WarmBufferPool] = field( + default_factory=lambda: defaultdict(WarmBufferPool) ) def __repr__(self) -> str: - return f"BufferPoolManager({dict(self.buffer_pools)})" + return f"WarmBufferPoolManager({dict(self.buffer_pools)})" - def flatten_buffer_pool(self) -> BufferPool: + def flatten_buffer_pool(self) -> WarmBufferPool: """returns a flattened buffer pool with all the EC2InstanceData""" - flat_pool = BufferPool() + flat_pool = WarmBufferPool() for buffer_pool in self.buffer_pools.values(): - for f in fields(BufferPool): + for f in fields(WarmBufferPool): getattr(flat_pool, f.name).update(getattr(buffer_pool, f.name)) return flat_pool diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_base.py b/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_base.py deleted file mode 100644 index b9df042c622d..000000000000 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_base.py +++ /dev/null @@ -1,80 +0,0 
@@ -from abc import ABC, abstractmethod -from dataclasses import dataclass - -from aws_library.ec2 import EC2InstanceData, EC2Tags, Resources -from fastapi import FastAPI -from models_library.docker import DockerLabelKey -from models_library.generated_models.docker_rest_api import Node as DockerNode -from types_aiobotocore_ec2.literals import InstanceTypeType - -from ..models import AssociatedInstance -from ..utils import utils_docker - - -@dataclass -class BaseAutoscaling(ABC): # pragma: no cover - @staticmethod - @abstractmethod - async def get_monitored_nodes(app: FastAPI) -> list[DockerNode]: ... - - @staticmethod - @abstractmethod - def get_ec2_tags(app: FastAPI) -> EC2Tags: ... - - @staticmethod - @abstractmethod - def get_new_node_docker_tags( - app: FastAPI, ec2_instance_data: EC2InstanceData - ) -> dict[DockerLabelKey, str]: ... - - @staticmethod - @abstractmethod - async def list_unrunnable_tasks(app: FastAPI) -> list: ... - - @staticmethod - @abstractmethod - def get_task_required_resources(task) -> Resources: ... - - @staticmethod - @abstractmethod - async def get_task_defined_instance( - app: FastAPI, task - ) -> InstanceTypeType | None: ... - - @staticmethod - @abstractmethod - async def compute_node_used_resources( - app: FastAPI, instance: AssociatedInstance - ) -> Resources: ... - - @staticmethod - @abstractmethod - async def compute_cluster_used_resources( - app: FastAPI, instances: list[AssociatedInstance] - ) -> Resources: ... - - @staticmethod - @abstractmethod - async def compute_cluster_total_resources( - app: FastAPI, instances: list[AssociatedInstance] - ) -> Resources: ... - - @staticmethod - @abstractmethod - async def is_instance_active( - app: FastAPI, instance: AssociatedInstance - ) -> bool: ... - - @staticmethod - @abstractmethod - async def is_instance_retired( - app: FastAPI, instance: AssociatedInstance - ) -> bool: ... - - @staticmethod - def is_instance_drained(instance: AssociatedInstance) -> bool: - return not utils_docker.is_node_osparc_ready(instance.node) - - @staticmethod - @abstractmethod - async def try_retire_nodes(app: FastAPI) -> None: ... 
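The hunk above deletes the BaseAutoscaling abstract base class (a dataclass of abstract staticmethods); later in this patch it is replaced by the structural AutoscalingProvider protocol in modules/cluster_scaling/_provider_protocol.py, and DynamicAutoscalingProvider / ComputationalAutoscalingProvider become plain classes that satisfy it by shape rather than by inheritance. A minimal sketch of that structural-typing idea follows; the signatures are simplified and the names ScalingProvider / DummyProvider are illustration-only stand-ins, not part of the patch:

# sketch only: structural typing with typing.Protocol, as used by the new provider classes
from typing import Protocol


class ScalingProvider(Protocol):
    def get_ec2_tags(self, app: object) -> dict[str, str]: ...

    async def list_unrunnable_tasks(self, app: object) -> list: ...


class DummyProvider:
    """No inheritance needed: matching the method shapes is enough."""

    def get_ec2_tags(self, app: object) -> dict[str, str]:
        # hypothetical tag value, purely for the example
        return {"example.autoscaling.provider": "dummy"}

    async def list_unrunnable_tasks(self, app: object) -> list:
        return []


def accepts_provider(provider: ScalingProvider) -> None:
    # type checkers accept DummyProvider here because it structurally
    # satisfies ScalingProvider, mirroring how the concrete providers
    # plug into auto_scale_cluster without subclassing a base class
    assert provider.get_ec2_tags(app=object()) is not None


accepts_provider(DummyProvider())

One apparent benefit, judging from the rest of the patch, is that the providers drop the @staticmethod boilerplate and become ordinary instances while call sites in _auto_scaling_core.py stay statically checkable.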
diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/__init__.py b/services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_core.py b/services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/_auto_scaling_core.py similarity index 84% rename from services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_core.py rename to services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/_auto_scaling_core.py index a3a34e7b5d04..a7a027a89452 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_core.py +++ b/services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/_auto_scaling_core.py @@ -15,65 +15,63 @@ EC2Tags, Resources, ) -from aws_library.ec2._errors import EC2TooManyInstancesError +from aws_library.ec2._errors import ( + EC2AccessError, + EC2InsufficientCapacityError, + EC2TooManyInstancesError, +) from fastapi import FastAPI -from models_library.generated_models.docker_rest_api import Node, NodeState +from models_library.generated_models.docker_rest_api import Node from models_library.rabbitmq_messages import ProgressType from servicelib.logging_utils import log_catch, log_context from servicelib.utils import limited_gather from servicelib.utils_formatting import timedelta_as_minute_second from types_aiobotocore_ec2.literals import InstanceTypeType -from ..constants import DOCKER_JOIN_COMMAND_EC2_TAG_KEY, DOCKER_JOIN_COMMAND_NAME -from ..core.errors import ( +from ...constants import DOCKER_JOIN_COMMAND_EC2_TAG_KEY, DOCKER_JOIN_COMMAND_NAME +from ...core.errors import ( Ec2InvalidDnsNameError, TaskBestFittingInstanceNotFoundError, TaskRequirementsAboveRequiredEC2InstanceTypeError, TaskRequiresUnauthorizedEC2InstanceTypeError, ) -from ..core.settings import ApplicationSettings, get_application_settings -from ..models import ( +from ...core.settings import ApplicationSettings, get_application_settings +from ...models import ( AssignedTasksToInstanceType, AssociatedInstance, Cluster, NonAssociatedInstance, ) -from ..utils import utils_docker, utils_ec2 -from ..utils.auto_scaling_core import ( +from ...utils import utils_docker, utils_ec2 +from ...utils.cluster_scaling import ( associate_ec2_instances_with_nodes, ec2_startup_script, find_selected_instance_type_for_task, - get_machine_buffer_type, - node_host_name_from_ec2_private_dns, + get_hot_buffer_type, sort_drained_nodes, ) -from ..utils.buffer_machines_pool_core import ( - get_activated_buffer_ec2_tags, - get_deactivated_buffer_ec2_tags, - is_buffer_machine, -) -from ..utils.rabbitmq import ( +from ...utils.rabbitmq import ( post_autoscaling_status_message, post_tasks_log_message, post_tasks_progress_message, ) -from .auto_scaling_mode_base import BaseAutoscaling -from .docker import get_docker_client -from .ec2 import get_ec2_client -from .instrumentation import get_instrumentation, has_instrumentation -from .ssm import get_ssm_client +from ...utils.warm_buffer_machines import ( + get_activated_warm_buffer_ec2_tags, + get_deactivated_warm_buffer_ec2_tags, + is_warm_buffer_machine, +) +from ..docker import get_docker_client +from ..ec2 import get_ec2_client +from ..instrumentation import get_instrumentation, has_instrumentation +from ..ssm import get_ssm_client +from ._provider_protocol import AutoscalingProvider _logger = 
logging.getLogger(__name__) -def _node_not_ready(node: Node) -> bool: - assert node.status # nosec - return bool(node.status.state != NodeState.ready) - - async def _analyze_current_cluster( app: FastAPI, - auto_scaling_mode: BaseAutoscaling, + auto_scaling_mode: AutoscalingProvider, allowed_instance_types: list[EC2InstanceType], ) -> Cluster: app_settings = get_application_settings(app) @@ -95,13 +93,13 @@ async def _analyze_current_cluster( state_names=["terminated"], ) - buffer_ec2_instances = await get_ec2_client(app).get_instances( + warm_buffer_ec2_instances = await get_ec2_client(app).get_instances( key_names=[app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_KEY_NAME], - tags=get_deactivated_buffer_ec2_tags(app, auto_scaling_mode), + tags=get_deactivated_warm_buffer_ec2_tags(auto_scaling_mode.get_ec2_tags(app)), state_names=["stopped"], ) - attached_ec2s, pending_ec2s = await associate_ec2_instances_with_nodes( + attached_ec2s, pending_ec2s = associate_ec2_instances_with_nodes( docker_nodes, existing_ec2_instances ) @@ -141,7 +139,7 @@ async def _analyze_current_cluster( - node_used_resources, ) ) - elif auto_scaling_mode.is_instance_drained(instance): + elif utils_docker.is_instance_drained(instance): all_drained_nodes.append(instance) elif await auto_scaling_mode.is_instance_retired(app, instance): # it should be drained, but it is not, so we force it to be drained such that it might be re-used if needed @@ -149,24 +147,26 @@ async def _analyze_current_cluster( else: pending_nodes.append(instance) - drained_nodes, buffer_drained_nodes, terminating_nodes = sort_drained_nodes( + drained_nodes, hot_buffer_drained_nodes, terminating_nodes = sort_drained_nodes( app_settings, all_drained_nodes, allowed_instance_types ) cluster = Cluster( active_nodes=active_nodes, pending_nodes=pending_nodes, drained_nodes=drained_nodes, - buffer_drained_nodes=buffer_drained_nodes, + hot_buffer_drained_nodes=hot_buffer_drained_nodes, pending_ec2s=[NonAssociatedInstance(ec2_instance=i) for i in pending_ec2s], broken_ec2s=[NonAssociatedInstance(ec2_instance=i) for i in broken_ec2s], - buffer_ec2s=[ - NonAssociatedInstance(ec2_instance=i) for i in buffer_ec2_instances + warm_buffer_ec2s=[ + NonAssociatedInstance(ec2_instance=i) for i in warm_buffer_ec2_instances ], terminating_nodes=terminating_nodes, terminated_instances=[ NonAssociatedInstance(ec2_instance=i) for i in terminated_ec2_instances ], - disconnected_nodes=[n for n in docker_nodes if _node_not_ready(n)], + disconnected_nodes=[ + n for n in docker_nodes if not utils_docker.is_node_ready(n) + ], retired_nodes=retired_nodes, ) _logger.info("current state: %s", f"{cluster!r}") @@ -207,7 +207,7 @@ async def _terminate_broken_ec2s(app: FastAPI, cluster: Cluster) -> Cluster: ) -async def _make_pending_buffer_ec2s_join_cluster( +async def _make_pending_warm_buffer_ec2s_join_cluster( app: FastAPI, cluster: Cluster, ) -> Cluster: @@ -215,7 +215,7 @@ async def _make_pending_buffer_ec2s_join_cluster( if buffer_ec2s_pending := [ i.ec2_instance for i in cluster.pending_ec2s - if is_buffer_machine(i.ec2_instance.tags) + if is_warm_buffer_machine(i.ec2_instance.tags) and (DOCKER_JOIN_COMMAND_EC2_TAG_KEY not in i.ec2_instance.tags) ]: # started buffer instance shall be asked to join the cluster once they are running @@ -278,7 +278,7 @@ async def _make_pending_buffer_ec2s_join_cluster( async def _try_attach_pending_ec2s( app: FastAPI, cluster: Cluster, - auto_scaling_mode: BaseAutoscaling, + auto_scaling_mode: AutoscalingProvider, allowed_instance_types: 
list[EC2InstanceType], ) -> Cluster: """label the drained instances that connected to the swarm which are missing the monitoring labels""" @@ -288,7 +288,7 @@ async def _try_attach_pending_ec2s( assert app_settings.AUTOSCALING_EC2_INSTANCES # nosec for instance_data in cluster.pending_ec2s: try: - node_host_name = node_host_name_from_ec2_private_dns( + node_host_name = utils_ec2.node_host_name_from_ec2_private_dns( instance_data.ec2_instance ) if new_node := await utils_docker.find_node_with_name( @@ -317,15 +317,15 @@ async def _try_attach_pending_ec2s( _logger.exception("Unexpected EC2 private dns") # NOTE: first provision the reserve drained nodes if possible all_drained_nodes = ( - cluster.drained_nodes + cluster.buffer_drained_nodes + new_found_instances + cluster.drained_nodes + cluster.hot_buffer_drained_nodes + new_found_instances ) - drained_nodes, buffer_drained_nodes, _ = sort_drained_nodes( + drained_nodes, hot_buffer_drained_nodes, _ = sort_drained_nodes( app_settings, all_drained_nodes, allowed_instance_types ) return dataclasses.replace( cluster, drained_nodes=drained_nodes, - buffer_drained_nodes=buffer_drained_nodes, + hot_buffer_drained_nodes=hot_buffer_drained_nodes, pending_ec2s=still_pending_ec2s, ) @@ -389,7 +389,9 @@ async def _activate_drained_nodes( ) -> Cluster: nodes_to_activate = [ node - for node in itertools.chain(cluster.drained_nodes, cluster.buffer_drained_nodes) + for node in itertools.chain( + cluster.drained_nodes, cluster.hot_buffer_drained_nodes + ) if node.assigned_tasks ] @@ -412,31 +414,67 @@ async def _activate_drained_nodes( ] remaining_reserved_drained_nodes = [ node - for node in cluster.buffer_drained_nodes + for node in cluster.hot_buffer_drained_nodes if node.ec2_instance.id not in new_active_node_ids ] return dataclasses.replace( cluster, active_nodes=cluster.active_nodes + activated_nodes, drained_nodes=remaining_drained_nodes, - buffer_drained_nodes=remaining_reserved_drained_nodes, + hot_buffer_drained_nodes=remaining_reserved_drained_nodes, ) -async def _start_warm_buffer_instances( - app: FastAPI, cluster: Cluster, auto_scaling_mode: BaseAutoscaling -) -> Cluster: - """starts warm buffer if there are assigned tasks, or if a hot buffer of the same type is needed""" +def _de_assign_tasks_from_warm_buffer_ec2s( + cluster: Cluster, instances_to_start: list[EC2InstanceData] +) -> tuple[Cluster, list]: + # de-assign tasks from the warm buffer instances that could not be started + deassigned_tasks = list( + itertools.chain.from_iterable( + i.assigned_tasks + for i in cluster.warm_buffer_ec2s + if i.ec2_instance in instances_to_start + ) + ) + # upgrade the cluster + return ( + dataclasses.replace( + cluster, + warm_buffer_ec2s=[ + ( + dataclasses.replace(i, assigned_tasks=[]) + if i.ec2_instance in instances_to_start + else i + ) + for i in cluster.warm_buffer_ec2s + ], + ), + deassigned_tasks, + ) + + +async def _try_start_warm_buffer_instances( + app: FastAPI, cluster: Cluster, auto_scaling_mode: AutoscalingProvider +) -> tuple[Cluster, list]: + """ + starts warm buffer if there are assigned tasks, or if a hot buffer of the same type is needed + + Returns: + A tuple containing: + - The updated cluster instance after attempting to start warm buffer instances. + - In case warm buffer could not be started, a list of de-assigned tasks (tasks whose resource requirements cannot be fulfilled by warm buffers anymore). 
+ + """ app_settings = get_application_settings(app) assert app_settings.AUTOSCALING_EC2_INSTANCES # nosec instances_to_start = [ - i.ec2_instance for i in cluster.buffer_ec2s if i.assigned_tasks + i.ec2_instance for i in cluster.warm_buffer_ec2s if i.assigned_tasks ] if ( - len(cluster.buffer_drained_nodes) + len(cluster.hot_buffer_drained_nodes) < app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER ): # check if we can migrate warm buffers to hot buffers @@ -448,7 +486,7 @@ async def _start_warm_buffer_instances( ) free_startable_warm_buffers_to_replace_hot_buffers = [ warm_buffer.ec2_instance - for warm_buffer in cluster.buffer_ec2s + for warm_buffer in cluster.warm_buffer_ec2s if (warm_buffer.ec2_instance.type == hot_buffer_instance_type) and not warm_buffer.assigned_tasks ] @@ -462,36 +500,61 @@ async def _start_warm_buffer_instances( instances_to_start += free_startable_warm_buffers_to_replace_hot_buffers[ : app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER - - len(cluster.buffer_drained_nodes) + - len(cluster.hot_buffer_drained_nodes) - len(unnassigned_pending_ec2s) - len(unnassigned_pending_nodes) ] if not instances_to_start: - return cluster + return cluster, [] with log_context( - _logger, logging.INFO, f"start {len(instances_to_start)} buffer machines" + _logger, + logging.INFO, + f"start {len(instances_to_start)} warm buffer machines '{[i.id for i in instances_to_start]}'", ): - started_instances = await get_ec2_client(app).start_instances( - instances_to_start - ) + try: + started_instances = await get_ec2_client(app).start_instances( + instances_to_start + ) + except EC2InsufficientCapacityError: + # NOTE: this warning is only raised if none of the instances could be started due to InsufficientCapacity + _logger.warning( + "Could not start warm buffer instances: %s due to Insufficient Capacity in the current AWS Availability Zone! " + "The warm buffer assigned tasks will be moved to new instances if possible.", + [i.id for i in instances_to_start], + ) + return _de_assign_tasks_from_warm_buffer_ec2s(cluster, instances_to_start) + + except EC2AccessError: + _logger.exception( + "Could not start warm buffer instances %s! TIP: This needs to be analysed!" + "The warm buffer assigned tasks will be moved to new instances if possible.", + [i.id for i in instances_to_start], + ) + return _de_assign_tasks_from_warm_buffer_ec2s(cluster, instances_to_start) + # NOTE: first start the instance and then set the tags in case the instance cannot start (e.g. 
InsufficientInstanceCapacity) await get_ec2_client(app).set_instances_tags( started_instances, - tags=get_activated_buffer_ec2_tags(app, auto_scaling_mode), + tags=get_activated_warm_buffer_ec2_tags( + auto_scaling_mode.get_ec2_tags(app) + ), ) started_instance_ids = [i.id for i in started_instances] - return dataclasses.replace( - cluster, - buffer_ec2s=[ - i - for i in cluster.buffer_ec2s - if i.ec2_instance.id not in started_instance_ids - ], - pending_ec2s=cluster.pending_ec2s - + [NonAssociatedInstance(ec2_instance=i) for i in started_instances], + return ( + dataclasses.replace( + cluster, + warm_buffer_ec2s=[ + i + for i in cluster.warm_buffer_ec2s + if i.ec2_instance.id not in started_instance_ids + ], + pending_ec2s=cluster.pending_ec2s + + [NonAssociatedInstance(ec2_instance=i) for i in started_instances], + ), + [], ) @@ -512,8 +575,8 @@ def _try_assign_task_to_ec2_instance( _logger.debug( "%s", f"assigned task with {task_required_resources=}, {task_required_ec2_instance=} to " - f"{instance.ec2_instance.id=}:{instance.ec2_instance.type}, " - f"remaining resources:{instance.available_resources}/{instance.ec2_instance.resources}", + f"{instance.ec2_instance.id=}:{instance.ec2_instance.type=}, " + f"{instance.available_resources=}, {instance.ec2_instance.resources=}", ) return True return False @@ -536,8 +599,8 @@ def _try_assign_task_to_ec2_instance_type( _logger.debug( "%s", f"assigned task with {task_required_resources=}, {task_required_ec2_instance=} to " - f"{instance.instance_type}, " - f"remaining resources:{instance.available_resources}/{instance.instance_type.resources}", + f"{instance.instance_type=}, " + f"{instance.available_resources=}, {instance.instance_type.resources=}", ) return True return False @@ -547,7 +610,7 @@ async def _assign_tasks_to_current_cluster( app: FastAPI, tasks: list, cluster: Cluster, - auto_scaling_mode: BaseAutoscaling, + auto_scaling_mode: AutoscalingProvider, ) -> tuple[list, Cluster]: """ Evaluates whether a task can be executed on any instance within the cluster. If the task's resource requirements are met, the task is *denoted* as assigned to the cluster. @@ -563,10 +626,10 @@ async def _assign_tasks_to_current_cluster( functools.partial(_try_assign_task_to_ec2_instance, instances=instances) for instances in ( cluster.active_nodes, - cluster.drained_nodes + cluster.buffer_drained_nodes, + cluster.drained_nodes + cluster.hot_buffer_drained_nodes, cluster.pending_nodes, cluster.pending_ec2s, - cluster.buffer_ec2s, + cluster.warm_buffer_ec2s, ) ] @@ -605,7 +668,7 @@ async def _find_needed_instances( unassigned_tasks: list, available_ec2_types: list[EC2InstanceType], cluster: Cluster, - auto_scaling_mode: BaseAutoscaling, + auto_scaling_mode: AutoscalingProvider, ) -> dict[EC2InstanceType, int]: # 1. check first the pending task needs needed_new_instance_types_for_tasks: list[AssignedTasksToInstanceType] = [] @@ -634,8 +697,8 @@ async def _find_needed_instances( defined_ec2 = find_selected_instance_type_for_task( task_required_ec2_instance, available_ec2_types, - auto_scaling_mode, task, + auto_scaling_mode.get_task_required_resources(task), ) needed_new_instance_types_for_tasks.append( AssignedTasksToInstanceType( @@ -684,13 +747,13 @@ async def _find_needed_instances( ), ) - # 2. check the buffer needs + # 2. 
check the hot buffer needs app_settings = get_application_settings(app) assert app_settings.AUTOSCALING_EC2_INSTANCES # nosec if ( num_missing_nodes := ( app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER - - len(cluster.buffer_drained_nodes) + - len(cluster.hot_buffer_drained_nodes) ) ) > 0: # check if some are already pending @@ -699,9 +762,9 @@ async def _find_needed_instances( ] + [i.ec2_instance for i in cluster.pending_nodes if not i.assigned_tasks] if len(remaining_pending_instances) < ( app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER - - len(cluster.buffer_drained_nodes) + - len(cluster.hot_buffer_drained_nodes) ): - default_instance_type = get_machine_buffer_type(available_ec2_types) + default_instance_type = get_hot_buffer_type(available_ec2_types) num_instances_per_type[default_instance_type] += num_missing_nodes return num_instances_per_type @@ -778,7 +841,7 @@ async def _launch_instances( app: FastAPI, needed_instances: dict[EC2InstanceType, int], tasks: list, - auto_scaling_mode: BaseAutoscaling, + auto_scaling_mode: AutoscalingProvider, ) -> list[EC2InstanceData]: ec2_client = get_ec2_client(app) app_settings = get_application_settings(app) @@ -816,7 +879,7 @@ async def _launch_instances( ].ami_id, key_name=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_KEY_NAME, security_group_ids=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_SECURITY_GROUP_IDS, - subnet_id=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_SUBNET_ID, + subnet_ids=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_SUBNET_IDS, iam_instance_profile=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_ATTACHED_IAM_PROFILE, ), min_number_of_instances=1, # NOTE: we want at least 1 if possible @@ -847,8 +910,8 @@ async def _launch_instances( new_pending_instances.append(r) log_message = ( - f"{sum(n for n in capped_needed_machines.values())} new machines launched" - ", it might take up to 3 minutes to start, Please wait..." + f"{sum(capped_needed_machines.values())} new machines launched" + f", it might take up to {timedelta_as_minute_second(app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MAX_START_TIME)} minutes to start, Please wait..." ) await post_tasks_log_message( app, tasks=tasks, message=log_message, level=logging.INFO @@ -951,7 +1014,7 @@ async def _deactivate_empty_nodes(app: FastAPI, cluster: Cluster) -> Cluster: ) -async def _find_terminateable_instances( +def _find_terminateable_instances( app: FastAPI, cluster: Cluster ) -> list[AssociatedInstance]: app_settings: ApplicationSettings = app.state.settings @@ -996,7 +1059,7 @@ async def _try_scale_down_cluster(app: FastAPI, cluster: Cluster) -> Cluster: assert app_settings.AUTOSCALING_EC2_INSTANCES # nosec # instances found to be terminateable will now start the termination process. 
new_terminating_instances = [] - for instance in await _find_terminateable_instances(app, cluster): + for instance in _find_terminateable_instances(app, cluster): assert instance.node.description is not None # nosec with ( log_context( @@ -1150,9 +1213,15 @@ async def _drain_retired_nodes( async def _scale_down_unused_cluster_instances( app: FastAPI, cluster: Cluster, - auto_scaling_mode: BaseAutoscaling, + auto_scaling_mode: AutoscalingProvider, ) -> Cluster: - await auto_scaling_mode.try_retire_nodes(app) + if any(not instance.has_assigned_tasks() for instance in cluster.active_nodes): + # ask the provider to try to retire nodes actively + with ( + log_catch(_logger, reraise=False), + log_context(_logger, logging.INFO, "actively ask to retire unused nodes"), + ): + await auto_scaling_mode.try_retire_nodes(app) cluster = await _deactivate_empty_nodes(app, cluster) return await _try_scale_down_cluster(app, cluster) @@ -1160,14 +1229,14 @@ async def _scale_down_unused_cluster_instances( async def _scale_up_cluster( app: FastAPI, cluster: Cluster, - auto_scaling_mode: BaseAutoscaling, + auto_scaling_mode: AutoscalingProvider, allowed_instance_types: list[EC2InstanceType], unassigned_tasks: list, ) -> Cluster: app_settings = get_application_settings(app) assert app_settings.AUTOSCALING_EC2_INSTANCES # nosec if not unassigned_tasks and ( - len(cluster.buffer_drained_nodes) + len(cluster.hot_buffer_drained_nodes) >= app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER ): return cluster @@ -1212,7 +1281,7 @@ async def _scale_up_cluster( async def _autoscale_cluster( app: FastAPI, cluster: Cluster, - auto_scaling_mode: BaseAutoscaling, + auto_scaling_mode: AutoscalingProvider, allowed_instance_types: list[EC2InstanceType], ) -> Cluster: # 1. check if we have pending tasks @@ -1231,7 +1300,11 @@ async def _autoscale_cluster( cluster = await _activate_drained_nodes(app, cluster) # 3. start warm buffer instances to cover the remaining tasks - cluster = await _start_warm_buffer_instances(app, cluster, auto_scaling_mode) + cluster, de_assigned_tasks = await _try_start_warm_buffer_instances( + app, cluster, auto_scaling_mode + ) + # 3.1 if some tasks were de-assigned, we need to add them to the still pending tasks + still_pending_tasks.extend(de_assigned_tasks) # 4. 
scale down unused instances cluster = await _scale_down_unused_cluster_instances( @@ -1245,11 +1318,13 @@ async def _autoscale_cluster( async def _notify_autoscaling_status( - app: FastAPI, cluster: Cluster, auto_scaling_mode: BaseAutoscaling + app: FastAPI, cluster: Cluster, auto_scaling_mode: AutoscalingProvider ) -> None: monitored_instances = list( itertools.chain( - cluster.active_nodes, cluster.drained_nodes, cluster.buffer_drained_nodes + cluster.active_nodes, + cluster.drained_nodes, + cluster.hot_buffer_drained_nodes, ) ) @@ -1274,7 +1349,7 @@ async def _notify_autoscaling_status( async def auto_scale_cluster( - *, app: FastAPI, auto_scaling_mode: BaseAutoscaling + *, app: FastAPI, auto_scaling_mode: AutoscalingProvider ) -> None: """Check that there are no pending tasks requiring additional resources in the cluster (docker swarm) If there are such tasks, this method will allocate new machines in AWS to cope with @@ -1289,7 +1364,7 @@ async def auto_scale_cluster( # cleanup cluster = await _cleanup_disconnected_nodes(app, cluster) cluster = await _terminate_broken_ec2s(app, cluster) - cluster = await _make_pending_buffer_ec2s_join_cluster(app, cluster) + cluster = await _make_pending_warm_buffer_ec2s_join_cluster(app, cluster) cluster = await _try_attach_pending_ec2s( app, cluster, auto_scaling_mode, allowed_instance_types ) diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_computational.py b/services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/_provider_computational.py similarity index 70% rename from services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_computational.py rename to services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/_provider_computational.py index 2fb2344f22f5..c9b2d498fd66 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_computational.py +++ b/services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/_provider_computational.py @@ -5,27 +5,26 @@ from aws_library.ec2 import EC2InstanceData, EC2Tags, Resources from fastapi import FastAPI from models_library.clusters import ClusterAuthentication -from models_library.docker import ( +from models_library.docker import DockerLabelKey +from models_library.generated_models.docker_rest_api import Node +from models_library.services_metadata_runtime import ( DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY, - DockerLabelKey, ) -from models_library.generated_models.docker_rest_api import Node from pydantic import AnyUrl, ByteSize from servicelib.utils import logged_gather from types_aiobotocore_ec2.literals import InstanceTypeType -from ..core.errors import ( +from ...core.errors import ( DaskNoWorkersError, DaskSchedulerNotFoundError, DaskWorkerNotFoundError, ) -from ..core.settings import get_application_settings -from ..models import AssociatedInstance, DaskTask -from ..utils import computational_scaling as utils -from ..utils import utils_docker, utils_ec2 -from . import dask -from .auto_scaling_mode_base import BaseAutoscaling -from .docker import get_docker_client +from ...core.settings import get_application_settings +from ...models import AssociatedInstance, DaskTask +from ...utils import utils_docker, utils_ec2 +from .. import dask +from ..docker import get_docker_client +from . 
import _utils_computational as utils _logger = logging.getLogger(__name__) @@ -42,27 +41,27 @@ def _scheduler_auth(app: FastAPI) -> ClusterAuthentication: return app_settings.AUTOSCALING_DASK.DASK_SCHEDULER_AUTH -class ComputationalAutoscaling(BaseAutoscaling): - @staticmethod - async def get_monitored_nodes(app: FastAPI) -> list[Node]: +class ComputationalAutoscalingProvider: + async def get_monitored_nodes(self, app: FastAPI) -> list[Node]: + assert self # nosec return await utils_docker.get_worker_nodes(get_docker_client(app)) - @staticmethod - def get_ec2_tags(app: FastAPI) -> EC2Tags: + def get_ec2_tags(self, app: FastAPI) -> EC2Tags: + assert self # nosec app_settings = get_application_settings(app) return utils_ec2.get_ec2_tags_computational(app_settings) - @staticmethod def get_new_node_docker_tags( - app: FastAPI, ec2_instance_data: EC2InstanceData + self, app: FastAPI, ec2_instance_data: EC2InstanceData ) -> dict[DockerLabelKey, str]: + assert self # nosec assert app # nosec return { DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY: ec2_instance_data.type } - @staticmethod - async def list_unrunnable_tasks(app: FastAPI) -> list[DaskTask]: + async def list_unrunnable_tasks(self, app: FastAPI) -> list[DaskTask]: + assert self # nosec try: unrunnable_tasks = await dask.list_unrunnable_tasks( _scheduler_url(app), _scheduler_auth(app) @@ -87,19 +86,27 @@ async def list_unrunnable_tasks(app: FastAPI) -> list[DaskTask]: ) return [] - @staticmethod - def get_task_required_resources(task) -> Resources: - return utils.resources_from_dask_task(task) + def get_task_required_resources(self, task) -> Resources: + assert self # nosec + task_required_resources = utils.resources_from_dask_task(task) + # ensure cpu is set at least to 1 as dask-workers use 1 thread per CPU + if task_required_resources.cpus < 1.0: + task_required_resources = task_required_resources.model_copy( + update={"cpus": 1.0} + ) + return task_required_resources - @staticmethod - async def get_task_defined_instance(app: FastAPI, task) -> InstanceTypeType | None: + async def get_task_defined_instance( + self, app: FastAPI, task + ) -> InstanceTypeType | None: + assert self # nosec assert app # nosec return cast(InstanceTypeType | None, utils.get_task_instance_restriction(task)) - @staticmethod async def compute_node_used_resources( - app: FastAPI, instance: AssociatedInstance + self, app: FastAPI, instance: AssociatedInstance ) -> Resources: + assert self # nosec try: resource = await dask.get_worker_used_resources( _scheduler_url(app), _scheduler_auth(app), instance.ec2_instance @@ -127,25 +134,22 @@ async def compute_node_used_resources( _logger.debug("no resource found for %s", f"{instance.ec2_instance.id}") return Resources.create_as_empty() - @staticmethod async def compute_cluster_used_resources( - app: FastAPI, instances: list[AssociatedInstance] + self, app: FastAPI, instances: list[AssociatedInstance] ) -> Resources: + assert self # nosec list_of_used_resources: list[Resources] = await logged_gather( - *( - ComputationalAutoscaling.compute_node_used_resources(app, i) - for i in instances - ) + *(self.compute_node_used_resources(app, i) for i in instances) ) - counter = collections.Counter({k: 0 for k in Resources.model_fields}) + counter = collections.Counter(dict.fromkeys(Resources.model_fields, 0)) for result in list_of_used_resources: counter.update(result.model_dump()) return Resources.model_validate(dict(counter)) - @staticmethod async def compute_cluster_total_resources( - app: FastAPI, instances: 
list[AssociatedInstance] + self, app: FastAPI, instances: list[AssociatedInstance] ) -> Resources: + assert self # nosec try: return await dask.compute_cluster_total_resources( _scheduler_url(app), _scheduler_auth(app), instances @@ -153,8 +157,10 @@ async def compute_cluster_total_resources( except DaskNoWorkersError: return Resources.create_as_empty() - @staticmethod - async def is_instance_active(app: FastAPI, instance: AssociatedInstance) -> bool: + async def is_instance_active( + self, app: FastAPI, instance: AssociatedInstance + ) -> bool: + assert self # nosec if not utils_docker.is_node_osparc_ready(instance.node): return False @@ -163,14 +169,16 @@ async def is_instance_active(app: FastAPI, instance: AssociatedInstance) -> bool _scheduler_url(app), _scheduler_auth(app), instance.ec2_instance ) - @staticmethod - async def is_instance_retired(app: FastAPI, instance: AssociatedInstance) -> bool: + async def is_instance_retired( + self, app: FastAPI, instance: AssociatedInstance + ) -> bool: + assert self # nosec if not utils_docker.is_node_osparc_ready(instance.node): return False return await dask.is_worker_retired( _scheduler_url(app), _scheduler_auth(app), instance.ec2_instance ) - @staticmethod - async def try_retire_nodes(app: FastAPI) -> None: + async def try_retire_nodes(self, app: FastAPI) -> None: + assert self # nosec await dask.try_retire_nodes(_scheduler_url(app), _scheduler_auth(app)) diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_dynamic.py b/services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/_provider_dynamic.py similarity index 64% rename from services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_dynamic.py rename to services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/_provider_dynamic.py index a8dcd7552ac8..e6dbca840e37 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_mode_dynamic.py +++ b/services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/_provider_dynamic.py @@ -4,16 +4,15 @@ from models_library.generated_models.docker_rest_api import Node, Task from types_aiobotocore_ec2.literals import InstanceTypeType -from ..core.settings import get_application_settings -from ..models import AssociatedInstance -from ..utils import utils_docker, utils_ec2 -from .auto_scaling_mode_base import BaseAutoscaling -from .docker import get_docker_client +from ...core.settings import get_application_settings +from ...models import AssociatedInstance +from ...utils import utils_docker, utils_ec2 +from ..docker import get_docker_client -class DynamicAutoscaling(BaseAutoscaling): - @staticmethod - async def get_monitored_nodes(app: FastAPI) -> list[Node]: +class DynamicAutoscalingProvider: + async def get_monitored_nodes(self, app: FastAPI) -> list[Node]: + assert self # nosec app_settings = get_application_settings(app) assert app_settings.AUTOSCALING_NODES_MONITORING # nosec return await utils_docker.get_monitored_nodes( @@ -21,20 +20,20 @@ async def get_monitored_nodes(app: FastAPI) -> list[Node]: node_labels=app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NODE_LABELS, ) - @staticmethod - def get_ec2_tags(app: FastAPI) -> EC2Tags: + def get_ec2_tags(self, app: FastAPI) -> EC2Tags: + assert self # nosec app_settings = get_application_settings(app) return utils_ec2.get_ec2_tags_dynamic(app_settings) - @staticmethod def get_new_node_docker_tags( - app: FastAPI, ec2_instance_data: EC2InstanceData + 
self, app: FastAPI, ec2_instance_data: EC2InstanceData ) -> dict[DockerLabelKey, str]: + assert self # nosec app_settings = get_application_settings(app) return utils_docker.get_new_node_docker_tags(app_settings, ec2_instance_data) - @staticmethod - async def list_unrunnable_tasks(app: FastAPI) -> list[Task]: + async def list_unrunnable_tasks(self, app: FastAPI) -> list[Task]: + assert self # nosec app_settings = get_application_settings(app) assert app_settings.AUTOSCALING_NODES_MONITORING # nosec return await utils_docker.pending_service_tasks_with_insufficient_resources( @@ -42,20 +41,22 @@ async def list_unrunnable_tasks(app: FastAPI) -> list[Task]: service_labels=app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_SERVICE_LABELS, ) - @staticmethod - def get_task_required_resources(task) -> Resources: + def get_task_required_resources(self, task) -> Resources: + assert self # nosec return utils_docker.get_max_resources_from_docker_task(task) - @staticmethod - async def get_task_defined_instance(app: FastAPI, task) -> InstanceTypeType | None: + async def get_task_defined_instance( + self, app: FastAPI, task + ) -> InstanceTypeType | None: + assert self # nosec return await utils_docker.get_task_instance_restriction( get_docker_client(app), task ) - @staticmethod async def compute_node_used_resources( - app: FastAPI, instance: AssociatedInstance + self, app: FastAPI, instance: AssociatedInstance ) -> Resources: + assert self # nosec docker_client = get_docker_client(app) app_settings = get_application_settings(app) assert app_settings.AUTOSCALING_NODES_MONITORING # nosec @@ -65,37 +66,41 @@ async def compute_node_used_resources( service_labels=app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_SERVICE_LABELS, ) - @staticmethod async def compute_cluster_used_resources( - app: FastAPI, instances: list[AssociatedInstance] + self, app: FastAPI, instances: list[AssociatedInstance] ) -> Resources: + assert self # nosec docker_client = get_docker_client(app) return await utils_docker.compute_cluster_used_resources( docker_client, [i.node for i in instances] ) - @staticmethod async def compute_cluster_total_resources( - app: FastAPI, instances: list[AssociatedInstance] + self, app: FastAPI, instances: list[AssociatedInstance] ) -> Resources: + assert self # nosec assert app # nosec return await utils_docker.compute_cluster_total_resources( [i.node for i in instances] ) - @staticmethod - async def is_instance_active(app: FastAPI, instance: AssociatedInstance) -> bool: + async def is_instance_active( + self, app: FastAPI, instance: AssociatedInstance + ) -> bool: + assert self # nosec assert app # nosec return utils_docker.is_node_osparc_ready(instance.node) - @staticmethod - async def is_instance_retired(app: FastAPI, instance: AssociatedInstance) -> bool: + async def is_instance_retired( + self, app: FastAPI, instance: AssociatedInstance + ) -> bool: + assert self # nosec assert app # nosec assert instance # nosec # nothing to do here return False - @staticmethod - async def try_retire_nodes(app: FastAPI) -> None: + async def try_retire_nodes(self, app: FastAPI) -> None: + assert self # nosec assert app # nosec # nothing to do here diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/_provider_protocol.py b/services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/_provider_protocol.py new file mode 100644 index 000000000000..355394b9f1d3 --- /dev/null +++ 
b/services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/_provider_protocol.py @@ -0,0 +1,49 @@ +from typing import Protocol + +from aws_library.ec2 import EC2InstanceData, EC2Tags, Resources +from fastapi import FastAPI +from models_library.docker import DockerLabelKey +from models_library.generated_models.docker_rest_api import Node as DockerNode +from types_aiobotocore_ec2.literals import InstanceTypeType + +from ...models import AssociatedInstance + + +class AutoscalingProvider(Protocol): + async def get_monitored_nodes(self, app: FastAPI) -> list[DockerNode]: ... + + def get_ec2_tags(self, app: FastAPI) -> EC2Tags: ... + + def get_new_node_docker_tags( + self, app: FastAPI, ec2_instance_data: EC2InstanceData + ) -> dict[DockerLabelKey, str]: ... + + async def list_unrunnable_tasks(self, app: FastAPI) -> list: ... + + def get_task_required_resources(self, task) -> Resources: ... + + async def get_task_defined_instance( + self, app: FastAPI, task + ) -> InstanceTypeType | None: ... + + async def compute_node_used_resources( + self, app: FastAPI, instance: AssociatedInstance + ) -> Resources: ... + + async def compute_cluster_used_resources( + self, app: FastAPI, instances: list[AssociatedInstance] + ) -> Resources: ... + + async def compute_cluster_total_resources( + self, app: FastAPI, instances: list[AssociatedInstance] + ) -> Resources: ... + + async def is_instance_active( + self, app: FastAPI, instance: AssociatedInstance + ) -> bool: ... + + async def is_instance_retired( + self, app: FastAPI, instance: AssociatedInstance + ) -> bool: ... + + async def try_retire_nodes(self, app: FastAPI) -> None: ... diff --git a/services/autoscaling/src/simcore_service_autoscaling/utils/computational_scaling.py b/services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/_utils_computational.py similarity index 95% rename from services/autoscaling/src/simcore_service_autoscaling/utils/computational_scaling.py rename to services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/_utils_computational.py index 07c55bf746a4..4fb76ee5e129 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/utils/computational_scaling.py +++ b/services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/_utils_computational.py @@ -6,7 +6,7 @@ get_ec2_instance_type_from_resources, ) -from ..models import DaskTask +from ...models import DaskTask _logger = logging.getLogger(__name__) diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/buffer_machines_pool_core.py b/services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/_warm_buffer_machines_pool_core.py similarity index 75% rename from services/autoscaling/src/simcore_service_autoscaling/modules/buffer_machines_pool_core.py rename to services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/_warm_buffer_machines_pool_core.py index d9f1c5505685..7dcaa48dd61d 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/buffer_machines_pool_core.py +++ b/services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/_warm_buffer_machines_pool_core.py @@ -20,7 +20,6 @@ import arrow from aws_library.ec2 import ( - AWSTagValue, EC2InstanceConfig, EC2InstanceData, EC2InstanceType, @@ -35,94 +34,132 @@ from servicelib.logging_utils import log_context from types_aiobotocore_ec2.literals import InstanceTypeType -from ..constants import ( +from ...constants import ( 
BUFFER_MACHINE_PULLING_COMMAND_ID_EC2_TAG_KEY, BUFFER_MACHINE_PULLING_EC2_TAG_KEY, DOCKER_PULL_COMMAND, PREPULL_COMMAND_NAME, ) -from ..core.settings import get_application_settings -from ..models import BufferPool, BufferPoolManager -from ..utils.auto_scaling_core import ec2_buffer_startup_script -from ..utils.buffer_machines_pool_core import ( +from ...core.settings import get_application_settings +from ...models import WarmBufferPool, WarmBufferPoolManager +from ...utils.warm_buffer_machines import ( dump_pre_pulled_images_as_tags, - get_deactivated_buffer_ec2_tags, + ec2_warm_buffer_startup_script, + get_deactivated_warm_buffer_ec2_tags, load_pre_pulled_images_from_tags, ) -from .auto_scaling_mode_base import BaseAutoscaling -from .ec2 import get_ec2_client -from .instrumentation import get_instrumentation, has_instrumentation -from .ssm import get_ssm_client +from ..ec2 import get_ec2_client +from ..instrumentation import get_instrumentation, has_instrumentation +from ..ssm import get_ssm_client +from ._provider_protocol import AutoscalingProvider _logger = logging.getLogger(__name__) -async def _analyze_running_instance_state( - app: FastAPI, *, buffer_pool: BufferPool, instance: EC2InstanceData -): - ssm_client = get_ssm_client(app) +def _record_instance_ready_metrics(app: FastAPI, *, instance: EC2InstanceData) -> None: + """Record metrics for instances ready to pull images.""" + if has_instrumentation(app): + get_instrumentation( + app + ).buffer_machines_pools_metrics.instances_ready_to_pull_seconds.labels( + instance_type=instance.type + ).observe( + (arrow.utcnow().datetime - instance.launch_time).total_seconds() + ) + + +def _handle_completed_cloud_init_instance( + app: FastAPI, *, buffer_pool: WarmBufferPool, instance: EC2InstanceData +) -> None: + """Handle instance that has completed cloud init.""" app_settings = get_application_settings(app) assert app_settings.AUTOSCALING_EC2_INSTANCES # nosec - if BUFFER_MACHINE_PULLING_EC2_TAG_KEY in instance.tags: - buffer_pool.pulling_instances.add(instance) - elif await ssm_client.is_instance_connected_to_ssm_server(instance.id): - try: - if await ssm_client.wait_for_has_instance_completed_cloud_init(instance.id): - if has_instrumentation(app): - get_instrumentation( - app - ).buffer_machines_pools_metrics.instances_ready_to_pull_seconds.labels( - instance_type=instance.type - ).observe( - (arrow.utcnow().datetime - instance.launch_time).total_seconds() - ) - if app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_ALLOWED_TYPES[ - instance.type - ].pre_pull_images: - buffer_pool.waiting_to_pull_instances.add(instance) - else: - buffer_pool.waiting_to_stop_instances.add(instance) - else: - buffer_pool.pending_instances.add(instance) - except ( - SSMCommandExecutionResultError, - SSMCommandExecutionTimeoutError, - ): - _logger.exception( - "Unnexpected error when checking EC2 cloud initialization completion!. " - "The machine will be terminated. TIP: check the initialization phase for errors." 
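For reference, the helper extracted above reduces to observing the launch-to-ready latency on a per-instance-type Histogram. A minimal, self-contained sketch of that measurement (the metric name mirrors instances_ready_to_pull_seconds; _FakeEC2Instance is a hypothetical stand-in for EC2InstanceData):

import datetime
from dataclasses import dataclass

import arrow
from prometheus_client import CollectorRegistry, Histogram


@dataclass(frozen=True)
class _FakeEC2Instance:  # hypothetical stand-in for EC2InstanceData
    type: str
    launch_time: datetime.datetime


registry = CollectorRegistry()
instances_ready_to_pull_seconds = Histogram(
    "instances_ready_to_pull_seconds",
    "Time from EC2 launch until the instance is ready to pull images",
    labelnames=("instance_type",),
    registry=registry,
)


def record_instance_ready(instance: _FakeEC2Instance) -> None:
    # same computation as _record_instance_ready_metrics: now minus launch time, in seconds
    elapsed = (arrow.utcnow().datetime - instance.launch_time).total_seconds()
    instances_ready_to_pull_seconds.labels(instance_type=instance.type).observe(elapsed)


record_instance_ready(
    _FakeEC2Instance(type="t3.large", launch_time=arrow.utcnow().shift(minutes=-2).datetime)
)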
- ) - buffer_pool.broken_instances.add(instance) + _record_instance_ready_metrics(app, instance=instance) + + if app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_ALLOWED_TYPES[ + instance.type + ].pre_pull_images: + buffer_pool.waiting_to_pull_instances.add(instance) else: - is_broken = bool( - (arrow.utcnow().datetime - instance.launch_time) - > app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MAX_START_TIME - ) + buffer_pool.waiting_to_stop_instances.add(instance) + - if is_broken: - _logger.error( - "The machine does not connect to the SSM server after %s. It will be terminated. TIP: check the initialization phase for errors.", - app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MAX_START_TIME, +async def _handle_ssm_connected_instance( + app: FastAPI, *, buffer_pool: WarmBufferPool, instance: EC2InstanceData +) -> None: + """Handle instance connected to SSM server.""" + ssm_client = get_ssm_client(app) + + try: + if await ssm_client.wait_for_has_instance_completed_cloud_init(instance.id): + _handle_completed_cloud_init_instance( + app, buffer_pool=buffer_pool, instance=instance ) - buffer_pool.broken_instances.add(instance) else: buffer_pool.pending_instances.add(instance) + except ( + SSMCommandExecutionResultError, + SSMCommandExecutionTimeoutError, + ): + _logger.exception( + "Unnexpected error when checking EC2 cloud initialization completion!. " + "The machine will be terminated. TIP: check the initialization phase for errors." + ) + buffer_pool.broken_instances.add(instance) + + +def _handle_unconnected_instance( + app: FastAPI, *, buffer_pool: WarmBufferPool, instance: EC2InstanceData +) -> None: + """Handle instance not connected to SSM server.""" + app_settings = get_application_settings(app) + assert app_settings.AUTOSCALING_EC2_INSTANCES # nosec + + is_broken = bool( + (arrow.utcnow().datetime - instance.launch_time) + > app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MAX_START_TIME + ) + + if is_broken: + _logger.error( + "The machine does not connect to the SSM server after %s. It will be terminated. 
TIP: check the initialization phase for errors.", + app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MAX_START_TIME, + ) + buffer_pool.broken_instances.add(instance) + else: + buffer_pool.pending_instances.add(instance) + + +async def _analyze_running_instance_state( + app: FastAPI, *, buffer_pool: WarmBufferPool, instance: EC2InstanceData +) -> None: + """Analyze and categorize running instance based on its current state.""" + ssm_client = get_ssm_client(app) + + if BUFFER_MACHINE_PULLING_EC2_TAG_KEY in instance.tags: + buffer_pool.pulling_instances.add(instance) + elif await ssm_client.is_instance_connected_to_ssm_server(instance.id): + await _handle_ssm_connected_instance( + app, buffer_pool=buffer_pool, instance=instance + ) + else: + _handle_unconnected_instance(app, buffer_pool=buffer_pool, instance=instance) async def _analyse_current_state( - app: FastAPI, *, auto_scaling_mode: BaseAutoscaling -) -> BufferPoolManager: + app: FastAPI, *, auto_scaling_mode: AutoscalingProvider +) -> WarmBufferPoolManager: ec2_client = get_ec2_client(app) app_settings = get_application_settings(app) assert app_settings.AUTOSCALING_EC2_INSTANCES # nosec all_buffer_instances = await ec2_client.get_instances( key_names=[app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_KEY_NAME], - tags=get_deactivated_buffer_ec2_tags(app, auto_scaling_mode), + tags=get_deactivated_warm_buffer_ec2_tags(auto_scaling_mode.get_ec2_tags(app)), state_names=["stopped", "pending", "running", "stopping"], ) - buffers_manager = BufferPoolManager() + buffers_manager = WarmBufferPoolManager() for instance in all_buffer_instances: match instance.state: case "stopped": @@ -150,8 +187,8 @@ async def _analyse_current_state( async def _terminate_unneeded_pools( app: FastAPI, - buffers_manager: BufferPoolManager, -) -> BufferPoolManager: + buffers_manager: WarmBufferPoolManager, +) -> WarmBufferPoolManager: ec2_client = get_ec2_client(app) app_settings = get_application_settings(app) assert app_settings.AUTOSCALING_EC2_INSTANCES # nosec @@ -178,8 +215,8 @@ async def _terminate_unneeded_pools( async def _terminate_instances_with_invalid_pre_pulled_images( - app: FastAPI, buffers_manager: BufferPoolManager -) -> BufferPoolManager: + app: FastAPI, buffers_manager: WarmBufferPoolManager +) -> WarmBufferPoolManager: ec2_client = get_ec2_client(app) app_settings = get_application_settings(app) assert app_settings.AUTOSCALING_EC2_INSTANCES # nosec @@ -212,8 +249,8 @@ async def _terminate_instances_with_invalid_pre_pulled_images( async def _terminate_broken_instances( - app: FastAPI, buffers_manager: BufferPoolManager -) -> BufferPoolManager: + app: FastAPI, buffers_manager: WarmBufferPoolManager +) -> WarmBufferPoolManager: ec2_client = get_ec2_client(app) termineatable_instances = set() for pool in buffers_manager.buffer_pools.values(): @@ -227,10 +264,10 @@ async def _terminate_broken_instances( async def _add_remove_buffer_instances( app: FastAPI, - buffers_manager: BufferPoolManager, + buffers_manager: WarmBufferPoolManager, *, - auto_scaling_mode: BaseAutoscaling, -) -> BufferPoolManager: + auto_scaling_mode: AutoscalingProvider, +) -> WarmBufferPoolManager: ec2_client = get_ec2_client(app) app_settings = get_application_settings(app) assert app_settings.AUTOSCALING_EC2_INSTANCES # nosec @@ -265,17 +302,19 @@ async def _add_remove_buffer_instances( name=ec2_type, resources=Resources.create_as_empty(), # fake resources ), - tags=get_deactivated_buffer_ec2_tags(app, auto_scaling_mode), - startup_script=ec2_buffer_startup_script( + 
tags=get_deactivated_warm_buffer_ec2_tags( + auto_scaling_mode.get_ec2_tags(app) + ), + startup_script=ec2_warm_buffer_startup_script( ec2_boot_specific, app_settings ), ami_id=ec2_boot_specific.ami_id, key_name=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_KEY_NAME, security_group_ids=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_SECURITY_GROUP_IDS, - subnet_id=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_SUBNET_ID, + subnet_ids=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_SUBNET_IDS, iam_instance_profile=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_ATTACHED_IAM_PROFILE, ), - min_number_of_instances=num_to_start, + min_number_of_instances=1, # NOTE: we want at least 1 number_of_instances=num_to_start, max_total_number_of_instances=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MAX_INSTANCES, ) @@ -291,7 +330,7 @@ async def _add_remove_buffer_instances( async def _handle_pool_image_pulling( - app: FastAPI, instance_type: InstanceTypeType, pool: BufferPool + app: FastAPI, instance_type: InstanceTypeType, pool: WarmBufferPool ) -> tuple[InstancesToStop, InstancesToTerminate]: ec2_client = get_ec2_client(app) ssm_client = get_ssm_client(app) @@ -305,10 +344,8 @@ async def _handle_pool_image_pulling( await ec2_client.set_instances_tags( tuple(pool.waiting_to_pull_instances), tags={ - BUFFER_MACHINE_PULLING_EC2_TAG_KEY: AWSTagValue("true"), - BUFFER_MACHINE_PULLING_COMMAND_ID_EC2_TAG_KEY: AWSTagValue( - ssm_command.command_id - ), + BUFFER_MACHINE_PULLING_EC2_TAG_KEY: "true", + BUFFER_MACHINE_PULLING_COMMAND_ID_EC2_TAG_KEY: ssm_command.command_id, }, ) @@ -361,7 +398,7 @@ async def _handle_pool_image_pulling( async def _handle_image_pre_pulling( - app: FastAPI, buffers_manager: BufferPoolManager + app: FastAPI, buffers_manager: WarmBufferPoolManager ) -> None: ec2_client = get_ec2_client(app) instances_to_stop: set[EC2InstanceData] = set() @@ -397,7 +434,7 @@ async def _handle_image_pre_pulling( async def monitor_buffer_machines( - app: FastAPI, *, auto_scaling_mode: BaseAutoscaling + app: FastAPI, *, auto_scaling_mode: AutoscalingProvider ) -> None: """Buffer machine creation works like so: 1. 
a EC2 is created with an EBS attached volume wO auto prepulling and wO auto connect to swarm diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_task.py b/services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/auto_scaling_task.py similarity index 80% rename from services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_task.py rename to services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/auto_scaling_task.py index 5ebc6a190f89..fd3f43975c4f 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/auto_scaling_task.py +++ b/services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/auto_scaling_task.py @@ -2,17 +2,17 @@ from collections.abc import Awaitable, Callable from typing import Final +from common_library.async_tools import cancel_wait_task from fastapi import FastAPI -from servicelib.async_utils import cancel_wait_task from servicelib.background_task import create_periodic_task from servicelib.redis import exclusive -from ..core.settings import ApplicationSettings -from ..utils.redis import create_lock_key_and_value -from .auto_scaling_core import auto_scale_cluster -from .auto_scaling_mode_computational import ComputationalAutoscaling -from .auto_scaling_mode_dynamic import DynamicAutoscaling -from .redis import get_redis_client +from ...core.settings import ApplicationSettings +from ...utils.redis import create_lock_key_and_value +from ..redis import get_redis_client +from ._auto_scaling_core import auto_scale_cluster +from ._provider_computational import ComputationalAutoscalingProvider +from ._provider_dynamic import DynamicAutoscalingProvider _TASK_NAME: Final[str] = "Autoscaling EC2 instances" @@ -33,9 +33,9 @@ async def _startup() -> None: task_name=_TASK_NAME, app=app, auto_scaling_mode=( - DynamicAutoscaling() + DynamicAutoscalingProvider() if app_settings.AUTOSCALING_NODES_MONITORING is not None - else ComputationalAutoscaling() + else ComputationalAutoscalingProvider() ), ) diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/buffer_machines_pool_task.py b/services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/warm_buffer_machines_pool_task.py similarity index 86% rename from services/autoscaling/src/simcore_service_autoscaling/modules/buffer_machines_pool_task.py rename to services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/warm_buffer_machines_pool_task.py index 2985e2ffcc4d..26ea0c4e0f80 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/buffer_machines_pool_task.py +++ b/services/autoscaling/src/simcore_service_autoscaling/modules/cluster_scaling/warm_buffer_machines_pool_task.py @@ -2,16 +2,16 @@ from collections.abc import Awaitable, Callable from typing import Final +from common_library.async_tools import cancel_wait_task from fastapi import FastAPI -from servicelib.async_utils import cancel_wait_task from servicelib.background_task import create_periodic_task from servicelib.redis import exclusive -from ..core.settings import ApplicationSettings -from ..utils.redis import create_lock_key_and_value -from .auto_scaling_mode_dynamic import DynamicAutoscaling -from .buffer_machines_pool_core import monitor_buffer_machines -from .redis import get_redis_client +from ...core.settings import ApplicationSettings +from ...utils.redis import create_lock_key_and_value +from ..redis import get_redis_client +from ._provider_dynamic import 
DynamicAutoscalingProvider +from ._warm_buffer_machines_pool_core import monitor_buffer_machines _TASK_NAME_BUFFER: Final[str] = "Autoscaling Buffer Machines Pool" @@ -35,7 +35,7 @@ async def _startup() -> None: interval=app_settings.AUTOSCALING_POLL_INTERVAL, task_name=_TASK_NAME_BUFFER, app=app, - auto_scaling_mode=(DynamicAutoscaling()), + auto_scaling_mode=(DynamicAutoscalingProvider()), ) return _startup diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/dask.py b/services/autoscaling/src/simcore_service_autoscaling/modules/dask.py index d57508babf88..966593295e87 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/dask.py +++ b/services/autoscaling/src/simcore_service_autoscaling/modules/dask.py @@ -21,7 +21,7 @@ DaskWorkerNotFoundError, ) from ..models import AssociatedInstance, DaskTask, DaskTaskId -from ..utils.auto_scaling_core import ( +from ..utils.utils_ec2 import ( node_host_name_from_ec2_private_dns, node_ip_from_ec2_private_dns, ) @@ -30,7 +30,7 @@ async def _wrap_client_async_routine( - client_coroutine: Coroutine[Any, Any, Any] | Any | None + client_coroutine: Coroutine[Any, Any, Any] | Any | None, ) -> Any: """Dask async behavior does not go well with Pylance as it returns a union of types. this wrapper makes both mypy and pylance happy""" @@ -96,7 +96,7 @@ def _dask_worker_from_ec2_instance( # dict is of type dask_worker_address: worker_details def _find_by_worker_host( - dask_worker: tuple[DaskWorkerUrl, DaskWorkerDetails] + dask_worker: tuple[DaskWorkerUrl, DaskWorkerDetails], ) -> bool: _, details = dask_worker if match := re.match(DASK_NAME_PATTERN, details["name"]): @@ -173,9 +173,9 @@ def _list_tasks( } async with _scheduler_client(scheduler_url, authentication) as client: - list_of_tasks: dict[ - dask.typing.Key, DaskTaskResources - ] = await _wrap_client_async_routine(client.run_on_scheduler(_list_tasks)) + list_of_tasks: dict[dask.typing.Key, DaskTaskResources] = ( + await _wrap_client_async_routine(client.run_on_scheduler(_list_tasks)) + ) _logger.debug("found unrunnable tasks: %s", list_of_tasks) return [ DaskTask( @@ -207,10 +207,10 @@ def _list_processing_tasks( return worker_to_processing_tasks async with _scheduler_client(scheduler_url, authentication) as client: - worker_to_tasks: dict[ - str, list[tuple[dask.typing.Key, DaskTaskResources]] - ] = await _wrap_client_async_routine( - client.run_on_scheduler(_list_processing_tasks) + worker_to_tasks: dict[str, list[tuple[dask.typing.Key, DaskTaskResources]]] = ( + await _wrap_client_async_routine( + client.run_on_scheduler(_list_processing_tasks) + ) ) _logger.debug("found processing tasks: %s", worker_to_tasks) tasks_per_worker = defaultdict(list) @@ -276,12 +276,12 @@ def _list_processing_tasks_on_worker( _logger.debug("looking for processing tasks for %s", f"{worker_url=}") # now get the used resources - worker_processing_tasks: list[ - tuple[dask.typing.Key, DaskTaskResources] - ] = await _wrap_client_async_routine( - client.run_on_scheduler( - _list_processing_tasks_on_worker, worker_url=worker_url - ), + worker_processing_tasks: list[tuple[dask.typing.Key, DaskTaskResources]] = ( + await _wrap_client_async_routine( + client.run_on_scheduler( + _list_processing_tasks_on_worker, worker_url=worker_url + ), + ) ) total_resources_used: collections.Counter[str] = collections.Counter() diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/instrumentation/_constants.py 
b/services/autoscaling/src/simcore_service_autoscaling/modules/instrumentation/_constants.py index 1224ea719078..cc4c2a2126dc 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/instrumentation/_constants.py +++ b/services/autoscaling/src/simcore_service_autoscaling/modules/instrumentation/_constants.py @@ -20,7 +20,7 @@ "Number of EC2-backed docker nodes which are drained", EC2_INSTANCE_LABELS, ), - "buffer_drained_nodes": ( + "hot_buffer_drained_nodes": ( "Number of EC2-backed docker nodes which are drained and in buffer/reserve", EC2_INSTANCE_LABELS, ), @@ -32,7 +32,7 @@ "Number of EC2 instances that failed joining the cluster", EC2_INSTANCE_LABELS, ), - "buffer_ec2s": ( + "warm_buffer_ec2s": ( "Number of buffer EC2 instances prepared, stopped, and ready to be activated", EC2_INSTANCE_LABELS, ), @@ -54,7 +54,7 @@ ), } -BUFFER_POOLS_METRICS_DEFINITIONS: Final[dict[str, tuple[str, tuple[str, ...]]]] = { +WARM_BUFFER_POOLS_METRICS_DEFINITIONS: Final[dict[str, tuple[str, tuple[str, ...]]]] = { "ready_instances": ( "Number of EC2 buffer instances that are ready for use", EC2_INSTANCE_LABELS, diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/instrumentation/_models.py b/services/autoscaling/src/simcore_service_autoscaling/modules/instrumentation/_models.py index 3831b33b826d..82e1790880c5 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/instrumentation/_models.py +++ b/services/autoscaling/src/simcore_service_autoscaling/modules/instrumentation/_models.py @@ -4,12 +4,12 @@ from prometheus_client import CollectorRegistry, Counter, Histogram from servicelib.instrumentation import MetricsBase -from ...models import BufferPoolManager, Cluster +from ...models import Cluster, WarmBufferPoolManager from ._constants import ( - BUFFER_POOLS_METRICS_DEFINITIONS, CLUSTER_METRICS_DEFINITIONS, EC2_INSTANCE_LABELS, METRICS_NAMESPACE, + WARM_BUFFER_POOLS_METRICS_DEFINITIONS, ) from ._utils import TrackedGauge, create_gauge @@ -19,10 +19,10 @@ class ClusterMetrics(MetricsBase): # pylint: disable=too-many-instance-attribut active_nodes: TrackedGauge = field(init=False) pending_nodes: TrackedGauge = field(init=False) drained_nodes: TrackedGauge = field(init=False) - buffer_drained_nodes: TrackedGauge = field(init=False) + hot_buffer_drained_nodes: TrackedGauge = field(init=False) pending_ec2s: TrackedGauge = field(init=False) broken_ec2s: TrackedGauge = field(init=False) - buffer_ec2s: TrackedGauge = field(init=False) + warm_buffer_ec2s: TrackedGauge = field(init=False) disconnected_nodes: TrackedGauge = field(init=False) terminating_nodes: TrackedGauge = field(init=False) retired_nodes: TrackedGauge = field(init=False) @@ -110,7 +110,7 @@ def instance_terminated(self, instance_type: str) -> None: @dataclass(slots=True, kw_only=True) -class BufferPoolsMetrics(MetricsBase): +class WarmBufferPoolsMetrics(MetricsBase): ready_instances: TrackedGauge = field(init=False) pending_instances: TrackedGauge = field(init=False) waiting_to_pull_instances: TrackedGauge = field(init=False) @@ -124,7 +124,7 @@ class BufferPoolsMetrics(MetricsBase): def __post_init__(self) -> None: buffer_pools_subsystem = f"{self.subsystem}_buffer_machines_pools" - for field_name, definition in BUFFER_POOLS_METRICS_DEFINITIONS.items(): + for field_name, definition in WARM_BUFFER_POOLS_METRICS_DEFINITIONS.items(): setattr( self, field_name, @@ -165,11 +165,11 @@ def __post_init__(self) -> None: ) def update_from_buffer_pool_manager( - self, buffer_pool_manager: 
BufferPoolManager + self, buffer_pool_manager: WarmBufferPoolManager ) -> None: flat_pool = buffer_pool_manager.flatten_buffer_pool() - for field_name in BUFFER_POOLS_METRICS_DEFINITIONS: + for field_name in WARM_BUFFER_POOLS_METRICS_DEFINITIONS: tracked_gauge = getattr(self, field_name) assert isinstance(tracked_gauge, TrackedGauge) # nosec instances = getattr(flat_pool, field_name) @@ -183,19 +183,15 @@ class AutoscalingInstrumentation(MetricsBase): cluster_metrics: ClusterMetrics = field(init=False) ec2_client_metrics: EC2ClientMetrics = field(init=False) - buffer_machines_pools_metrics: BufferPoolsMetrics = field(init=False) + buffer_machines_pools_metrics: WarmBufferPoolsMetrics = field(init=False) def __post_init__(self) -> None: self.cluster_metrics = ClusterMetrics( # pylint: disable=unexpected-keyword-arg subsystem=self.subsystem, registry=self.registry ) - self.ec2_client_metrics = ( - EC2ClientMetrics( # pylint: disable=unexpected-keyword-arg - subsystem=self.subsystem, registry=self.registry - ) + self.ec2_client_metrics = EC2ClientMetrics( # pylint: disable=unexpected-keyword-arg + subsystem=self.subsystem, registry=self.registry ) - self.buffer_machines_pools_metrics = ( - BufferPoolsMetrics( # pylint: disable=unexpected-keyword-arg - subsystem=self.subsystem, registry=self.registry - ) + self.buffer_machines_pools_metrics = WarmBufferPoolsMetrics( # pylint: disable=unexpected-keyword-arg + subsystem=self.subsystem, registry=self.registry ) diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/redis.py b/services/autoscaling/src/simcore_service_autoscaling/modules/redis.py index c0cf7a15e07a..4aa9cea509c2 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/redis.py +++ b/services/autoscaling/src/simcore_service_autoscaling/modules/redis.py @@ -18,6 +18,7 @@ async def on_startup() -> None: app.state.redis_client_sdk = RedisClientSDK( redis_locks_dsn, client_name=APP_NAME ) + await app.state.redis_client_sdk.setup() async def on_shutdown() -> None: redis_client_sdk: None | RedisClientSDK = app.state.redis_client_sdk diff --git a/services/autoscaling/src/simcore_service_autoscaling/utils/auto_scaling_core.py b/services/autoscaling/src/simcore_service_autoscaling/utils/cluster_scaling.py similarity index 63% rename from services/autoscaling/src/simcore_service_autoscaling/utils/auto_scaling_core.py rename to services/autoscaling/src/simcore_service_autoscaling/utils/cluster_scaling.py index d7f69d50b54e..d8a43eecfe25 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/utils/auto_scaling_core.py +++ b/services/autoscaling/src/simcore_service_autoscaling/utils/cluster_scaling.py @@ -1,9 +1,13 @@ import functools import logging -import re -from typing import Final +from typing import TypeAlias -from aws_library.ec2 import EC2InstanceBootSpecific, EC2InstanceData, EC2InstanceType +from aws_library.ec2 import ( + EC2InstanceBootSpecific, + EC2InstanceData, + EC2InstanceType, + Resources, +) from models_library.generated_models.docker_rest_api import Node from types_aiobotocore_ec2.literals import InstanceTypeType @@ -14,41 +18,12 @@ ) from ..core.settings import ApplicationSettings from ..models import AssociatedInstance -from ..modules.auto_scaling_mode_base import BaseAutoscaling -from . import utils_docker +from . 
import utils_docker, utils_ec2 -_EC2_INTERNAL_DNS_RE: Final[re.Pattern] = re.compile(r"^(?Pip-[^.]+).*$") _logger = logging.getLogger(__name__) -def node_host_name_from_ec2_private_dns( - ec2_instance_data: EC2InstanceData, -) -> str: - """returns the node host name 'ip-10-2-3-22' from the ec2 private dns - Raises: - Ec2InvalidDnsNameError: if the dns name does not follow the expected pattern - """ - if match := re.match(_EC2_INTERNAL_DNS_RE, ec2_instance_data.aws_private_dns): - host_name: str = match.group("host_name") - return host_name - raise Ec2InvalidDnsNameError(aws_private_dns_name=ec2_instance_data.aws_private_dns) - - -def node_ip_from_ec2_private_dns( - ec2_instance_data: EC2InstanceData, -) -> str: - """returns the node ipv4 from the ec2 private dns string - Raises: - Ec2InvalidDnsNameError: if the dns name does not follow the expected pattern - """ - return ( - node_host_name_from_ec2_private_dns(ec2_instance_data) - .removeprefix("ip-") - .replace("-", ".") - ) - - -async def associate_ec2_instances_with_nodes( +def associate_ec2_instances_with_nodes( nodes: list[Node], ec2_instances: list[EC2InstanceData] ) -> tuple[list[AssociatedInstance], list[EC2InstanceData]]: """returns the associated and non-associated instances""" @@ -61,7 +36,9 @@ def _find_node_with_name(node: Node) -> bool: for instance_data in ec2_instances: try: - docker_node_name = node_host_name_from_ec2_private_dns(instance_data) + docker_node_name = utils_ec2.node_host_name_from_ec2_private_dns( + instance_data + ) except Ec2InvalidDnsNameError: _logger.exception("Unexpected EC2 private dns name") non_associated_instances.append(instance_data) @@ -108,38 +85,17 @@ async def ec2_startup_script( return " && ".join(startup_commands) -def ec2_buffer_startup_script( - ec2_boot_specific: EC2InstanceBootSpecific, app_settings: ApplicationSettings -) -> str: - startup_commands = ec2_boot_specific.custom_boot_scripts.copy() - if ec2_boot_specific.pre_pull_images: - assert app_settings.AUTOSCALING_REGISTRY # nosec - startup_commands.extend( - ( - utils_docker.get_docker_login_on_start_bash_command( - app_settings.AUTOSCALING_REGISTRY - ), - utils_docker.write_compose_file_command( - ec2_boot_specific.pre_pull_images - ), - ) - ) - return " && ".join(startup_commands) - - def _instance_type_by_type_name( ec2_type: EC2InstanceType, *, type_name: InstanceTypeType | None ) -> bool: - if type_name is None: - return True - return bool(ec2_type.name == type_name) + return type_name is None or ec2_type.name == type_name def find_selected_instance_type_for_task( instance_type_name: InstanceTypeType, available_ec2_types: list[EC2InstanceType], - auto_scaling_mode: BaseAutoscaling, task, + task_required_resources: Resources, ) -> EC2InstanceType: filtered_instances = list( filter( @@ -158,36 +114,33 @@ def find_selected_instance_type_for_task( selected_instance = filtered_instances[0] # check that the assigned resources and the machine resource fit - if ( - auto_scaling_mode.get_task_required_resources(task) - > selected_instance.resources - ): + if task_required_resources > selected_instance.resources: raise TaskRequirementsAboveRequiredEC2InstanceTypeError( task=task, instance_type=selected_instance, - resources=auto_scaling_mode.get_task_required_resources(task), + resources=task_required_resources, ) return selected_instance -def get_machine_buffer_type( +def get_hot_buffer_type( available_ec2_types: list[EC2InstanceType], ) -> EC2InstanceType: assert len(available_ec2_types) > 0 # nosec return available_ec2_types[0] 
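The reworked selection helper above takes the task's needs as a plain Resources value, so the guard it applies is a straight comparison against the chosen EC2 type. A simplified, self-contained sketch of that check (_Resources is a reduced stand-in for aws_library.ec2.Resources, not the real class):

from dataclasses import dataclass


@dataclass(frozen=True)
class _Resources:  # reduced stand-in for aws_library.ec2.Resources (cpus + ram only)
    cpus: float
    ram: int


def exceeds(needed: _Resources, offered: _Resources) -> bool:
    # "needs more than the instance offers" in at least one dimension; when this is
    # true the service raises TaskRequirementsAboveRequiredEC2InstanceTypeError
    return needed.cpus > offered.cpus or needed.ram > offered.ram


assert exceeds(_Resources(cpus=4, ram=8 * 1024**3), _Resources(cpus=2, ram=16 * 1024**3))
assert not exceeds(_Resources(cpus=1, ram=4 * 1024**3), _Resources(cpus=2, ram=16 * 1024**3))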
-DrainedNodes = list[AssociatedInstance] -BufferDrainedNodes = list[AssociatedInstance] -TerminatingNodes = list[AssociatedInstance] +DrainedNodes: TypeAlias = list[AssociatedInstance] +HotBufferDrainedNodes: TypeAlias = list[AssociatedInstance] +TerminatingNodes: TypeAlias = list[AssociatedInstance] def sort_drained_nodes( app_settings: ApplicationSettings, all_drained_nodes: list[AssociatedInstance], available_ec2_types: list[EC2InstanceType], -) -> tuple[DrainedNodes, BufferDrainedNodes, TerminatingNodes]: +) -> tuple[DrainedNodes, HotBufferDrainedNodes, TerminatingNodes]: assert app_settings.AUTOSCALING_EC2_INSTANCES # nosec # first sort out the drained nodes that started termination terminating_nodes = [ @@ -199,15 +152,15 @@ def sort_drained_nodes( n for n in all_drained_nodes if n not in terminating_nodes ] # we need to keep in reserve only the drained nodes of the right type - machine_buffer_type = get_machine_buffer_type(available_ec2_types) + hot_buffer_type = get_hot_buffer_type(available_ec2_types) # NOTE: we keep only in buffer the drained nodes with the right EC2 type, AND the right amount - buffer_drained_nodes = [ + hot_buffer_drained_nodes = [ node for node in remaining_drained_nodes - if node.ec2_instance.type == machine_buffer_type.name + if node.ec2_instance.type == hot_buffer_type.name ][: app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER] # all the others are "normal" drained nodes and may be terminated at some point other_drained_nodes = [ - node for node in remaining_drained_nodes if node not in buffer_drained_nodes + node for node in remaining_drained_nodes if node not in hot_buffer_drained_nodes ] - return (other_drained_nodes, buffer_drained_nodes, terminating_nodes) + return (other_drained_nodes, hot_buffer_drained_nodes, terminating_nodes) diff --git a/services/autoscaling/src/simcore_service_autoscaling/utils/rabbitmq.py b/services/autoscaling/src/simcore_service_autoscaling/utils/rabbitmq.py index b01c0853bb23..e00ab3d4eb6e 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/utils/rabbitmq.py +++ b/services/autoscaling/src/simcore_service_autoscaling/utils/rabbitmq.py @@ -4,7 +4,6 @@ from aws_library.ec2 import Resources from dask_task_models_library.container_tasks.utils import parse_dask_job_id from fastapi import FastAPI -from models_library.docker import StandardSimcoreDockerLabels from models_library.generated_models.docker_rest_api import Task as DockerTask from models_library.progress_bar import ProgressReport from models_library.projects import ProjectID @@ -15,6 +14,7 @@ ProgressType, RabbitAutoscalingStatusMessage, ) +from models_library.services_metadata_runtime import SimcoreContainerLabels from models_library.users import UserID from servicelib.logging_utils import log_catch @@ -27,7 +27,7 @@ def _get_task_ids(task: DockerTask | DaskTask) -> tuple[UserID, ProjectID, NodeID]: if isinstance(task, DockerTask): - labels = StandardSimcoreDockerLabels.from_docker_task(task) + labels = SimcoreContainerLabels.from_docker_task(task) return labels.user_id, labels.project_id, labels.node_id _service_key, _service_version, user_id, project_id, node_id = parse_dask_job_id( task.task_id @@ -151,9 +151,9 @@ async def _create_autoscaling_status_message( total_nodes = ( len(cluster.active_nodes) + len(cluster.drained_nodes) - + len(cluster.buffer_drained_nodes) + + len(cluster.hot_buffer_drained_nodes) ) - drained_nodes = len(cluster.drained_nodes) + len(cluster.buffer_drained_nodes) + drained_nodes = len(cluster.drained_nodes) + 
len(cluster.hot_buffer_drained_nodes) return RabbitAutoscalingStatusMessage.model_construct( origin=origin, diff --git a/services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py b/services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py index 9c3f187a78f7..90c0c3807519 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py +++ b/services/autoscaling/src/simcore_service_autoscaling/utils/utils_docker.py @@ -15,7 +15,6 @@ import yaml from aws_library.ec2 import EC2InstanceData, Resources from models_library.docker import ( - DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY, DockerGenericTag, DockerLabelKey, ) @@ -27,6 +26,9 @@ Task, TaskState, ) +from models_library.services_metadata_runtime import ( + DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY, +) from pydantic import ByteSize, TypeAdapter, ValidationError from servicelib.docker_utils import to_datetime from servicelib.logging_utils import log_context @@ -35,6 +37,7 @@ from types_aiobotocore_ec2.literals import InstanceTypeType from ..core.settings import ApplicationSettings +from ..models import AssociatedInstance from ..modules.docker import AutoscalingDocker logger = logging.getLogger(__name__) @@ -278,24 +281,32 @@ def get_max_resources_from_docker_task(task: Task) -> Resources: return Resources( cpus=max( ( - task.spec.resources.reservations - and task.spec.resources.reservations.nano_cp_us + ( + task.spec.resources.reservations + and task.spec.resources.reservations.nano_cp_us + ) or 0 ), ( - task.spec.resources.limits - and task.spec.resources.limits.nano_cp_us + ( + task.spec.resources.limits + and task.spec.resources.limits.nano_cp_us + ) or 0 ), ) / _NANO_CPU, ram=TypeAdapter(ByteSize).validate_python( max( - task.spec.resources.reservations - and task.spec.resources.reservations.memory_bytes + ( + task.spec.resources.reservations + and task.spec.resources.reservations.memory_bytes + ) or 0, - task.spec.resources.limits - and task.spec.resources.limits.memory_bytes + ( + task.spec.resources.limits + and task.spec.resources.limits.memory_bytes + ) or 0, ) ), @@ -382,7 +393,7 @@ async def compute_cluster_used_resources( list_of_used_resources = await logged_gather( *(compute_node_used_resources(docker_client, node) for node in nodes) ) - counter = collections.Counter({k: 0 for k in list(Resources.model_fields)}) + counter = collections.Counter(dict.fromkeys(list(Resources.model_fields), 0)) for result in list_of_used_resources: counter.update(result.model_dump()) @@ -570,14 +581,14 @@ def get_new_node_docker_tags( ) -> dict[DockerLabelKey, str]: assert app_settings.AUTOSCALING_NODES_MONITORING # nosec return ( - { - tag_key: "true" - for tag_key in app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NODE_LABELS - } - | { - tag_key: "true" - for tag_key in app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NEW_NODES_LABELS - } + dict.fromkeys( + app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NODE_LABELS, + "true", + ) + | dict.fromkeys( + app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NEW_NODES_LABELS, + "true", + ) | {DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY: ec2_instance.type} ) @@ -601,6 +612,10 @@ def is_node_osparc_ready(node: Node) -> bool: ) +def is_instance_drained(instance: AssociatedInstance) -> bool: + return not is_node_osparc_ready(instance.node) + + async def set_node_osparc_ready( app_settings: ApplicationSettings, docker_client: AutoscalingDocker, @@ -702,3 +717,8 @@ async def 
attach_node( tags=new_tags, available=app_settings.AUTOSCALING_DRAIN_NODES_WITH_LABELS, # NOTE: full drain sometimes impede on performance ) + + +def is_node_ready(node: Node) -> bool: + assert node.status # nosec + return bool(node.status.state is NodeState.ready) diff --git a/services/autoscaling/src/simcore_service_autoscaling/utils/utils_ec2.py b/services/autoscaling/src/simcore_service_autoscaling/utils/utils_ec2.py index b3b76a48717f..4e72b5493b47 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/utils/utils_ec2.py +++ b/services/autoscaling/src/simcore_service_autoscaling/utils/utils_ec2.py @@ -1,18 +1,27 @@ """Free helper functions for AWS API""" import logging +import re from collections import OrderedDict from collections.abc import Callable from textwrap import dedent +from typing import Final from aws_library.ec2 import AWSTagKey, AWSTagValue, EC2InstanceType, EC2Tags, Resources +from aws_library.ec2._models import EC2InstanceData from common_library.json_serialization import json_dumps from .._meta import VERSION -from ..core.errors import ConfigurationError, TaskBestFittingInstanceNotFoundError +from ..core.errors import ( + ConfigurationError, + Ec2InvalidDnsNameError, + TaskBestFittingInstanceNotFoundError, +) from ..core.settings import ApplicationSettings -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) + +_EC2_INTERNAL_DNS_RE: Final[re.Pattern] = re.compile(r"^(?Pip-[^.]+)\..+$") def get_ec2_tags_dynamic(app_settings: ApplicationSettings) -> EC2Tags: @@ -105,3 +114,30 @@ def find_best_fitting_ec2_instance( raise TaskBestFittingInstanceNotFoundError(needed_resources=resources) return instance + + +def node_host_name_from_ec2_private_dns( + ec2_instance_data: EC2InstanceData, +) -> str: + """returns the node host name 'ip-10-2-3-22' from the ec2 private dns + Raises: + Ec2InvalidDnsNameError: if the dns name does not follow the expected pattern + """ + if match := re.match(_EC2_INTERNAL_DNS_RE, ec2_instance_data.aws_private_dns): + host_name: str = match.group("host_name") + return host_name + raise Ec2InvalidDnsNameError(aws_private_dns_name=ec2_instance_data.aws_private_dns) + + +def node_ip_from_ec2_private_dns( + ec2_instance_data: EC2InstanceData, +) -> str: + """returns the node ipv4 from the ec2 private dns string + Raises: + Ec2InvalidDnsNameError: if the dns name does not follow the expected pattern + """ + return ( + node_host_name_from_ec2_private_dns(ec2_instance_data) + .removeprefix("ip-") + .replace("-", ".") + ) diff --git a/services/autoscaling/src/simcore_service_autoscaling/utils/buffer_machines_pool_core.py b/services/autoscaling/src/simcore_service_autoscaling/utils/warm_buffer_machines.py similarity index 64% rename from services/autoscaling/src/simcore_service_autoscaling/utils/buffer_machines_pool_core.py rename to services/autoscaling/src/simcore_service_autoscaling/utils/warm_buffer_machines.py index 66ff79723060..3e331bc2f97a 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/utils/buffer_machines_pool_core.py +++ b/services/autoscaling/src/simcore_service_autoscaling/utils/warm_buffer_machines.py @@ -3,8 +3,8 @@ from typing import Final from aws_library.ec2 import AWS_TAG_VALUE_MAX_LENGTH, AWSTagKey, AWSTagValue, EC2Tags +from aws_library.ec2._models import EC2InstanceBootSpecific from common_library.json_serialization import json_dumps -from fastapi import FastAPI from models_library.docker import DockerGenericTag from pydantic import TypeAdapter @@ -15,30 +15,25 @@ 
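The private-DNS helpers moved above rely on a named capture group: the compiled pattern is r"^(?P<host_name>ip-[^.]+)\..+$", which is what the match.group("host_name") lookup expects. A standalone sketch of the same parsing and the expected round-trip (a plain ValueError stands in for Ec2InvalidDnsNameError):

import re
from typing import Final

_EC2_INTERNAL_DNS_RE: Final[re.Pattern] = re.compile(r"^(?P<host_name>ip-[^.]+)\..+$")


def host_name_from_private_dns(private_dns: str) -> str:
    # e.g. "ip-10-2-3-22.ec2.internal" -> "ip-10-2-3-22"
    match = _EC2_INTERNAL_DNS_RE.match(private_dns)
    if match is None:
        msg = f"unexpected EC2 private dns name: {private_dns}"
        raise ValueError(msg)  # the service raises Ec2InvalidDnsNameError instead
    return match.group("host_name")


def ip_from_private_dns(private_dns: str) -> str:
    # "ip-10-2-3-22" -> "10.2.3.22"
    return host_name_from_private_dns(private_dns).removeprefix("ip-").replace("-", ".")


assert host_name_from_private_dns("ip-10-2-3-22.ec2.internal") == "ip-10-2-3-22"
assert ip_from_private_dns("ip-10-2-3-22.ec2.internal") == "10.2.3.22"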
PRE_PULLED_IMAGES_EC2_TAG_KEY, PRE_PULLED_IMAGES_RE, ) -from ..modules.auto_scaling_mode_base import BaseAutoscaling +from ..core.settings import ApplicationSettings +from . import utils_docker _NAME_EC2_TAG_KEY: Final[AWSTagKey] = TypeAdapter(AWSTagKey).validate_python("Name") -def get_activated_buffer_ec2_tags( - app: FastAPI, auto_scaling_mode: BaseAutoscaling -) -> EC2Tags: - return auto_scaling_mode.get_ec2_tags(app) | ACTIVATED_BUFFER_MACHINE_EC2_TAGS +def get_activated_warm_buffer_ec2_tags(base_ec2_tags: EC2Tags) -> EC2Tags: + return base_ec2_tags | ACTIVATED_BUFFER_MACHINE_EC2_TAGS -def get_deactivated_buffer_ec2_tags( - app: FastAPI, auto_scaling_mode: BaseAutoscaling -) -> EC2Tags: - base_ec2_tags = ( - auto_scaling_mode.get_ec2_tags(app) | DEACTIVATED_BUFFER_MACHINE_EC2_TAGS +def get_deactivated_warm_buffer_ec2_tags(base_ec2_tags: EC2Tags) -> EC2Tags: + new_base_ec2_tags = base_ec2_tags | DEACTIVATED_BUFFER_MACHINE_EC2_TAGS + new_base_ec2_tags[_NAME_EC2_TAG_KEY] = TypeAdapter(AWSTagValue).validate_python( + f"{new_base_ec2_tags[_NAME_EC2_TAG_KEY]}-buffer" ) - base_ec2_tags[_NAME_EC2_TAG_KEY] = AWSTagValue( - f"{base_ec2_tags[_NAME_EC2_TAG_KEY]}-buffer" - ) - return base_ec2_tags + return new_base_ec2_tags -def is_buffer_machine(tags: EC2Tags) -> bool: +def is_warm_buffer_machine(tags: EC2Tags) -> bool: return bool(BUFFER_MACHINE_TAG_KEY in tags) @@ -93,3 +88,22 @@ def load_pre_pulled_images_from_tags(tags: EC2Tags) -> list[DockerGenericTag]: if assembled_json: return TypeAdapter(list[DockerGenericTag]).validate_json(assembled_json) return [] + + +def ec2_warm_buffer_startup_script( + ec2_boot_specific: EC2InstanceBootSpecific, app_settings: ApplicationSettings +) -> str: + startup_commands = ec2_boot_specific.custom_boot_scripts.copy() + if ec2_boot_specific.pre_pull_images: + assert app_settings.AUTOSCALING_REGISTRY # nosec + startup_commands.extend( + ( + utils_docker.get_docker_login_on_start_bash_command( + app_settings.AUTOSCALING_REGISTRY + ), + utils_docker.write_compose_file_command( + ec2_boot_specific.pre_pull_images + ), + ) + ) + return " && ".join(startup_commands) diff --git a/services/autoscaling/tests/manual/.env-devel b/services/autoscaling/tests/manual/.env-devel index e654a4df5236..f19312b8eb30 100644 --- a/services/autoscaling/tests/manual/.env-devel +++ b/services/autoscaling/tests/manual/.env-devel @@ -21,7 +21,7 @@ EC2_INSTANCES_ATTACHED_IAM_PROFILE=XXXXXXXXX EC2_INSTANCES_KEY_NAME=XXXXXXXXXX EC2_INSTANCES_NAME_PREFIX=testing-osparc-computational-cluster EC2_INSTANCES_SECURITY_GROUP_IDS=["XXXXXXXXXX"] -EC2_INSTANCES_SUBNET_ID=XXXXXXXXXX +EC2_INSTANCES_SUBNET_IDS=["XXXXXXXXXX"] EC2_INSTANCES_CUSTOM_TAGS={"special": "testing"} EC2_INSTANCES_TIME_BEFORE_DRAINING=00:00:20 EC2_INSTANCES_TIME_BEFORE_TERMINATION=00:01:00 diff --git a/services/autoscaling/tests/manual/docker-compose.yml b/services/autoscaling/tests/manual/docker-compose.yml index a28712fb0af2..5056ca528202 100644 --- a/services/autoscaling/tests/manual/docker-compose.yml +++ b/services/autoscaling/tests/manual/docker-compose.yml @@ -1,6 +1,6 @@ services: rabbit: - image: itisfoundation/rabbitmq:3.13.7-management + image: itisfoundation/rabbitmq:4.1.2-management init: true hostname: "{{.Node.Hostname}}-{{.Service.Name}}-{{.Task.Slot}}" ports: diff --git a/services/autoscaling/tests/unit/conftest.py b/services/autoscaling/tests/unit/conftest.py index a49ec4e46b2e..8af62e808c8f 100644 --- a/services/autoscaling/tests/unit/conftest.py +++ b/services/autoscaling/tests/unit/conftest.py @@ -8,6 +8,7 @@ 
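The warm-buffer tag helpers above now take plain EC2Tags instead of reaching into the FastAPI app and the autoscaling provider, which keeps them pure and easy to unit-test. A reduced sketch of the deactivated-buffer tagging, using plain dicts in place of the validated EC2Tags/AWSTagValue types (the buffer tag key below is illustrative, not the real constant):

DEACTIVATED_BUFFER_MACHINE_EC2_TAGS = {"buffer-machine": "true"}  # illustrative key/value


def get_deactivated_warm_buffer_ec2_tags(base_ec2_tags: dict[str, str]) -> dict[str, str]:
    new_tags = base_ec2_tags | DEACTIVATED_BUFFER_MACHINE_EC2_TAGS
    # warm buffers get a "-buffer" suffix so they are easy to spot in the EC2 console
    new_tags["Name"] = f"{new_tags['Name']}-buffer"
    return new_tags


tags = get_deactivated_warm_buffer_ec2_tags({"Name": "autoscaling-worker", "deploy": "staging"})
assert tags["Name"] == "autoscaling-worker-buffer"
assert tags["buffer-machine"] == "true"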
import json import logging import random +import secrets from collections.abc import AsyncIterator, Awaitable, Callable, Iterator from copy import deepcopy from pathlib import Path @@ -36,7 +37,6 @@ from models_library.docker import ( DockerGenericTag, DockerLabelKey, - StandardSimcoreDockerLabels, ) from models_library.generated_models.docker_rest_api import ( Availability, @@ -52,6 +52,7 @@ Service, TaskSpec, ) +from models_library.services_metadata_runtime import SimcoreContainerLabels from pydantic import ByteSize, NonNegativeInt, PositiveInt, TypeAdapter from pytest_mock import MockType from pytest_mock.plugin import MockerFixture @@ -78,19 +79,19 @@ Cluster, DaskTaskResources, ) -from simcore_service_autoscaling.modules import auto_scaling_core -from simcore_service_autoscaling.modules.auto_scaling_mode_dynamic import ( - DynamicAutoscaling, +from simcore_service_autoscaling.modules.cluster_scaling import _auto_scaling_core +from simcore_service_autoscaling.modules.cluster_scaling._provider_dynamic import ( + DynamicAutoscalingProvider, ) from simcore_service_autoscaling.modules.docker import AutoscalingDocker from simcore_service_autoscaling.modules.ec2 import SimcoreEC2API -from simcore_service_autoscaling.utils.buffer_machines_pool_core import ( - get_deactivated_buffer_ec2_tags, -) from simcore_service_autoscaling.utils.utils_docker import ( _OSPARC_SERVICE_READY_LABEL_KEY, _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY, ) +from simcore_service_autoscaling.utils.warm_buffer_machines import ( + get_deactivated_warm_buffer_ec2_tags, +) from tenacity import after_log, before_sleep_log, retry from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_delay @@ -100,6 +101,7 @@ from types_aiobotocore_ec2.type_defs import TagTypeDef pytest_plugins = [ + "pytest_simcore.asyncio_event_loops", "pytest_simcore.aws_server", "pytest_simcore.aws_ec2_service", "pytest_simcore.aws_iam_service", @@ -109,6 +111,7 @@ "pytest_simcore.docker_compose", "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", + "pytest_simcore.logging", "pytest_simcore.rabbit_service", "pytest_simcore.repository_paths", ] @@ -242,11 +245,6 @@ def app_environment( delenvs_from_dict(monkeypatch, mock_env_devel_environment, raising=False) return setenvs_from_dict(monkeypatch, {**external_envfile_dict}) - assert "json_schema_extra" in EC2InstanceBootSpecific.model_config - assert isinstance(EC2InstanceBootSpecific.model_config["json_schema_extra"], dict) - assert isinstance( - EC2InstanceBootSpecific.model_config["json_schema_extra"]["examples"], list - ) envs = setenvs_from_dict( monkeypatch, { @@ -259,21 +257,19 @@ def app_environment( "SSM_ACCESS_KEY_ID": faker.pystr(), "SSM_SECRET_ACCESS_KEY": faker.pystr(), "EC2_INSTANCES_KEY_NAME": faker.pystr(), - "EC2_INSTANCES_SECURITY_GROUP_IDS": json.dumps( + "EC2_INSTANCES_SECURITY_GROUP_IDS": json_dumps( faker.pylist(allowed_types=(str,)) ), - "EC2_INSTANCES_SUBNET_ID": faker.pystr(), - "EC2_INSTANCES_ALLOWED_TYPES": json.dumps( + "EC2_INSTANCES_SUBNET_IDS": json_dumps(faker.pylist(allowed_types=(str,))), + "EC2_INSTANCES_ALLOWED_TYPES": json_dumps( { ec2_type_name: random.choice( # noqa: S311 - EC2InstanceBootSpecific.model_config["json_schema_extra"][ - "examples" - ] + EC2InstanceBootSpecific.model_json_schema()["examples"] ) for ec2_type_name in aws_allowed_ec2_instance_type_names } ), - "EC2_INSTANCES_CUSTOM_TAGS": json.dumps(ec2_instance_custom_tags), + "EC2_INSTANCES_CUSTOM_TAGS": json_dumps(ec2_instance_custom_tags), 
"EC2_INSTANCES_ATTACHED_IAM_PROFILE": faker.pystr(), }, ) @@ -290,25 +286,18 @@ def mocked_ec2_instances_envs( aws_allowed_ec2_instance_type_names: list[InstanceTypeType], aws_instance_profile: str, ) -> EnvVarsDict: - assert "json_schema_extra" in EC2InstanceBootSpecific.model_config - assert isinstance(EC2InstanceBootSpecific.model_config["json_schema_extra"], dict) - assert isinstance( - EC2InstanceBootSpecific.model_config["json_schema_extra"]["examples"], list - ) envs = setenvs_from_dict( monkeypatch, { "EC2_INSTANCES_KEY_NAME": "osparc-pytest", - "EC2_INSTANCES_SECURITY_GROUP_IDS": json.dumps([aws_security_group_id]), - "EC2_INSTANCES_SUBNET_ID": aws_subnet_id, - "EC2_INSTANCES_ALLOWED_TYPES": json.dumps( + "EC2_INSTANCES_SECURITY_GROUP_IDS": json_dumps([aws_security_group_id]), + "EC2_INSTANCES_SUBNET_IDS": json_dumps([aws_subnet_id]), + "EC2_INSTANCES_ALLOWED_TYPES": json_dumps( { ec2_type_name: cast( dict, - random.choice( # noqa: S311 - EC2InstanceBootSpecific.model_config["json_schema_extra"][ - "examples" - ] + secrets.choice( + EC2InstanceBootSpecific.model_json_schema()["examples"] ), ) | {"ami_id": aws_ami_id} @@ -324,12 +313,12 @@ def mocked_ec2_instances_envs( @pytest.fixture def disable_autoscaling_background_task(mocker: MockerFixture) -> None: mocker.patch( - "simcore_service_autoscaling.modules.auto_scaling_task.create_periodic_task", + "simcore_service_autoscaling.modules.cluster_scaling.auto_scaling_task.create_periodic_task", autospec=True, ) mocker.patch( - "simcore_service_autoscaling.modules.auto_scaling_task.cancel_wait_task", + "simcore_service_autoscaling.modules.cluster_scaling.auto_scaling_task.cancel_wait_task", autospec=True, ) @@ -337,12 +326,12 @@ def disable_autoscaling_background_task(mocker: MockerFixture) -> None: @pytest.fixture def disable_buffers_pool_background_task(mocker: MockerFixture) -> None: mocker.patch( - "simcore_service_autoscaling.modules.buffer_machines_pool_task.create_periodic_task", + "simcore_service_autoscaling.modules.cluster_scaling.warm_buffer_machines_pool_task.create_periodic_task", autospec=True, ) mocker.patch( - "simcore_service_autoscaling.modules.buffer_machines_pool_task.cancel_wait_task", + "simcore_service_autoscaling.modules.cluster_scaling.warm_buffer_machines_pool_task.cancel_wait_task", autospec=True, ) @@ -369,11 +358,11 @@ def enabled_dynamic_mode( monkeypatch, { "AUTOSCALING_NODES_MONITORING": "{}", - "NODES_MONITORING_NODE_LABELS": json.dumps(["pytest.fake-node-label"]), - "NODES_MONITORING_SERVICE_LABELS": json.dumps( + "NODES_MONITORING_NODE_LABELS": json_dumps(["pytest.fake-node-label"]), + "NODES_MONITORING_SERVICE_LABELS": json_dumps( ["pytest.fake-service-label"] ), - "NODES_MONITORING_NEW_NODES_LABELS": json.dumps( + "NODES_MONITORING_NEW_NODES_LABELS": json_dumps( ["pytest.fake-new-node-label"] ), }, @@ -445,10 +434,10 @@ def service_monitored_labels( app_settings: ApplicationSettings, ) -> dict[DockerLabelKey, str]: assert app_settings.AUTOSCALING_NODES_MONITORING - return { - key: "true" - for key in app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_SERVICE_LABELS - } + return dict.fromkeys( + app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_SERVICE_LABELS, + "true", + ) @pytest.fixture @@ -790,7 +779,7 @@ def aws_allowed_ec2_instance_type_names_env( aws_allowed_ec2_instance_type_names: list[InstanceTypeType], ) -> EnvVarsDict: changed_envs: dict[str, str | bool] = { - "EC2_INSTANCES_ALLOWED_TYPES": json.dumps(aws_allowed_ec2_instance_type_names), + "EC2_INSTANCES_ALLOWED_TYPES": 
json_dumps(aws_allowed_ec2_instance_type_names), } return app_environment | setenvs_from_dict(monkeypatch, changed_envs) @@ -810,8 +799,8 @@ def host_memory_total() -> ByteSize: @pytest.fixture def osparc_docker_label_keys( faker: Faker, -) -> StandardSimcoreDockerLabels: - return StandardSimcoreDockerLabels.model_validate( +) -> SimcoreContainerLabels: + return SimcoreContainerLabels.model_validate( { "user_id": faker.pyint(), "project_id": faker.uuid4(), @@ -848,10 +837,10 @@ def _creator(**cluter_overrides) -> Cluster: active_nodes=[], pending_nodes=[], drained_nodes=[], - buffer_drained_nodes=[], + hot_buffer_drained_nodes=[], pending_ec2s=[], broken_ec2s=[], - buffer_ec2s=[], + warm_buffer_ec2s=[], disconnected_nodes=[], terminating_nodes=[], retired_nodes=[], @@ -902,7 +891,7 @@ async def _fake_set_node_availability( return returned_node return mocker.patch( - "simcore_service_autoscaling.modules.auto_scaling_core.utils_docker.set_node_availability", + "simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core.utils_docker.set_node_availability", autospec=True, side_effect=_fake_set_node_availability, ) @@ -926,7 +915,7 @@ async def fake_tag_node( return updated_node return mocker.patch( - "simcore_service_autoscaling.modules.auto_scaling_core.utils_docker.tag_node", + "simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core.utils_docker.tag_node", autospec=True, side_effect=fake_tag_node, ) @@ -1043,7 +1032,7 @@ def hot_buffer_instance_type(app_settings: ApplicationSettings) -> InstanceTypeT @pytest.fixture def mock_find_node_with_name_returns_none(mocker: MockerFixture) -> Iterator[mock.Mock]: return mocker.patch( - "simcore_service_autoscaling.modules.auto_scaling_core.utils_docker.find_node_with_name", + "simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core.utils_docker.find_node_with_name", autospec=True, return_value=None, ) @@ -1070,18 +1059,18 @@ def with_short_ec2_instances_max_start_time( @pytest.fixture async def spied_cluster_analysis(mocker: MockerFixture) -> MockType: - return mocker.spy(auto_scaling_core, "_analyze_current_cluster") + return mocker.spy(_auto_scaling_core, "_analyze_current_cluster") @pytest.fixture -async def mocked_associate_ec2_instances_with_nodes(mocker: MockerFixture) -> mock.Mock: - async def _( +def mocked_associate_ec2_instances_with_nodes(mocker: MockerFixture) -> mock.Mock: + def _( nodes: list[DockerNode], ec2_instances: list[EC2InstanceData] ) -> tuple[list[AssociatedInstance], list[EC2InstanceData]]: return [], ec2_instances return mocker.patch( - "simcore_service_autoscaling.modules.auto_scaling_core.associate_ec2_instances_with_nodes", + "simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core.associate_ec2_instances_with_nodes", autospec=True, side_effect=_, ) @@ -1184,8 +1173,8 @@ async def _do( resource_tags: list[TagTypeDef] = [ {"Key": tag_key, "Value": tag_value} - for tag_key, tag_value in get_deactivated_buffer_ec2_tags( - initialized_app, DynamicAutoscaling() + for tag_key, tag_value in get_deactivated_warm_buffer_ec2_tags( + DynamicAutoscalingProvider().get_ec2_tags(initialized_app) ).items() ] if pre_pull_images is not None and instance_state_name == "stopped": @@ -1205,7 +1194,9 @@ async def _do( InstanceType=instance_type, KeyName=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_KEY_NAME, SecurityGroupIds=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_SECURITY_GROUP_IDS, - SubnetId=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_SUBNET_ID, + 
SubnetId=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_SUBNET_IDS[ + 0 + ], IamInstanceProfile={ "Arn": app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_ATTACHED_IAM_PROFILE }, diff --git a/services/autoscaling/tests/unit/test_core_settings.py b/services/autoscaling/tests/unit/test_core_settings.py index bc63be64cff2..9d0abdae9e33 100644 --- a/services/autoscaling/tests/unit/test_core_settings.py +++ b/services/autoscaling/tests/unit/test_core_settings.py @@ -2,6 +2,13 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument # pylint: disable=unused-variable +""" +We can validate actual .env files (also refered as `repo.config` files) by passing them via the CLI + +$ ln -s /path/to/osparc-config/deployments/mydeploy.com/repo.config .secrets +$ pytest --external-envfile=.secrets --pdb tests/unit/test_core_settings.py + +""" import datetime import json @@ -10,6 +17,7 @@ from typing import Final import pytest +from common_library.json_serialization import json_dumps from faker import Faker from pydantic import ValidationError from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict @@ -66,6 +74,16 @@ def test_settings(app_environment: EnvVarsDict): assert settings.AUTOSCALING_REDIS +def test_settings_multiple_subnets( + app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch, faker: Faker +): + subnets = [faker.pystr() for _ in range(3)] + monkeypatch.setenv("EC2_INSTANCES_SUBNET_IDS", json_dumps(subnets)) + settings = ApplicationSettings.create_from_envs() + assert settings.AUTOSCALING_EC2_INSTANCES + assert subnets == settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_SUBNET_IDS + + def test_settings_dynamic_mode(enabled_dynamic_mode: EnvVarsDict): settings = ApplicationSettings.create_from_envs() assert settings.AUTOSCALING_EC2_ACCESS @@ -166,7 +184,6 @@ def test_EC2_INSTANCES_ALLOWED_TYPES_passing_invalid_image_tags( # noqa: N802 ) with caplog.at_level(logging.WARNING): - settings = ApplicationSettings.create_from_envs() assert settings.AUTOSCALING_EC2_INSTANCES is None @@ -259,7 +276,6 @@ def test_EC2_INSTANCES_ALLOWED_TYPES_empty_not_allowed_with_main_field_env_var( # NOTE: input captured via EnvSettingsWithAutoDefaultSource # default env factory -> None with caplog.at_level(logging.WARNING): - settings = ApplicationSettings.create_from_envs() assert settings.AUTOSCALING_EC2_INSTANCES is None @@ -287,7 +303,6 @@ def test_EC2_INSTANCES_ALLOWED_TYPES_empty_not_allowed_without_main_field_env_va # removing any value for AUTOSCALING_EC2_INSTANCES caplog.clear() with caplog.at_level(logging.WARNING): - settings = ApplicationSettings.create_from_envs() assert settings.AUTOSCALING_EC2_INSTANCES is None @@ -324,7 +339,6 @@ def test_EC2_INSTANCES_ALLOWED_TYPES_invalid_instance_names( # noqa: N802 ) caplog.clear() with caplog.at_level(logging.WARNING): - settings = ApplicationSettings.create_from_envs() assert settings.AUTOSCALING_EC2_INSTANCES is None diff --git a/services/autoscaling/tests/unit/test_main.py b/services/autoscaling/tests/unit/test_main.py index 525748023ec2..b1e0b41985a1 100644 --- a/services/autoscaling/tests/unit/test_main.py +++ b/services/autoscaling/tests/unit/test_main.py @@ -7,6 +7,6 @@ def test_main_app(app_environment: EnvVarsDict): - from simcore_service_autoscaling.main import the_app, the_settings + from simcore_service_autoscaling.main import app_factory - assert the_app.state.settings == the_settings + app_factory() diff --git a/services/autoscaling/tests/unit/test_models.py 
b/services/autoscaling/tests/unit/test_models.py index f2271889ddb2..9360f8072296 100644 --- a/services/autoscaling/tests/unit/test_models.py +++ b/services/autoscaling/tests/unit/test_models.py @@ -8,8 +8,9 @@ import aiodocker import pytest -from models_library.docker import DockerLabelKey, StandardSimcoreDockerLabels +from models_library.docker import DockerLabelKey from models_library.generated_models.docker_rest_api import Service, Task +from models_library.services_metadata_runtime import SimcoreContainerLabels from pydantic import TypeAdapter, ValidationError @@ -28,7 +29,7 @@ async def test_get_simcore_service_docker_labels_from_task_with_missing_labels_r assert service_tasks assert len(service_tasks) == 1 with pytest.raises(ValidationError): - StandardSimcoreDockerLabels.from_docker_task(service_tasks[0]) + SimcoreContainerLabels.from_docker_task(service_tasks[0]) async def test_get_simcore_service_docker_labels( @@ -37,7 +38,7 @@ async def test_get_simcore_service_docker_labels( [dict[str, Any], dict[DockerLabelKey, str], str], Awaitable[Service] ], task_template: dict[str, Any], - osparc_docker_label_keys: StandardSimcoreDockerLabels, + osparc_docker_label_keys: SimcoreContainerLabels, ): service_with_labels = await create_service( task_template, @@ -52,7 +53,7 @@ async def test_get_simcore_service_docker_labels( ) assert service_tasks assert len(service_tasks) == 1 - task_ownership = StandardSimcoreDockerLabels.from_docker_task(service_tasks[0]) + task_ownership = SimcoreContainerLabels.from_docker_task(service_tasks[0]) assert task_ownership assert task_ownership.user_id == osparc_docker_label_keys.user_id assert task_ownership.project_id == osparc_docker_label_keys.project_id diff --git a/services/autoscaling/tests/unit/test_modules_auto_scaling_task.py b/services/autoscaling/tests/unit/test_modules_cluster_scaling_auto_scaling_task.py similarity index 96% rename from services/autoscaling/tests/unit/test_modules_auto_scaling_task.py rename to services/autoscaling/tests/unit/test_modules_cluster_scaling_auto_scaling_task.py index 4a3d3e85baef..8778996d9e64 100644 --- a/services/autoscaling/tests/unit/test_modules_auto_scaling_task.py +++ b/services/autoscaling/tests/unit/test_modules_cluster_scaling_auto_scaling_task.py @@ -40,7 +40,7 @@ def app_environment( @pytest.fixture def mock_background_task(mocker: MockerFixture) -> mock.Mock: return mocker.patch( - "simcore_service_autoscaling.modules.auto_scaling_task.auto_scale_cluster", + "simcore_service_autoscaling.modules.cluster_scaling.auto_scaling_task.auto_scale_cluster", autospec=True, ) diff --git a/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py b/services/autoscaling/tests/unit/test_modules_cluster_scaling_computational.py similarity index 96% rename from services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py rename to services/autoscaling/tests/unit/test_modules_cluster_scaling_computational.py index 8a9f82ec8471..f83eaac9ea8b 100644 --- a/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py +++ b/services/autoscaling/tests/unit/test_modules_cluster_scaling_computational.py @@ -26,11 +26,18 @@ ) from faker import Faker from fastapi import FastAPI -from models_library.docker import DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY -from models_library.generated_models.docker_rest_api import Availability +from models_library.generated_models.docker_rest_api import ( + Availability, +) from models_library.generated_models.docker_rest_api import Node 
as DockerNode -from models_library.generated_models.docker_rest_api import NodeState, NodeStatus +from models_library.generated_models.docker_rest_api import ( + NodeState, + NodeStatus, +) from models_library.rabbitmq_messages import RabbitAutoscalingStatusMessage +from models_library.services_metadata_runtime import ( + DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY, +) from pydantic import ByteSize, TypeAdapter from pytest_mock import MockerFixture, MockType from pytest_simcore.helpers.autoscaling import ( @@ -41,9 +48,11 @@ from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from simcore_service_autoscaling.core.settings import ApplicationSettings from simcore_service_autoscaling.models import EC2InstanceData -from simcore_service_autoscaling.modules.auto_scaling_core import auto_scale_cluster -from simcore_service_autoscaling.modules.auto_scaling_mode_computational import ( - ComputationalAutoscaling, +from simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core import ( + auto_scale_cluster, +) +from simcore_service_autoscaling.modules.cluster_scaling._provider_computational import ( + ComputationalAutoscalingProvider, ) from simcore_service_autoscaling.modules.dask import DaskTaskResources from simcore_service_autoscaling.modules.docker import get_docker_client @@ -128,7 +137,7 @@ def mock_docker_find_node_with_name_returns_fake_node( mocker: MockerFixture, fake_node: DockerNode ) -> Iterator[mock.Mock]: return mocker.patch( - "simcore_service_autoscaling.modules.auto_scaling_core.utils_docker.find_node_with_name", + "simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core.utils_docker.find_node_with_name", autospec=True, return_value=fake_node, ) @@ -137,7 +146,7 @@ def mock_docker_find_node_with_name_returns_fake_node( @pytest.fixture def mock_docker_compute_node_used_resources(mocker: MockerFixture) -> mock.Mock: return mocker.patch( - "simcore_service_autoscaling.modules.auto_scaling_core.utils_docker.compute_node_used_resources", + "simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core.utils_docker.compute_node_used_resources", autospec=True, return_value=Resources.create_as_empty(), ) @@ -226,6 +235,13 @@ def mock_dask_is_worker_connected(mocker: MockerFixture) -> mock.Mock: ) +@pytest.fixture +def spy_dask_try_retire_nodes(mocker: MockerFixture) -> mock.Mock: + from simcore_service_autoscaling.modules import dask + + return mocker.spy(dask, "try_retire_nodes") + + async def _create_task_with_resources( ec2_client: EC2Client, dask_task_imposed_ec2_type: InstanceTypeType | None, @@ -326,7 +342,7 @@ async def test_cluster_scaling_with_no_tasks_does_nothing( dask_spec_local_cluster: distributed.SpecCluster, ): await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) mock_launch_instances.assert_not_called() mock_terminate_instances.assert_not_called() @@ -364,7 +380,7 @@ async def test_cluster_scaling_with_disabled_ssm_does_not_block_autoscaling( dask_spec_local_cluster: distributed.SpecCluster, ): await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) mock_launch_instances.assert_not_called() mock_terminate_instances.assert_not_called() @@ -405,7 +421,7 @@ async def test_cluster_scaling_with_task_with_too_much_resources_starts_nothing( assert dask_future await 
auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) mock_launch_instances.assert_not_called() mock_terminate_instances.assert_not_called() @@ -482,6 +498,7 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 mock_dask_get_worker_has_results_in_memory: mock.Mock, mock_dask_get_worker_used_resources: mock.Mock, mock_dask_is_worker_connected: mock.Mock, + spy_dask_try_retire_nodes: mock.Mock, mocker: MockerFixture, dask_spec_local_cluster: distributed.SpecCluster, with_drain_nodes_labelled: bool, @@ -497,7 +514,7 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 assert dask_futures # this should trigger a scaling up as we have no nodes await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) # check the instance was started and we have exactly 1 @@ -518,6 +535,7 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 mock_dask_get_worker_has_results_in_memory.assert_not_called() mock_dask_get_worker_used_resources.assert_not_called() mock_dask_is_worker_connected.assert_not_called() + spy_dask_try_retire_nodes.assert_not_called() # check rabbit messages were sent _assert_rabbit_autoscaling_message_sent( mock_rabbitmq_post_message, @@ -531,13 +549,14 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 # 2. running this again should not scale again, but tag the node and make it available await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) mock_dask_get_worker_has_results_in_memory.assert_called_once() mock_dask_get_worker_has_results_in_memory.reset_mock() mock_dask_get_worker_used_resources.assert_called_once() mock_dask_get_worker_used_resources.reset_mock() mock_dask_is_worker_connected.assert_not_called() + spy_dask_try_retire_nodes.assert_not_called() instances = await assert_autoscaled_computational_ec2_instances( ec2_client, expected_num_reservations=1, @@ -629,7 +648,7 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 assert fake_attached_node.description fake_attached_node.description.hostname = internal_dns_name - auto_scaling_mode = ComputationalAutoscaling() + auto_scaling_mode = ComputationalAutoscalingProvider() mocker.patch.object( auto_scaling_mode, "get_monitored_nodes", @@ -658,6 +677,7 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 assert mock_docker_tag_node.call_count == num_useless_calls mock_docker_tag_node.reset_mock() mock_docker_set_node_availability.assert_not_called() + spy_dask_try_retire_nodes.assert_not_called() # check the number of instances did not change and is still running await assert_autoscaled_computational_ec2_instances( ec2_client, @@ -688,6 +708,8 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 mock_dask_get_worker_used_resources.reset_mock() # the node shall be waiting before draining mock_docker_set_node_availability.assert_not_called() + spy_dask_try_retire_nodes.assert_called_once() + spy_dask_try_retire_nodes.reset_mock() mock_docker_tag_node.assert_called_once_with( get_docker_client(initialized_app), fake_attached_node, @@ -722,6 +744,8 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 mock_dask_get_worker_used_resources.reset_mock() # the node shall be set to drain, but not yet terminated 
mock_docker_set_node_availability.assert_not_called() + spy_dask_try_retire_nodes.assert_called_once() + spy_dask_try_retire_nodes.reset_mock() mock_docker_tag_node.assert_called_once_with( get_docker_client(initialized_app), fake_attached_node, @@ -766,7 +790,7 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 < app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION ) mocked_docker_remove_node = mocker.patch( - "simcore_service_autoscaling.modules.auto_scaling_core.utils_docker.remove_nodes", + "simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core.utils_docker.remove_nodes", return_value=None, autospec=True, ) @@ -818,6 +842,7 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 ) .datetime.isoformat() ) + spy_dask_try_retire_nodes.assert_not_called() await auto_scale_cluster(app=initialized_app, auto_scaling_mode=auto_scaling_mode) mocked_docker_remove_node.assert_called_once_with( mock.ANY, nodes=[fake_attached_node], force=True @@ -830,7 +855,7 @@ async def test_cluster_scaling_up_and_down( # noqa: PLR0915 expected_instance_state="terminated", expected_additional_tag_keys=list(ec2_instance_custom_tags), ) - + spy_dask_try_retire_nodes.assert_not_called() # this call should never be used in computational mode mock_docker_compute_node_used_resources.assert_not_called() @@ -873,7 +898,7 @@ async def test_cluster_does_not_scale_up_if_defined_instance_is_not_allowed( # this should trigger a scaling up as we have no nodes await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) # nothing runs @@ -924,7 +949,7 @@ async def test_cluster_does_not_scale_up_if_defined_instance_is_not_fitting_reso # this should trigger a scaling up as we have no nodes await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) # nothing runs @@ -991,7 +1016,7 @@ async def test_cluster_scaling_up_starts_multiple_instances( # run the code await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) # check the instances were started @@ -1083,7 +1108,7 @@ async def test_cluster_scaling_up_more_than_allowed_max_starts_max_instances_and # this should trigger a scaling up as we have no nodes await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) await assert_autoscaled_computational_ec2_instances( ec2_client, @@ -1115,7 +1140,7 @@ async def test_cluster_scaling_up_more_than_allowed_max_starts_max_instances_and num_useless_calls = 10 for _ in range(num_useless_calls): await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) await assert_autoscaled_computational_ec2_instances( ec2_client, @@ -1184,7 +1209,7 @@ async def test_cluster_scaling_up_more_than_allowed_with_multiple_types_max_star # this should trigger a scaling up as we have no nodes await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) # one of each type is created with some that will have 2 instances @@ -1228,7 +1253,7 @@ 
async def test_cluster_scaling_up_more_than_allowed_with_multiple_types_max_star num_useless_calls = 10 for _ in range(num_useless_calls): await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) all_instances = await ec2_client.describe_instances() assert len(all_instances["Reservations"]) == len( @@ -1294,7 +1319,7 @@ async def test_long_pending_ec2_is_detected_as_broken_terminated_and_restarted( # this should trigger a scaling up as we have no nodes await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) # check the instance was started and we have exactly 1 @@ -1338,7 +1363,7 @@ async def test_long_pending_ec2_is_detected_as_broken_terminated_and_restarted( # 2. running again several times the autoscaler, the node does not join for i in range(7): await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) # there should be no scaling up, since there is already a pending instance instances = await assert_autoscaled_computational_ec2_instances( @@ -1382,7 +1407,7 @@ async def test_long_pending_ec2_is_detected_as_broken_terminated_and_restarted( ) # scaling now will terminate the broken ec2 that did not connect, and directly create a replacement await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) # we have therefore 2 reservations, first instance is terminated and a second one started all_instances = await ec2_client.describe_instances() @@ -1485,7 +1510,7 @@ async def test_cluster_adapts_machines_on_the_fly( # it will only scale once and do nothing else await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) await assert_autoscaled_computational_ec2_instances( ec2_client, @@ -1512,7 +1537,7 @@ async def test_cluster_adapts_machines_on_the_fly( # # 2. 
now the machines are associated await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) analyzed_cluster = assert_cluster_state( spied_cluster_analysis, @@ -1535,7 +1560,7 @@ async def test_cluster_adapts_machines_on_the_fly( # scaling will do nothing since we have hit the maximum number of machines for _ in range(3): await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) await assert_autoscaled_computational_ec2_instances( ec2_client, @@ -1565,11 +1590,11 @@ async def test_cluster_adapts_machines_on_the_fly( # first call to auto_scale_cluster will mark 1 node as empty with mock.patch( - "simcore_service_autoscaling.modules.auto_scaling_core.utils_docker.set_node_found_empty", + "simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core.utils_docker.set_node_found_empty", autospec=True, ) as mock_docker_set_node_found_empty: await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) analyzed_cluster = assert_cluster_state( spied_cluster_analysis, @@ -1587,14 +1612,14 @@ async def test_cluster_adapts_machines_on_the_fly( # now we mock the get_node_found_empty so the next call will actually drain the machine with mock.patch( - "simcore_service_autoscaling.modules.auto_scaling_core.utils_docker.get_node_empty_since", + "simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core.utils_docker.get_node_empty_since", autospec=True, return_value=arrow.utcnow().datetime - 1.5 * app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_DRAINING, ) as mocked_get_node_empty_since: await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) mocked_get_node_empty_since.assert_called_once() analyzed_cluster = assert_cluster_state( @@ -1610,7 +1635,7 @@ async def test_cluster_adapts_machines_on_the_fly( create_fake_node, drained_machine_instance_id, None ) await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) analyzed_cluster = assert_cluster_state( spied_cluster_analysis, @@ -1622,7 +1647,7 @@ async def test_cluster_adapts_machines_on_the_fly( # this will initiate termination now with mock.patch( - "simcore_service_autoscaling.modules.auto_scaling_core.utils_docker.get_node_last_readyness_update", + "simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core.utils_docker.get_node_last_readyness_update", autospec=True, return_value=arrow.utcnow().datetime - 1.5 @@ -1630,7 +1655,7 @@ async def test_cluster_adapts_machines_on_the_fly( ): mock_docker_tag_node.reset_mock() await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) analyzed_cluster = assert_cluster_state( spied_cluster_analysis, @@ -1649,7 +1674,7 @@ async def test_cluster_adapts_machines_on_the_fly( create_fake_node, drained_machine_instance_id, drained_machine_instance_id ) await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, 
auto_scaling_mode=ComputationalAutoscalingProvider() ) analyzed_cluster = assert_cluster_state( spied_cluster_analysis, @@ -1662,19 +1687,19 @@ async def test_cluster_adapts_machines_on_the_fly( # now this will terminate it and straight away start a new machine type with mock.patch( - "simcore_service_autoscaling.modules.auto_scaling_core.utils_docker.get_node_termination_started_since", + "simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core.utils_docker.get_node_termination_started_since", autospec=True, return_value=arrow.utcnow().datetime - 1.5 * app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION, ): mocked_docker_remove_node = mocker.patch( - "simcore_service_autoscaling.modules.auto_scaling_core.utils_docker.remove_nodes", + "simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core.utils_docker.remove_nodes", return_value=None, autospec=True, ) await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=ComputationalAutoscaling() + app=initialized_app, auto_scaling_mode=ComputationalAutoscalingProvider() ) mocked_docker_remove_node.assert_called_once() diff --git a/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py b/services/autoscaling/tests/unit/test_modules_cluster_scaling_dynamic.py similarity index 88% rename from services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py rename to services/autoscaling/tests/unit/test_modules_cluster_scaling_dynamic.py index a46a75c8006a..40b3f6b3b90c 100644 --- a/services/autoscaling/tests/unit/test_modules_auto_scaling_dynamic.py +++ b/services/autoscaling/tests/unit/test_modules_cluster_scaling_dynamic.py @@ -18,15 +18,15 @@ import aiodocker import arrow +import botocore.exceptions import pytest import tenacity from aws_library.ec2 import EC2InstanceBootSpecific, EC2InstanceData, Resources +from common_library.json_serialization import json_dumps from fastapi import FastAPI from models_library.docker import ( - DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY, DockerGenericTag, DockerLabelKey, - StandardSimcoreDockerLabels, ) from models_library.generated_models.docker_rest_api import ( Availability, @@ -37,6 +37,10 @@ Task, ) from models_library.rabbitmq_messages import RabbitAutoscalingStatusMessage +from models_library.services_metadata_runtime import ( + DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY, + SimcoreContainerLabels, +) from pydantic import ByteSize, TypeAdapter from pytest_mock import MockType from pytest_mock.plugin import MockerFixture @@ -53,28 +57,29 @@ from simcore_service_autoscaling.constants import BUFFER_MACHINE_TAG_KEY from simcore_service_autoscaling.core.settings import ApplicationSettings from simcore_service_autoscaling.models import AssociatedInstance, Cluster -from simcore_service_autoscaling.modules.auto_scaling_core import ( +from simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core import ( _activate_drained_nodes, _find_terminateable_instances, _try_scale_down_cluster, auto_scale_cluster, ) -from simcore_service_autoscaling.modules.auto_scaling_mode_dynamic import ( - DynamicAutoscaling, +from simcore_service_autoscaling.modules.cluster_scaling._provider_dynamic import ( + DynamicAutoscalingProvider, ) from simcore_service_autoscaling.modules.docker import ( AutoscalingDocker, get_docker_client, ) -from simcore_service_autoscaling.utils.auto_scaling_core import ( - node_host_name_from_ec2_private_dns, -) +from simcore_service_autoscaling.modules.ec2 import get_ec2_client from 
simcore_service_autoscaling.utils.utils_docker import ( _OSPARC_NODE_EMPTY_DATETIME_LABEL_KEY, _OSPARC_NODE_TERMINATION_PROCESS_LABEL_KEY, _OSPARC_SERVICE_READY_LABEL_KEY, _OSPARC_SERVICES_READY_DATETIME_LABEL_KEY, ) +from simcore_service_autoscaling.utils.utils_ec2 import ( + node_host_name_from_ec2_private_dns, +) from types_aiobotocore_ec2.client import EC2Client from types_aiobotocore_ec2.literals import InstanceStateNameType, InstanceTypeType from types_aiobotocore_ec2.type_defs import FilterTypeDef, InstanceTypeDef @@ -113,7 +118,7 @@ def mock_find_node_with_name_returns_fake_node( mocker: MockerFixture, fake_node: Node ) -> Iterator[mock.Mock]: return mocker.patch( - "simcore_service_autoscaling.modules.auto_scaling_core.utils_docker.find_node_with_name", + "simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core.utils_docker.find_node_with_name", autospec=True, return_value=fake_node, ) @@ -122,7 +127,7 @@ def mock_find_node_with_name_returns_fake_node( @pytest.fixture def mock_remove_nodes(mocker: MockerFixture) -> mock.Mock: return mocker.patch( - "simcore_service_autoscaling.modules.auto_scaling_core.utils_docker.remove_nodes", + "simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core.utils_docker.remove_nodes", autospec=True, ) @@ -130,7 +135,7 @@ def mock_remove_nodes(mocker: MockerFixture) -> mock.Mock: @pytest.fixture def mock_compute_node_used_resources(mocker: MockerFixture) -> mock.Mock: return mocker.patch( - "simcore_service_autoscaling.modules.auto_scaling_core.utils_docker.compute_node_used_resources", + "simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core.utils_docker.compute_node_used_resources", autospec=True, return_value=Resources.create_as_empty(), ) @@ -273,7 +278,7 @@ async def create_services_batch( task_template: dict[str, Any], create_task_reservations: Callable[[int, int], dict[str, Any]], service_monitored_labels: dict[DockerLabelKey, str], - osparc_docker_label_keys: StandardSimcoreDockerLabels, + osparc_docker_label_keys: SimcoreContainerLabels, ) -> Callable[[_ScaleUpParams], Awaitable[list[Service]]]: async def _(scale_up_params: _ScaleUpParams) -> list[Service]: return await asyncio.gather( @@ -323,7 +328,7 @@ async def test_cluster_scaling_with_no_services_does_nothing( mock_rabbitmq_post_message: mock.Mock, ): await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) mock_launch_instances.assert_not_called() mock_terminate_instances.assert_not_called() @@ -362,7 +367,7 @@ async def test_cluster_scaling_with_no_services_and_machine_buffer_starts_expect ): assert app_settings.AUTOSCALING_EC2_INSTANCES await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) await assert_autoscaled_dynamic_ec2_instances( ec2_client, @@ -387,7 +392,7 @@ async def test_cluster_scaling_with_no_services_and_machine_buffer_starts_expect mock_rabbitmq_post_message.reset_mock() # calling again should attach the new nodes to the reserve, but nothing should start await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) await assert_autoscaled_dynamic_ec2_instances( ec2_client, @@ -426,7 +431,7 @@ async def test_cluster_scaling_with_no_services_and_machine_buffer_starts_expect # calling it again should not create anything new 
for _ in range(10): await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) await assert_autoscaled_dynamic_ec2_instances( ec2_client, @@ -486,7 +491,7 @@ async def test_cluster_scaling_with_service_asking_for_too_much_resources_starts await create_services_batch(scale_up_params) await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) mock_launch_instances.assert_not_called() mock_terminate_instances.assert_not_called() @@ -529,7 +534,7 @@ async def _test_cluster_scaling_up_and_down( # noqa: PLR0915 # this should trigger a scaling up as we have no nodes await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) assert_cluster_state( spied_cluster_analysis, expected_calls=1, expected_num_machines=0 @@ -578,7 +583,7 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: # 2. running this again should not scale again, but tag the node and make it available await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) assert_cluster_state( spied_cluster_analysis, expected_calls=1, expected_num_machines=1 @@ -591,13 +596,11 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: ) assert fake_attached_node.spec.labels assert app_settings.AUTOSCALING_NODES_MONITORING - expected_docker_node_tags = { - tag_key: "true" - for tag_key in ( - app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NODE_LABELS - + app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NEW_NODES_LABELS - ) - } | { + expected_docker_node_tags = dict.fromkeys( + app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NODE_LABELS + + app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NEW_NODES_LABELS, + "true", + ) | { DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY: scale_up_params.expected_instance_type } fake_attached_node.spec.labels |= expected_docker_node_tags | { @@ -713,7 +716,7 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: fake_attached_node.spec.availability = Availability.active fake_attached_node.description.hostname = internal_dns_name - auto_scaling_mode = DynamicAutoscaling() + auto_scaling_mode = DynamicAutoscalingProvider() mocker.patch.object( auto_scaling_mode, "get_monitored_nodes", @@ -862,7 +865,7 @@ async def _assert_wait_for_ec2_instances_running() -> list[InstanceTypeDef]: < app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION ) mocked_docker_remove_node = mocker.patch( - "simcore_service_autoscaling.modules.auto_scaling_core.utils_docker.remove_nodes", + "simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core.utils_docker.remove_nodes", return_value=None, autospec=True, ) @@ -1069,7 +1072,7 @@ async def test_cluster_scaling_up_and_down( ], ) async def test_cluster_scaling_up_and_down_against_aws( - skip_if_external_envfile_dict: None, + skip_if_no_external_envfile: None, external_ec2_instances_allowed_types: None | dict[str, EC2InstanceBootSpecific], with_labelize_drain_nodes: EnvVarsDict, app_with_docker_join_drained: EnvVarsDict, @@ -1192,7 +1195,7 @@ async def test_cluster_scaling_up_starts_multiple_instances( # run the code await auto_scale_cluster( 
- app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) # check the instances were started @@ -1254,7 +1257,7 @@ async def test_cluster_scaling_up_starts_multiple_instances( expected_instance_type="g4dn.8xlarge", # 32CPUs, 128GiB expected_num_instances=7, ), - id="A batch of services requiring g3.4xlarge and a batch requiring g4dn.8xlarge", + id="A batch of services requiring g4dn.2xlarge and a batch requiring g4dn.8xlarge", ), ], ) @@ -1294,7 +1297,7 @@ async def test_cluster_adapts_machines_on_the_fly( # noqa: PLR0915 # it will only scale once and do nothing else await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) await assert_autoscaled_dynamic_ec2_instances( ec2_client, @@ -1319,7 +1322,7 @@ async def test_cluster_adapts_machines_on_the_fly( # noqa: PLR0915 # # 2. now the machines are associated await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) analyzed_cluster = assert_cluster_state( spied_cluster_analysis, @@ -1341,7 +1344,7 @@ async def test_cluster_adapts_machines_on_the_fly( # noqa: PLR0915 # scaling will do nothing since we have hit the maximum number of machines for _ in range(3): await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) await assert_autoscaled_dynamic_ec2_instances( ec2_client, @@ -1380,11 +1383,11 @@ async def test_cluster_adapts_machines_on_the_fly( # noqa: PLR0915 # first call to auto_scale_cluster will mark 1 node as empty with mock.patch( - "simcore_service_autoscaling.modules.auto_scaling_core.utils_docker.set_node_found_empty", + "simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core.utils_docker.set_node_found_empty", autospec=True, ) as mock_docker_set_node_found_empty: await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) analyzed_cluster = assert_cluster_state( spied_cluster_analysis, @@ -1402,14 +1405,14 @@ async def test_cluster_adapts_machines_on_the_fly( # noqa: PLR0915 # now we mock the get_node_found_empty so the next call will actually drain the machine with mock.patch( - "simcore_service_autoscaling.modules.auto_scaling_core.utils_docker.get_node_empty_since", + "simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core.utils_docker.get_node_empty_since", autospec=True, return_value=arrow.utcnow().datetime - 1.5 * app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_DRAINING, ) as mocked_get_node_empty_since: await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) mocked_get_node_empty_since.assert_called_once() analyzed_cluster = assert_cluster_state( @@ -1425,7 +1428,7 @@ async def test_cluster_adapts_machines_on_the_fly( # noqa: PLR0915 create_fake_node, drained_machine_instance_id, None ) await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) analyzed_cluster = assert_cluster_state( spied_cluster_analysis, @@ -1437,7 +1440,7 @@ async def test_cluster_adapts_machines_on_the_fly( # noqa: PLR0915 # 
this will initiate termination now with mock.patch( - "simcore_service_autoscaling.modules.auto_scaling_core.utils_docker.get_node_last_readyness_update", + "simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core.utils_docker.get_node_last_readyness_update", autospec=True, return_value=arrow.utcnow().datetime - 1.5 @@ -1445,7 +1448,7 @@ async def test_cluster_adapts_machines_on_the_fly( # noqa: PLR0915 ): mock_docker_tag_node.reset_mock() await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) analyzed_cluster = assert_cluster_state( spied_cluster_analysis, @@ -1464,7 +1467,7 @@ async def test_cluster_adapts_machines_on_the_fly( # noqa: PLR0915 create_fake_node, drained_machine_instance_id, drained_machine_instance_id ) await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) analyzed_cluster = assert_cluster_state( spied_cluster_analysis, @@ -1477,19 +1480,19 @@ async def test_cluster_adapts_machines_on_the_fly( # noqa: PLR0915 # now this will terminate it and straight away start a new machine type with mock.patch( - "simcore_service_autoscaling.modules.auto_scaling_core.utils_docker.get_node_termination_started_since", + "simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core.utils_docker.get_node_termination_started_since", autospec=True, return_value=arrow.utcnow().datetime - 1.5 * app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION, ): mocked_docker_remove_node = mocker.patch( - "simcore_service_autoscaling.modules.auto_scaling_core.utils_docker.remove_nodes", + "simcore_service_autoscaling.modules.cluster_scaling._auto_scaling_core.utils_docker.remove_nodes", return_value=None, autospec=True, ) await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) mocked_docker_remove_node.assert_called_once() @@ -1578,7 +1581,7 @@ async def test_long_pending_ec2_is_detected_as_broken_terminated_and_restarted( # this should trigger a scaling up as we have no nodes await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) # check the instance was started and we have exactly 1 @@ -1622,7 +1625,7 @@ async def test_long_pending_ec2_is_detected_as_broken_terminated_and_restarted( # 2. 
running again several times the autoscaler, the node does not join for i in range(7): await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) # there should be no scaling up, since there is already a pending instance instances = await assert_autoscaled_dynamic_ec2_instances( @@ -1666,7 +1669,7 @@ async def test_long_pending_ec2_is_detected_as_broken_terminated_and_restarted( ) # scaling now will terminate the broken ec2 that did not connect, and directly create a replacement await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) # we have therefore 2 reservations, first instance is terminated and a second one started all_instances = await ec2_client.describe_instances() @@ -1720,11 +1723,11 @@ async def test__find_terminateable_nodes_with_no_hosts( AssociatedInstance(node=host_node, ec2_instance=fake_ec2_instance_data()) ], drained_nodes=[], - buffer_drained_nodes=[ + hot_buffer_drained_nodes=[ AssociatedInstance(node=host_node, ec2_instance=fake_ec2_instance_data()) ], ) - assert await _find_terminateable_instances(initialized_app, active_cluster) == [] + assert _find_terminateable_instances(initialized_app, active_cluster) == [] @pytest.mark.parametrize( @@ -1754,7 +1757,7 @@ async def test__try_scale_down_cluster_with_no_nodes( drained_nodes=[ create_associated_instance(drained_host_node, False) # noqa: FBT003 ], - buffer_drained_nodes=[ + hot_buffer_drained_nodes=[ create_associated_instance(drained_host_node, True) # noqa: FBT003 ], ) @@ -1795,7 +1798,7 @@ async def test__activate_drained_nodes_with_no_tasks( drained_nodes=[ create_associated_instance(drained_host_node, True) # noqa: FBT003 ], - buffer_drained_nodes=[ + hot_buffer_drained_nodes=[ create_associated_instance(drained_host_node, True) # noqa: FBT003 ], ) @@ -2006,7 +2009,7 @@ async def test_warm_buffers_are_started_to_replace_missing_hot_buffers( # let's autoscale, this should move the warm buffers to hot buffers await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) mock_docker_tag_node.assert_not_called() # at analysis time, we had no machines running @@ -2016,8 +2019,8 @@ async def test_warm_buffers_are_started_to_replace_missing_hot_buffers( expected_num_machines=0, ) assert not analyzed_cluster.active_nodes - assert analyzed_cluster.buffer_ec2s - assert len(analyzed_cluster.buffer_ec2s) == len(buffer_machines) + assert analyzed_cluster.warm_buffer_ec2s + assert len(analyzed_cluster.warm_buffer_ec2s) == len(buffer_machines) # now we should have a warm buffer moved to the hot buffer await assert_autoscaled_dynamic_ec2_instances( @@ -2041,7 +2044,7 @@ async def test_warm_buffers_are_started_to_replace_missing_hot_buffers( # let's autoscale again, to check the cluster analysis and tag the nodes await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) mock_docker_tag_node.assert_called() assert ( @@ -2055,14 +2058,14 @@ async def test_warm_buffers_are_started_to_replace_missing_hot_buffers( expected_num_machines=app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER, ) assert not analyzed_cluster.active_nodes - assert len(analyzed_cluster.buffer_ec2s) == max( + assert 
len(analyzed_cluster.warm_buffer_ec2s) == max( 0, buffer_count - app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER, ), ( "the warm buffers were not used as expected there should be" f" {buffer_count - app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER} remaining, " - f"found {len(analyzed_cluster.buffer_ec2s)}" + f"found {len(analyzed_cluster.warm_buffer_ec2s)}" ) assert ( len(analyzed_cluster.pending_ec2s) @@ -2082,7 +2085,7 @@ async def test_warm_buffers_are_started_to_replace_missing_hot_buffers( ["with_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], indirect=True, ) -async def test_warm_buffers_only_replace_hot_buffer_if_service_is_started_issue7071( +async def test_warm_buffers_only_replace_hot_buffer_if_service_is_started_issue7071( # noqa: PLR0915 patch_ec2_client_launch_instances_min_number_of_instances: mock.Mock, minimal_configuration: None, with_instances_machines_hot_buffer: EnvVarsDict, @@ -2124,7 +2127,7 @@ async def test_warm_buffers_only_replace_hot_buffer_if_service_is_started_issue7 # ensure we get our running hot buffer await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) await assert_autoscaled_dynamic_ec2_instances( ec2_client, @@ -2137,7 +2140,7 @@ async def test_warm_buffers_only_replace_hot_buffer_if_service_is_started_issue7 ) # this brings a new analysis await auto_scale_cluster( - app=initialized_app, auto_scaling_mode=DynamicAutoscaling() + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) spied_cluster = assert_cluster_state( spied_cluster_analysis, expected_calls=2, expected_num_machines=5 @@ -2150,13 +2153,11 @@ async def test_warm_buffers_only_replace_hot_buffer_if_service_is_started_issue7 ) assert fake_attached_node_base.spec.labels assert app_settings.AUTOSCALING_NODES_MONITORING - expected_docker_node_tags = { - tag_key: "true" - for tag_key in ( - app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NODE_LABELS - + app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NEW_NODES_LABELS - ) - } | { + expected_docker_node_tags = dict.fromkeys( + app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NODE_LABELS + + app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NEW_NODES_LABELS, + "true", + ) | { DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY: f"{hot_buffer_instance_type}" } fake_attached_node_base.spec.labels |= expected_docker_node_tags | { @@ -2172,7 +2173,7 @@ async def test_warm_buffers_only_replace_hot_buffer_if_service_is_started_issue7 spied_cluster.pending_ec2s[i].ec2_instance ) fake_hot_buffer_nodes.append(node) - auto_scaling_mode = DynamicAutoscaling() + auto_scaling_mode = DynamicAutoscalingProvider() mocker.patch.object( auto_scaling_mode, "get_monitored_nodes", @@ -2193,8 +2194,8 @@ async def test_warm_buffers_only_replace_hot_buffer_if_service_is_started_issue7 spied_cluster = assert_cluster_state( spied_cluster_analysis, expected_calls=1, expected_num_machines=5 ) - assert len(spied_cluster.buffer_drained_nodes) == num_hot_buffer - assert not spied_cluster.buffer_ec2s + assert len(spied_cluster.hot_buffer_drained_nodes) == num_hot_buffer + assert not spied_cluster.warm_buffer_ec2s # have a few warm buffers ready with the same type as the hot buffer machines await create_buffer_machines( @@ -2238,8 +2239,8 @@ async def test_warm_buffers_only_replace_hot_buffer_if_service_is_started_issue7 spied_cluster = assert_cluster_state( spied_cluster_analysis, 
expected_calls=1, expected_num_machines=5 ) - assert len(spied_cluster.buffer_drained_nodes) == num_hot_buffer - assert len(spied_cluster.buffer_ec2s) == buffer_count + assert len(spied_cluster.hot_buffer_drained_nodes) == num_hot_buffer + assert len(spied_cluster.warm_buffer_ec2s) == buffer_count # # BUG REPRODUCTION @@ -2307,8 +2308,8 @@ async def test_warm_buffers_only_replace_hot_buffer_if_service_is_started_issue7 spied_cluster = assert_cluster_state( spied_cluster_analysis, expected_calls=2, expected_num_machines=6 ) - assert len(spied_cluster.buffer_drained_nodes) == num_hot_buffer - 1 - assert len(spied_cluster.buffer_ec2s) == buffer_count - 1 + assert len(spied_cluster.hot_buffer_drained_nodes) == num_hot_buffer - 1 + assert len(spied_cluster.warm_buffer_ec2s) == buffer_count - 1 assert len(spied_cluster.active_nodes) == 1 assert len(spied_cluster.pending_ec2s) == 1 @@ -2326,10 +2327,199 @@ async def _check_autoscaling_is_stable() -> None: spied_cluster = assert_cluster_state( spied_cluster_analysis, expected_calls=1, expected_num_machines=6 ) - assert len(spied_cluster.buffer_drained_nodes) == num_hot_buffer - 1 - assert len(spied_cluster.buffer_ec2s) == buffer_count - 1 + assert len(spied_cluster.hot_buffer_drained_nodes) == num_hot_buffer - 1 + assert len(spied_cluster.warm_buffer_ec2s) == buffer_count - 1 assert len(spied_cluster.active_nodes) == 1 assert len(spied_cluster.pending_ec2s) == 1 with pytest.raises(tenacity.RetryError): await _check_autoscaling_is_stable() + + +@pytest.fixture +async def with_multiple_small_subnet_ids( + create_aws_subnet_id: Callable[..., Awaitable[str]], monkeypatch: pytest.MonkeyPatch +) -> tuple[str, ...]: + subnet_1 = await create_aws_subnet_id("10.0.200.0/29") # 3 usable IPs + subnet_2 = await create_aws_subnet_id("10.0.201.0/29") # 3 usable IPs + monkeypatch.setenv("EC2_INSTANCES_SUBNET_IDS", json_dumps([subnet_1, subnet_2])) + return subnet_1, subnet_2 + + +@pytest.mark.parametrize( + "scale_up_params", + [ + pytest.param( + _ScaleUpParams( + imposed_instance_type=None, + service_resources=Resources( + cpus=5, ram=TypeAdapter(ByteSize).validate_python("36Gib") + ), + num_services=1, + expected_instance_type="r5n.4xlarge", # 1 GPU, 16 CPUs, 128GiB + expected_num_instances=1, + ), + ), + ], +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["without_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["with_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) +async def test_fresh_instance_is_started_in_second_subnet_if_warm_buffers_used_up_all_ips_in_first_subnet( + patch_ec2_client_launch_instances_min_number_of_instances: mock.Mock, + minimal_configuration: None, + with_multiple_small_subnet_ids: tuple[str, ...], + initialized_app: FastAPI, + app_settings: ApplicationSettings, + create_buffer_machines: Callable[ + [int, InstanceTypeType, InstanceStateNameType, list[DockerGenericTag] | None], + Awaitable[list[str]], + ], + ec2_client: EC2Client, + scale_up_params: _ScaleUpParams, + create_services_batch: Callable[[_ScaleUpParams], Awaitable[list[Service]]], + ec2_instance_custom_tags: dict[str, str], + instance_type_filters: Sequence[FilterTypeDef], +): + # we have nothing running now + all_instances = await ec2_client.describe_instances() + assert not all_instances["Reservations"] + + # have 
warm buffers in the first subnet *fixture uses subnet_1 by default*, this will use all the IPs in the first subnet + assert app_settings.AUTOSCALING_EC2_INSTANCES + await create_buffer_machines( + 3, + cast( + InstanceTypeType, + next( + iter(app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_ALLOWED_TYPES) + ), + ), + "stopped", + None, + ) + + # create several tasks that needs more power + await create_services_batch(scale_up_params) + # now autoscale shall create machines in the second subnet + await auto_scale_cluster( + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() + ) + # check the instances were started + created_instances = await assert_autoscaled_dynamic_ec2_instances( + ec2_client, + expected_num_reservations=1, + expected_num_instances=scale_up_params.expected_num_instances, + expected_instance_type=scale_up_params.expected_instance_type, + expected_instance_state="running", + expected_additional_tag_keys=list(ec2_instance_custom_tags), + instance_filters=instance_type_filters, + ) + # check the instance is in the second subnet + assert created_instances + assert "SubnetId" in created_instances[0] + assert created_instances[0]["SubnetId"] == with_multiple_small_subnet_ids[1] + + +@pytest.fixture +def mock_start_instances_to_raise_insufficient_capacity_error( + initialized_app: FastAPI, + mocker: MockerFixture, +) -> mock.Mock: + async def _raise_insufficient_capacity_error(*args: Any, **kwargs: Any) -> None: + raise botocore.exceptions.ClientError( + error_response={ + "Error": { + "Code": "InsufficientInstanceCapacity", + "Message": "An error occurred (InsufficientInstanceCapacity) when calling the RunInstances operation (reached max retries: 4): We currently do not have sufficient g4dn.4xlarge capacity in the Availability Zone you requested (us-east-1a). Our system will be working on provisioning additional capacity. 
You can currently get g4dn.4xlarge capacity by not specifying an Availability Zone in your request or choosing us-east-1b, us-east-1c, us-east-1d, us-east-1f", + } + }, + operation_name="StartInstances", + ) + + return mocker.patch.object( + get_ec2_client(initialized_app).client, + "start_instances", + autospec=True, + side_effect=_raise_insufficient_capacity_error, + ) + + +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_docker_join_drained", + ["without_AUTOSCALING_DOCKER_JOIN_DRAINED"], + indirect=True, +) +@pytest.mark.parametrize( + # NOTE: only the main test test_cluster_scaling_up_and_down is run with all options + "with_drain_nodes_labelled", + ["with_AUTOSCALING_DRAIN_NODES_WITH_LABELS"], + indirect=True, +) +async def test_fresh_instance_is_launched_if_warm_buffers_cannot_start_due_to_insufficient_capacity_error( + patch_ec2_client_launch_instances_min_number_of_instances: mock.Mock, + minimal_configuration: None, + with_multiple_small_subnet_ids: tuple[str, ...], + initialized_app: FastAPI, + mock_start_instances_to_raise_insufficient_capacity_error: None, + app_settings: ApplicationSettings, + create_buffer_machines: Callable[ + [int, InstanceTypeType, InstanceStateNameType, list[DockerGenericTag] | None], + Awaitable[list[str]], + ], + ec2_client: EC2Client, + create_services_batch: Callable[[_ScaleUpParams], Awaitable[list[Service]]], + ec2_instance_custom_tags: dict[str, str], + instance_type_filters: Sequence[FilterTypeDef], +): + # we have nothing running now + all_instances = await ec2_client.describe_instances() + assert not all_instances["Reservations"] + + # have warm buffers in the first subnet *fixture uses subnet_1 by default*, this will use all the IPs in the first subnet + assert app_settings.AUTOSCALING_EC2_INSTANCES + warm_buffer_instance_type = cast( + InstanceTypeType, + next(iter(app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_ALLOWED_TYPES)), + ) + await create_buffer_machines(3, warm_buffer_instance_type, "stopped", None) + + # create several tasks that needs more power + scale_up_params = _ScaleUpParams( + imposed_instance_type=warm_buffer_instance_type, + service_resources=Resources( + cpus=1, ram=TypeAdapter(ByteSize).validate_python("1Gib") + ), + num_services=1, + expected_instance_type=warm_buffer_instance_type, + expected_num_instances=1, + ) + await create_services_batch(scale_up_params) + # now autoscale shall create machines in the second subnet + await auto_scale_cluster( + app=initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() + ) + # check the instances were started + created_instances = await assert_autoscaled_dynamic_ec2_instances( + ec2_client, + expected_num_reservations=1, + expected_num_instances=scale_up_params.expected_num_instances, + expected_instance_type=scale_up_params.expected_instance_type, + expected_instance_state="running", + expected_additional_tag_keys=list(ec2_instance_custom_tags), + instance_filters=instance_type_filters, + ) + # check the instance is in the second subnet + assert created_instances + assert "SubnetId" in created_instances[0] + assert created_instances[0]["SubnetId"] == with_multiple_small_subnet_ids[1] diff --git a/services/autoscaling/tests/unit/test_utils_computational_scaling.py b/services/autoscaling/tests/unit/test_modules_cluster_scaling_utils_computational.py similarity index 95% rename from services/autoscaling/tests/unit/test_utils_computational_scaling.py rename to 
services/autoscaling/tests/unit/test_modules_cluster_scaling_utils_computational.py index b5744f170535..e412487f4ea6 100644 --- a/services/autoscaling/tests/unit/test_utils_computational_scaling.py +++ b/services/autoscaling/tests/unit/test_modules_cluster_scaling_utils_computational.py @@ -8,7 +8,7 @@ from aws_library.ec2 import Resources from pydantic import ByteSize, TypeAdapter from simcore_service_autoscaling.models import DaskTask, DaskTaskResources -from simcore_service_autoscaling.utils.computational_scaling import ( +from simcore_service_autoscaling.modules.cluster_scaling._utils_computational import ( _DEFAULT_MAX_CPU, _DEFAULT_MAX_RAM, resources_from_dask_task, diff --git a/services/autoscaling/tests/unit/test_modules_buffer_machine_core.py b/services/autoscaling/tests/unit/test_modules_cluster_scaling_warm_buffer_machine_core.py similarity index 95% rename from services/autoscaling/tests/unit/test_modules_buffer_machine_core.py rename to services/autoscaling/tests/unit/test_modules_cluster_scaling_warm_buffer_machine_core.py index 99939ede125a..c1b2cface44c 100644 --- a/services/autoscaling/tests/unit/test_modules_buffer_machine_core.py +++ b/services/autoscaling/tests/unit/test_modules_cluster_scaling_warm_buffer_machine_core.py @@ -28,10 +28,10 @@ from pytest_simcore.helpers.logging_tools import log_context from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from simcore_service_autoscaling.constants import PRE_PULLED_IMAGES_EC2_TAG_KEY -from simcore_service_autoscaling.modules.auto_scaling_mode_dynamic import ( - DynamicAutoscaling, +from simcore_service_autoscaling.modules.cluster_scaling._provider_dynamic import ( + DynamicAutoscalingProvider, ) -from simcore_service_autoscaling.modules.buffer_machines_pool_core import ( +from simcore_service_autoscaling.modules.cluster_scaling._warm_buffer_machines_pool_core import ( monitor_buffer_machines, ) from types_aiobotocore_ec2 import EC2Client @@ -95,7 +95,7 @@ async def test_if_send_command_is_mocked_by_moto( # 1. run, this will create as many buffer machines as needed await monitor_buffer_machines( - initialized_app, auto_scaling_mode=DynamicAutoscaling() + initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) await assert_autoscaled_dynamic_warm_pools_ec2_instances( ec2_client, @@ -112,7 +112,7 @@ async def test_if_send_command_is_mocked_by_moto( # 2. this should generate a failure as current version of moto does not handle this await monitor_buffer_machines( - initialized_app, auto_scaling_mode=DynamicAutoscaling() + initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) @@ -168,7 +168,7 @@ async def _test_monitor_buffer_machines( # 1. 
run, this will create as many buffer machines as needed with log_context(logging.INFO, "create buffer machines"): await monitor_buffer_machines( - initialized_app, auto_scaling_mode=DynamicAutoscaling() + initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) with log_context( logging.INFO, f"waiting for {buffer_count} buffer instances to be running" @@ -211,7 +211,7 @@ async def _assert_buffer_machines_running() -> None: ) async def _assert_run_ssm_command_for_pulling() -> None: await monitor_buffer_machines( - initialized_app, auto_scaling_mode=DynamicAutoscaling() + initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) await assert_autoscaled_dynamic_warm_pools_ec2_instances( ec2_client, @@ -248,7 +248,7 @@ async def _assert_run_ssm_command_for_pulling() -> None: ) async def _assert_wait_for_ssm_command_to_finish() -> None: await monitor_buffer_machines( - initialized_app, auto_scaling_mode=DynamicAutoscaling() + initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) await assert_autoscaled_dynamic_warm_pools_ec2_instances( ec2_client, @@ -268,6 +268,7 @@ async def _assert_wait_for_ssm_command_to_finish() -> None: async def test_monitor_buffer_machines( + patch_ec2_client_launch_instances_min_number_of_instances: None, minimal_configuration: None, ec2_client: EC2Client, buffer_count: int, @@ -354,7 +355,7 @@ async def test_monitor_buffer_machines_terminates_supernumerary_instances( ) # this will terminate the supernumerary instances and start new ones await monitor_buffer_machines( - initialized_app, auto_scaling_mode=DynamicAutoscaling() + initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) await assert_autoscaled_dynamic_warm_pools_ec2_instances( ec2_client, @@ -374,6 +375,7 @@ async def test_monitor_buffer_machines_terminates_supernumerary_instances( async def test_monitor_buffer_machines_terminates_instances_with_incorrect_pre_pulled_images( + patch_ec2_client_launch_instances_min_number_of_instances: None, minimal_configuration: None, ec2_client: EC2Client, buffer_count: int, @@ -414,7 +416,7 @@ async def test_monitor_buffer_machines_terminates_instances_with_incorrect_pre_p ) # this will terminate the wrong instances and start new ones and pre-pull the new set of images await monitor_buffer_machines( - initialized_app, auto_scaling_mode=DynamicAutoscaling() + initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) await assert_autoscaled_dynamic_warm_pools_ec2_instances( ec2_client, @@ -457,6 +459,7 @@ def unneeded_instance_type( ], ) async def test_monitor_buffer_machines_terminates_unneeded_pool( + patch_ec2_client_launch_instances_min_number_of_instances: None, minimal_configuration: None, ec2_client: EC2Client, buffer_count: int, @@ -491,7 +494,7 @@ async def test_monitor_buffer_machines_terminates_unneeded_pool( # this will terminate the unwanted buffer pool and replace with the expected ones await monitor_buffer_machines( - initialized_app, auto_scaling_mode=DynamicAutoscaling() + initialized_app, auto_scaling_mode=DynamicAutoscalingProvider() ) await assert_autoscaled_dynamic_warm_pools_ec2_instances( ec2_client, @@ -532,7 +535,7 @@ def pre_pull_images( async def test_monitor_buffer_machines_against_aws( - skip_if_external_envfile_dict: None, + skip_if_no_external_envfile: None, disable_buffers_pool_background_task: None, disable_autoscaling_background_task: None, disabled_rabbitmq: None, diff --git a/services/autoscaling/tests/unit/test_modules_ec2.py b/services/autoscaling/tests/unit/test_modules_ec2.py index 
aab1747983a5..0ec83790d7e5 100644 --- a/services/autoscaling/tests/unit/test_modules_ec2.py +++ b/services/autoscaling/tests/unit/test_modules_ec2.py @@ -51,7 +51,7 @@ def _(instance_type: InstanceTypeType) -> EC2InstanceConfig: ami_id=aws_ami_id, key_name=faker.pystr(), security_group_ids=[aws_security_group_id], - subnet_id=aws_subnet_id, + subnet_ids=[aws_subnet_id], iam_instance_profile="", ) @@ -69,7 +69,7 @@ def _assert_metrics( *, expected_num_samples: int, check_sample_index: int | None, - expected_sample: _ExpectedSample | None + expected_sample: _ExpectedSample | None, ) -> None: collected_metrics = list(metrics_to_collect.collect()) assert len(collected_metrics) == 1 diff --git a/services/autoscaling/tests/unit/test_modules_instrumentation_models.py b/services/autoscaling/tests/unit/test_modules_instrumentation_models.py index 78824bd8fb5e..f96efae4574e 100644 --- a/services/autoscaling/tests/unit/test_modules_instrumentation_models.py +++ b/services/autoscaling/tests/unit/test_modules_instrumentation_models.py @@ -1,14 +1,14 @@ from dataclasses import is_dataclass import pytest -from simcore_service_autoscaling.models import BufferPool, Cluster +from simcore_service_autoscaling.models import Cluster, WarmBufferPool from simcore_service_autoscaling.modules.instrumentation._constants import ( - BUFFER_POOLS_METRICS_DEFINITIONS, CLUSTER_METRICS_DEFINITIONS, + WARM_BUFFER_POOLS_METRICS_DEFINITIONS, ) from simcore_service_autoscaling.modules.instrumentation._models import ( - BufferPoolsMetrics, ClusterMetrics, + WarmBufferPoolsMetrics, ) @@ -16,7 +16,7 @@ "class_name, metrics_class_name, metrics_definitions", [ (Cluster, ClusterMetrics, CLUSTER_METRICS_DEFINITIONS), - (BufferPool, BufferPoolsMetrics, BUFFER_POOLS_METRICS_DEFINITIONS), + (WarmBufferPool, WarmBufferPoolsMetrics, WARM_BUFFER_POOLS_METRICS_DEFINITIONS), ], ) def test_models_are_in_sync( @@ -27,9 +27,9 @@ def test_models_are_in_sync( assert is_dataclass(class_name) assert is_dataclass(metrics_class_name) for field in class_name.__dataclass_fields__: - assert ( - field in metrics_definitions - ), f"{metrics_definitions.__qualname__} is missing {field}" - assert hasattr( - metrics_class_name, field - ), f"{metrics_class_name.__qualname__} is missing {field}" + assert field in metrics_definitions, ( + f"{metrics_definitions.__qualname__} is missing {field}" + ) + assert hasattr(metrics_class_name, field), ( + f"{metrics_class_name.__qualname__} is missing {field}" + ) diff --git a/services/autoscaling/tests/unit/test_utils_auto_scaling_core.py b/services/autoscaling/tests/unit/test_utils_cluster_scaling.py similarity index 89% rename from services/autoscaling/tests/unit/test_utils_auto_scaling_core.py rename to services/autoscaling/tests/unit/test_utils_cluster_scaling.py index 54d4f1b44e00..7afa42f770d3 100644 --- a/services/autoscaling/tests/unit/test_utils_auto_scaling_core.py +++ b/services/autoscaling/tests/unit/test_utils_cluster_scaling.py @@ -19,14 +19,12 @@ from pydantic import TypeAdapter from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from simcore_service_autoscaling.core.errors import Ec2InvalidDnsNameError from simcore_service_autoscaling.core.settings import ApplicationSettings from simcore_service_autoscaling.models import AssociatedInstance, EC2InstanceData -from simcore_service_autoscaling.utils.auto_scaling_core import ( +from simcore_service_autoscaling.utils.cluster_scaling import ( associate_ec2_instances_with_nodes, 
ec2_startup_script, - get_machine_buffer_type, - node_host_name_from_ec2_private_dns, + get_hot_buffer_type, sort_drained_nodes, ) from simcore_service_autoscaling.utils.utils_docker import ( @@ -52,34 +50,6 @@ def _creator(**overrides) -> DockerNode: return _creator -@pytest.mark.parametrize( - "aws_private_dns, expected_host_name", - [ - ("ip-10-12-32-3.internal-data", "ip-10-12-32-3"), - ("ip-10-12-32-32.internal-data", "ip-10-12-32-32"), - ("ip-10-0-3-129.internal-data", "ip-10-0-3-129"), - ("ip-10-0-3-12.internal-data", "ip-10-0-3-12"), - ], -) -def test_node_host_name_from_ec2_private_dns( - fake_ec2_instance_data: Callable[..., EC2InstanceData], - aws_private_dns: str, - expected_host_name: str, -): - instance = fake_ec2_instance_data( - aws_private_dns=aws_private_dns, - ) - assert node_host_name_from_ec2_private_dns(instance) == expected_host_name - - -def test_node_host_name_from_ec2_private_dns_raises_with_invalid_name( - fake_ec2_instance_data: Callable[..., EC2InstanceData], faker: Faker -): - instance = fake_ec2_instance_data(aws_private_dns=faker.name()) - with pytest.raises(Ec2InvalidDnsNameError): - node_host_name_from_ec2_private_dns(instance) - - @pytest.mark.parametrize("valid_ec2_dns", [True, False]) async def test_associate_ec2_instances_with_nodes_with_no_correspondence( fake_ec2_instance_data: Callable[..., EC2InstanceData], @@ -99,7 +69,7 @@ async def test_associate_ec2_instances_with_nodes_with_no_correspondence( ( associated_instances, non_associated_instances, - ) = await associate_ec2_instances_with_nodes(nodes, ec2_instances) + ) = associate_ec2_instances_with_nodes(nodes, ec2_instances) assert not associated_instances assert non_associated_instances @@ -122,7 +92,7 @@ async def test_associate_ec2_instances_with_corresponding_nodes( ( associated_instances, non_associated_instances, - ) = await associate_ec2_instances_with_nodes(nodes, ec2_instances) + ) = associate_ec2_instances_with_nodes(nodes, ec2_instances) assert associated_instances assert not non_associated_instances @@ -304,7 +274,7 @@ def test_get_machine_buffer_type( random_fake_available_instances: list[EC2InstanceType], ): assert ( - get_machine_buffer_type(random_fake_available_instances) + get_hot_buffer_type(random_fake_available_instances) == random_fake_available_instances[0] ) @@ -329,7 +299,7 @@ def test_sort_drained_nodes( create_fake_node: Callable[..., DockerNode], create_associated_instance: Callable[..., AssociatedInstance], ): - machine_buffer_type = get_machine_buffer_type(random_fake_available_instances) + machine_buffer_type = get_hot_buffer_type(random_fake_available_instances) _NUM_DRAINED_NODES = 20 _NUM_NODE_WITH_TYPE_BUFFER = ( 3 * app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_MACHINES_BUFFER diff --git a/services/autoscaling/tests/unit/test_utils_docker.py b/services/autoscaling/tests/unit/test_utils_docker.py index cae93f3402ec..34b64d6861a2 100644 --- a/services/autoscaling/tests/unit/test_utils_docker.py +++ b/services/autoscaling/tests/unit/test_utils_docker.py @@ -18,7 +18,6 @@ from deepdiff import DeepDiff from faker import Faker from models_library.docker import ( - DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY, DockerGenericTag, DockerLabelKey, ) @@ -31,6 +30,9 @@ Service, Task, ) +from models_library.services_metadata_runtime import ( + DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY, +) from pydantic import ByteSize, TypeAdapter from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict diff 
--git a/services/autoscaling/tests/unit/test_utils_ec2.py b/services/autoscaling/tests/unit/test_utils_ec2.py index 23c5981acd2f..8f56460bc3b4 100644 --- a/services/autoscaling/tests/unit/test_utils_ec2.py +++ b/services/autoscaling/tests/unit/test_utils_ec2.py @@ -3,18 +3,24 @@ # pylint: disable=unused-variable +from collections.abc import Callable + import pytest from aws_library.ec2 import EC2InstanceType, Resources +from aws_library.ec2._models import EC2InstanceData from faker import Faker from pydantic import ByteSize from simcore_service_autoscaling.core.errors import ( ConfigurationError, + Ec2InvalidDnsNameError, TaskBestFittingInstanceNotFoundError, ) from simcore_service_autoscaling.utils.utils_ec2 import ( closest_instance_policy, compose_user_data, find_best_fitting_ec2_instance, + node_host_name_from_ec2_private_dns, + node_ip_from_ec2_private_dns, ) @@ -70,3 +76,59 @@ def test_compose_user_data(faker: Faker): user_data = compose_user_data(command) assert user_data.startswith("#!/bin/bash") assert command in user_data + + +@pytest.mark.parametrize( + "aws_private_dns, expected_host_name", + [ + ("ip-10-12-32-3.internal-data", "ip-10-12-32-3"), + ("ip-10-12-32-32.internal-data", "ip-10-12-32-32"), + ("ip-10-0-3-129.internal-data", "ip-10-0-3-129"), + ("ip-10-0-3-12.internal-data", "ip-10-0-3-12"), + ], +) +def test_node_host_name_from_ec2_private_dns( + fake_ec2_instance_data: Callable[..., EC2InstanceData], + aws_private_dns: str, + expected_host_name: str, +): + instance = fake_ec2_instance_data( + aws_private_dns=aws_private_dns, + ) + assert node_host_name_from_ec2_private_dns(instance) == expected_host_name + + +def test_node_host_name_from_ec2_private_dns_raises_with_invalid_name( + fake_ec2_instance_data: Callable[..., EC2InstanceData], faker: Faker +): + instance = fake_ec2_instance_data(aws_private_dns=faker.name()) + with pytest.raises(Ec2InvalidDnsNameError): + node_host_name_from_ec2_private_dns(instance) + + +@pytest.mark.parametrize( + "aws_private_dns, expected_host_name", + [ + ("ip-10-12-32-3.internal-data", "10.12.32.3"), + ("ip-10-12-32-32.internal-data", "10.12.32.32"), + ("ip-10-0-3-129.internal-data", "10.0.3.129"), + ("ip-10-0-3-12.internal-data", "10.0.3.12"), + ], +) +def test_node_ip_from_ec2_private_dns( + fake_ec2_instance_data: Callable[..., EC2InstanceData], + aws_private_dns: str, + expected_host_name: str, +): + instance = fake_ec2_instance_data( + aws_private_dns=aws_private_dns, + ) + assert node_ip_from_ec2_private_dns(instance) == expected_host_name + + +def test_node_ip_from_ec2_private_dns_raises_with_invalid_name( + fake_ec2_instance_data: Callable[..., EC2InstanceData], faker: Faker +): + instance = fake_ec2_instance_data(aws_private_dns=faker.name()) + with pytest.raises(Ec2InvalidDnsNameError): + node_ip_from_ec2_private_dns(instance) diff --git a/services/autoscaling/tests/unit/test_utils_rabbitmq.py b/services/autoscaling/tests/unit/test_utils_rabbitmq.py index f9949d1d1121..006155b1e0fa 100644 --- a/services/autoscaling/tests/unit/test_utils_rabbitmq.py +++ b/services/autoscaling/tests/unit/test_utils_rabbitmq.py @@ -14,7 +14,7 @@ from dask_task_models_library.container_tasks.utils import generate_dask_job_id from faker import Faker from fastapi import FastAPI -from models_library.docker import DockerLabelKey, StandardSimcoreDockerLabels +from models_library.docker import DockerLabelKey from models_library.generated_models.docker_rest_api import Service, Task from models_library.progress_bar import ProgressReport from 
models_library.projects import ProjectID @@ -24,6 +24,7 @@ ProgressRabbitMessageNode, ProgressType, ) +from models_library.services_metadata_runtime import SimcoreContainerLabels from models_library.services_types import ServiceKey, ServiceVersion from models_library.users import UserID from pydantic import TypeAdapter @@ -208,7 +209,7 @@ async def test_post_task_log_message_docker( mocked_redis_server: None, initialized_app: FastAPI, running_service_tasks: Callable[[dict[DockerLabelKey, str]], Awaitable[list[Task]]], - osparc_docker_label_keys: StandardSimcoreDockerLabels, + osparc_docker_label_keys: SimcoreContainerLabels, faker: Faker, logs_rabbitmq_consumer: AsyncMock, ): @@ -288,7 +289,7 @@ async def test_post_task_progress_message_docker( mocked_redis_server: None, initialized_app: FastAPI, running_service_tasks: Callable[[dict[DockerLabelKey, str]], Awaitable[list[Task]]], - osparc_docker_label_keys: StandardSimcoreDockerLabels, + osparc_docker_label_keys: SimcoreContainerLabels, faker: Faker, progress_rabbitmq_consumer: AsyncMock, ): diff --git a/services/autoscaling/tests/unit/test_utils_buffer_machines_pool_core.py b/services/autoscaling/tests/unit/test_utils_warm_buffer_machines.py similarity index 80% rename from services/autoscaling/tests/unit/test_utils_buffer_machines_pool_core.py rename to services/autoscaling/tests/unit/test_utils_warm_buffer_machines.py index 19cc33c2575d..f9cee912bcf6 100644 --- a/services/autoscaling/tests/unit/test_utils_buffer_machines_pool_core.py +++ b/services/autoscaling/tests/unit/test_utils_warm_buffer_machines.py @@ -2,7 +2,7 @@ # pylint:disable=unused-argument # pylint:disable=redefined-outer-name import pytest -from aws_library.ec2 import AWSTagKey, AWSTagValue, EC2Tags +from aws_library.ec2 import AWSTagValue, EC2Tags from faker import Faker from fastapi import FastAPI from models_library.docker import DockerGenericTag @@ -14,17 +14,17 @@ DEACTIVATED_BUFFER_MACHINE_EC2_TAGS, PRE_PULLED_IMAGES_EC2_TAG_KEY, ) -from simcore_service_autoscaling.modules.auto_scaling_mode_computational import ( - ComputationalAutoscaling, +from simcore_service_autoscaling.modules.cluster_scaling._provider_computational import ( + ComputationalAutoscalingProvider, ) -from simcore_service_autoscaling.modules.auto_scaling_mode_dynamic import ( - DynamicAutoscaling, +from simcore_service_autoscaling.modules.cluster_scaling._provider_dynamic import ( + DynamicAutoscalingProvider, ) -from simcore_service_autoscaling.utils.buffer_machines_pool_core import ( +from simcore_service_autoscaling.utils.warm_buffer_machines import ( dump_pre_pulled_images_as_tags, - get_activated_buffer_ec2_tags, - get_deactivated_buffer_ec2_tags, - is_buffer_machine, + get_activated_warm_buffer_ec2_tags, + get_deactivated_warm_buffer_ec2_tags, + is_warm_buffer_machine, load_pre_pulled_images_from_tags, ) @@ -37,9 +37,9 @@ def test_get_activated_buffer_ec2_tags_dynamic( enabled_dynamic_mode: EnvVarsDict, initialized_app: FastAPI, ): - auto_scaling_mode = DynamicAutoscaling() - activated_buffer_tags = get_activated_buffer_ec2_tags( - initialized_app, auto_scaling_mode + auto_scaling_mode = DynamicAutoscalingProvider() + activated_buffer_tags = get_activated_warm_buffer_ec2_tags( + auto_scaling_mode.get_ec2_tags(initialized_app) ) assert ( auto_scaling_mode.get_ec2_tags(initialized_app) @@ -55,9 +55,9 @@ def test_get_deactivated_buffer_ec2_tags_dynamic( enabled_dynamic_mode: EnvVarsDict, initialized_app: FastAPI, ): - auto_scaling_mode = DynamicAutoscaling() - deactivated_buffer_tags = 
get_deactivated_buffer_ec2_tags( - initialized_app, auto_scaling_mode + auto_scaling_mode = DynamicAutoscalingProvider() + deactivated_buffer_tags = get_deactivated_warm_buffer_ec2_tags( + auto_scaling_mode.get_ec2_tags(initialized_app) ) # when deactivated the buffer EC2 name has an additional -buffer suffix expected_tags = ( @@ -65,8 +65,8 @@ def test_get_deactivated_buffer_ec2_tags_dynamic( | DEACTIVATED_BUFFER_MACHINE_EC2_TAGS ) assert "Name" in expected_tags - expected_tags[AWSTagKey("Name")] = TypeAdapter(AWSTagValue).validate_python( - str(expected_tags[AWSTagKey("Name")]) + "-buffer" + expected_tags["Name"] = TypeAdapter(AWSTagValue).validate_python( + str(expected_tags["Name"]) + "-buffer" ) assert expected_tags == deactivated_buffer_tags @@ -79,9 +79,9 @@ def test_get_activated_buffer_ec2_tags_computational( enabled_computational_mode: EnvVarsDict, initialized_app: FastAPI, ): - auto_scaling_mode = ComputationalAutoscaling() - activated_buffer_tags = get_activated_buffer_ec2_tags( - initialized_app, auto_scaling_mode + auto_scaling_mode = ComputationalAutoscalingProvider() + activated_buffer_tags = get_activated_warm_buffer_ec2_tags( + auto_scaling_mode.get_ec2_tags(initialized_app) ) assert ( auto_scaling_mode.get_ec2_tags(initialized_app) @@ -97,9 +97,9 @@ def test_get_deactivated_buffer_ec2_tags_computational( enabled_computational_mode: EnvVarsDict, initialized_app: FastAPI, ): - auto_scaling_mode = ComputationalAutoscaling() - deactivated_buffer_tags = get_deactivated_buffer_ec2_tags( - initialized_app, auto_scaling_mode + auto_scaling_mode = ComputationalAutoscalingProvider() + deactivated_buffer_tags = get_deactivated_warm_buffer_ec2_tags( + auto_scaling_mode.get_ec2_tags(initialized_app) ) # when deactivated the buffer EC2 name has an additional -buffer suffix expected_tags = ( @@ -107,8 +107,8 @@ def test_get_deactivated_buffer_ec2_tags_computational( | DEACTIVATED_BUFFER_MACHINE_EC2_TAGS ) assert "Name" in expected_tags - expected_tags[AWSTagKey("Name")] = TypeAdapter(AWSTagValue).validate_python( - str(expected_tags[AWSTagKey("Name")]) + "-buffer" + expected_tags["Name"] = TypeAdapter(AWSTagValue).validate_python( + str(expected_tags["Name"]) + "-buffer" ) assert expected_tags == deactivated_buffer_tags @@ -121,7 +121,7 @@ def test_get_deactivated_buffer_ec2_tags_computational( ], ) def test_is_buffer_machine(tags: EC2Tags, expected_is_buffer: bool): - assert is_buffer_machine(tags) is expected_is_buffer + assert is_warm_buffer_machine(tags) is expected_is_buffer @pytest.mark.parametrize( diff --git a/services/catalog/Dockerfile b/services/catalog/Dockerfile index 2ad61d8395dc..2fd10e653097 100644 --- a/services/catalog/Dockerfile +++ b/services/catalog/Dockerfile @@ -2,7 +2,7 @@ # Define arguments in the global scope ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build FROM python:${PYTHON_VERSION}-slim-bookworm AS base-arm64 @@ -31,6 +31,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=private \ set -eux && \ apt-get update && \ apt-get install -y --no-install-recommends \ + fd-find \ gosu \ && apt-get clean -y \ && rm -rf /var/lib/apt/lists/* \ @@ -90,10 +91,7 @@ RUN uv venv "${VIRTUAL_ENV}" -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools + WORKDIR /build @@ -111,6 +109,10 @@ FROM build AS prod-only-deps ENV SC_BUILD_TARGET=prod-only-deps +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV 
UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy + WORKDIR /build/services/catalog RUN \ @@ -135,8 +137,6 @@ ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production ENV PYTHONOPTIMIZE=TRUE -# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode -ENV UV_COMPILE_BYTECODE=1 WORKDIR /home/scu diff --git a/services/catalog/Makefile b/services/catalog/Makefile index 31b3a3276988..6fa07010255c 100644 --- a/services/catalog/Makefile +++ b/services/catalog/Makefile @@ -17,7 +17,7 @@ openapi.json: .env-ignore @set -o allexport; \ source $<; \ set +o allexport; \ - python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(the_app.openapi(), indent=2) )" > $@ + python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app_factory().openapi(), indent=2) )" > $@ # validates OAS file: $@ $(call validate_openapi_specs,$@) diff --git a/services/catalog/VERSION b/services/catalog/VERSION index c18d72be3037..53a48a1e8c64 100644 --- a/services/catalog/VERSION +++ b/services/catalog/VERSION @@ -1 +1 @@ -0.8.1 \ No newline at end of file +0.8.2 \ No newline at end of file diff --git a/services/catalog/docker/boot.sh b/services/catalog/docker/boot.sh index 9db9967c98f9..81a4874d3fcb 100755 --- a/services/catalog/docker/boot.sh +++ b/services/catalog/docker/boot.sh @@ -19,7 +19,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/catalog - uv pip --quiet sync requirements/dev.txt + uv pip --quiet sync --link-mode=copy requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -28,7 +28,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy if command -v uv >/dev/null 2>&1; then - uv pip install debugpy + uv pip install --link-mode=copy debugpy else pip install debugpy fi @@ -40,19 +40,22 @@ SERVER_LOG_LEVEL=$(echo "${APP_LOG_LEVEL}" | tr '[:upper:]' '[:lower:]') echo "$INFO" "Log-level app/server: $APP_LOG_LEVEL/$SERVER_LOG_LEVEL" if [ "${SC_BOOT_MODE}" = "debug" ]; then - reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path "*/src/*" ! -path "*.*" -exec echo '--reload-dir {} \' \;) + reload_dir_packages=$(fdfind src /devel/packages --exec echo '--reload-dir {} ' | tr '\n' ' ') exec sh -c " cd services/catalog/src/simcore_service_catalog && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${CATALOG_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${CATALOG_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ - $reload_dir_packages + $reload_dir_packages \ --reload-dir . 
\ --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_catalog.main:the_app \ + exec uvicorn \ + --factory simcore_service_catalog.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/catalog/docker/entrypoint.sh b/services/catalog/docker/entrypoint.sh index 63e9249be312..5e534412b4a4 100755 --- a/services/catalog/docker/entrypoint.sh +++ b/services/catalog/docker/entrypoint.sh @@ -20,6 +20,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" USERNAME=scu GROUPNAME=scu @@ -56,10 +57,9 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; - # change user property of files already around + fdfind --owner ":$SC_USER_ID" --exclude proc --exec-batch chgrp --no-dereference "$CONT_GROUPNAME" . '/' echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fdfind --owner "$SC_USER_ID:" --exclude proc --exec-batch chown --no-dereference "$SC_USER_NAME" . '/' fi fi diff --git a/services/catalog/openapi.json b/services/catalog/openapi.json index 4295d0ebd1c3..652ef43537ef 100644 --- a/services/catalog/openapi.json +++ b/services/catalog/openapi.json @@ -3,7 +3,7 @@ "info": { "title": "simcore-service-catalog", "description": "Manages and maintains a catalog of all published components (e.g. 
macro-algorithms, scripts, etc)", - "version": "0.8.1" + "version": "0.8.2" }, "paths": { "/": { @@ -172,6 +172,7 @@ "application/json": { "schema": { "type": "object", + "additionalProperties": true, "title": "Response Get Service Labels V0 Services Service Key Service Version Labels Get" } } @@ -839,6 +840,7 @@ "Runtime": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -1534,6 +1536,7 @@ "Global": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -1556,6 +1559,7 @@ "GlobalJob": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -2688,6 +2692,7 @@ "title": "Classifiers" }, "quality": { + "additionalProperties": true, "type": "object", "title": "Quality", "default": {} @@ -2933,6 +2938,7 @@ "contentSchema": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -3044,6 +3050,7 @@ "contentSchema": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -3130,6 +3137,7 @@ "content_schema": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { diff --git a/services/catalog/requirements/_base.in b/services/catalog/requirements/_base.in index 7d985b6a1bc7..05665eea10c9 100644 --- a/services/catalog/requirements/_base.in +++ b/services/catalog/requirements/_base.in @@ -19,6 +19,6 @@ aiocache[redis,msgpack] asyncpg # database packaging -pydantic[dotenv] # data models +pydantic pyyaml tenacity diff --git a/services/catalog/requirements/_base.txt b/services/catalog/requirements/_base.txt index d87cbb51b880..8b2c4eff9101 100644 --- a/services/catalog/requirements/_base.txt +++ b/services/catalog/requirements/_base.txt @@ -12,7 +12,7 @@ aiofiles==24.1.0 # via -r requirements/../../../packages/service-library/requirements/_base.in aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -49,6 +49,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi asyncpg==0.30.0 @@ -79,19 +81,14 @@ certifi==2025.1.31 # httpcore # httpx # requests + # sentry-sdk charset-normalizer==3.4.1 # via requests -click==8.1.8 +click==8.2.1 # via # rich-toolkit # typer # uvicorn -deprecated==1.2.18 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions dnspython==2.7.0 # via email-validator email-validator==2.2.0 @@ -102,12 +99,14 @@ exceptiongroup==1.2.2 # via aio-pika fast-depends==2.4.12 # via faststream -fastapi==0.115.12 +fastapi==0.116.1 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi-lifespan-manager -fastapi-cli==0.0.7 +fastapi-cli==0.0.8 # via fastapi +fastapi-cloud-cli==0.1.5 + # via fastapi-cli fastapi-lifespan-manager==0.1.4 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in faststream==0.5.35 @@ -116,7 +115,7 @@ frozenlist==1.5.0 # via # aiohttp # aiosignal 
-googleapis-common-protos==1.69.1 +googleapis-common-protos==1.70.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -124,7 +123,7 @@ greenlet==3.1.1 # via sqlalchemy grpcio==1.71.0 # via opentelemetry-exporter-otlp-proto-grpc -h11==0.14.0 +h11==0.16.0 # via # httpcore # uvicorn @@ -132,7 +131,7 @@ h2==4.2.0 # via httpx hpack==4.1.0 # via h2 -httpcore==1.0.7 +httpcore==1.0.9 # via httpx httptools==0.6.4 # via uvicorn @@ -154,6 +153,7 @@ httpx==0.28.1 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi + # fastapi-cloud-cli hyperframe==6.1.0 # via h2 idna==3.10 @@ -182,13 +182,17 @@ jinja2==3.1.6 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # fastapi +jsonref==1.1.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.23.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema-specifications==2024.10.1 # via jsonschema -mako==1.3.9 +mako==1.3.10 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -219,7 +223,7 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.31.0 +opentelemetry-api==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -235,17 +239,17 @@ opentelemetry-api==1.31.0 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.31.0 +opentelemetry-exporter-otlp==1.34.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.31.0 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.31.0 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.31.0 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.52b0 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi @@ -255,33 +259,33 @@ opentelemetry-instrumentation==0.52b0 # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.52b0 +opentelemetry-instrumentation-aio-pika==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-asgi==0.52b0 +opentelemetry-instrumentation-asgi==0.55b1 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-asyncpg==0.52b0 - # via -r requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-instrumentation-fastapi==0.52b0 +opentelemetry-instrumentation-asyncpg==0.55b1 + # via -r 
requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-fastapi==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.52b0 +opentelemetry-instrumentation-httpx==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-logging==0.52b0 +opentelemetry-instrumentation-logging==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.52b0 +opentelemetry-instrumentation-redis==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.52b0 +opentelemetry-instrumentation-requests==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.31.0 +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.31.0 +opentelemetry-sdk==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.52b0 +opentelemetry-semantic-conventions==0.55b1 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi @@ -291,7 +295,7 @@ opentelemetry-semantic-conventions==0.52b0 # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.52b0 +opentelemetry-util-http==0.55b1 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi @@ -334,7 +338,7 @@ propcache==0.3.0 # via # aiohttp # yarl -protobuf==5.29.3 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto @@ -344,7 +348,7 @@ psycopg2-binary==2.9.10 # via sqlalchemy pycryptodome==3.21.0 # via stream-zip -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -376,11 +380,12 @@ pydantic==2.10.6 # -r requirements/_base.in # fast-depends # fastapi + # fastapi-cloud-cli # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.3 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -478,26 +483,34 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via opentelemetry-exporter-otlp-proto-http -rich==13.9.4 +rich==14.1.0 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # rich-toolkit # typer -rich-toolkit==0.13.2 - # via fastapi-cli +rich-toolkit==0.15.0 + # via + # fastapi-cli + # fastapi-cloud-cli +rignore==0.6.4 + # via fastapi-cloud-cli rpds-py==0.23.1 # via # jsonschema # referencing +sentry-sdk==2.35.0 + # via fastapi-cloud-cli shellingham==1.5.4 # via typer six==1.17.0 # via 
python-dateutil sniffio==1.3.1 - # via anyio + # via + # anyio + # asgi-lifespan sqlalchemy==1.4.54 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -516,7 +529,7 @@ sqlalchemy==1.4.54 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/postgres-database/requirements/_base.in # alembic -starlette==0.46.1 +starlette==0.47.2 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -543,27 +556,36 @@ toolz==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.15.2 +typer==0.16.1 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # fastapi-cli + # fastapi-cloud-cli types-python-dateutil==2.9.0.20241206 # via arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # aiodebug # alembic # anyio # fastapi # faststream + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pydantic # pydantic-core # pydantic-extra-types # rich-toolkit + # starlette # typer -urllib3==2.3.0 + # typing-inspection +typing-inspection==0.4.1 + # via pydantic +urllib3==2.5.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -580,10 +602,12 @@ urllib3==2.3.0 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # requests + # sentry-sdk uvicorn==0.34.2 # via # fastapi # fastapi-cli + # fastapi-cloud-cli uvloop==0.21.0 # via uvicorn watchfiles==1.0.4 @@ -592,7 +616,6 @@ websockets==15.0.1 # via uvicorn wrapt==1.17.2 # via - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-httpx diff --git a/services/catalog/requirements/_test.txt b/services/catalog/requirements/_test.txt index 30015c3c870f..3bbe95905bb3 100644 --- a/services/catalog/requirements/_test.txt +++ b/services/catalog/requirements/_test.txt @@ -2,7 +2,7 @@ aiohappyeyeballs==2.6.1 # via # -c requirements/_base.txt # aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -20,7 +20,9 @@ anyio==4.8.0 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in attrs==25.2.0 # via # -c requirements/_base.txt @@ -39,7 +41,7 @@ charset-normalizer==3.4.1 # via # -c requirements/_base.txt # requests -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # -r requirements/_test.in @@ -58,11 +60,11 @@ greenlet==3.1.1 # via # -c requirements/_base.txt # sqlalchemy -h11==0.14.0 +h11==0.16.0 # via # -c requirements/_base.txt # httpcore -httpcore==1.0.7 +httpcore==1.0.9 # via # -c requirements/_base.txt # httpx @@ -88,7 
+90,7 @@ jsonschema-specifications==2024.10.1 # via # -c requirements/_base.txt # jsonschema -mako==1.3.9 +mako==1.3.10 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -102,16 +104,20 @@ multidict==6.1.0 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.15.0 +mypy==1.16.1 # via sqlalchemy -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via mypy packaging==24.2 # via # -c requirements/_base.txt # pytest +pathspec==0.12.1 + # via mypy pluggy==1.5.0 - # via pytest + # via + # pytest + # pytest-cov propcache==0.3.0 # via # -c requirements/_base.txt @@ -121,7 +127,11 @@ ptvsd==4.3.2 # via -r requirements/_test.in py-cpuinfo==9.0.0 # via pytest-benchmark -pytest==8.3.5 +pygments==2.19.1 + # via + # -c requirements/_base.txt + # pytest +pytest==8.4.1 # via # -r requirements/_test.in # pytest-aiohttp @@ -132,15 +142,15 @@ pytest==8.3.5 # pytest-mock pytest-aiohttp==1.1.0 # via -r requirements/_test.in -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via pytest-aiohttp pytest-benchmark==5.1.0 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in -pytest-docker==3.2.0 +pytest-docker==3.2.3 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -150,7 +160,7 @@ referencing==0.35.1 # -c requirements/_base.txt # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via # -c requirements/_base.txt # docker @@ -178,7 +188,7 @@ types-psycopg2==2.9.21.20250121 # via -r requirements/_test.in types-pyyaml==6.0.12.20241230 # via -r requirements/_test.in -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # alembic @@ -187,7 +197,7 @@ typing-extensions==4.12.2 # sqlalchemy2-stubs tzdata==2025.1 # via faker -urllib3==2.3.0 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/services/catalog/requirements/_tools.txt b/services/catalog/requirements/_tools.txt index 4ef3f43c67d4..a326ba0a75f0 100644 --- a/services/catalog/requirements/_tools.txt +++ b/services/catalog/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -28,11 +28,11 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # -c requirements/_test.txt # black @@ -46,7 +46,10 @@ packaging==24.2 # black # build pathspec==0.12.1 - # via black + # via + # -c requirements/_test.txt + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -71,11 +74,11 @@ pyyaml==6.0.2 # watchdog ruff==0.9.10 # via -r requirements/../../../requirements/devenv.txt -setuptools==76.0.0 +setuptools==80.9.0 # via pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/services/catalog/setup.cfg b/services/catalog/setup.cfg index 812aa1c836cd..cce4210c9f9c 100644 --- a/services/catalog/setup.cfg +++ b/services/catalog/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.8.1 +current_version = 0.8.2 commit = True message = services/catalog version: {current_version} → {new_version} tag = 
False @@ -10,10 +10,10 @@ commit_args = --no-verify [tool:pytest] asyncio_mode = auto asyncio_default_fixture_loop_scope = function -markers = +markers = testit: "marks test to run during development" [mypy] -plugins = +plugins = pydantic.mypy sqlalchemy.ext.mypy.plugin diff --git a/services/catalog/src/simcore_service_catalog/_meta.py b/services/catalog/src/simcore_service_catalog/_meta.py index 770d24a4e285..4e7bd91cf54a 100644 --- a/services/catalog/src/simcore_service_catalog/_meta.py +++ b/services/catalog/src/simcore_service_catalog/_meta.py @@ -12,7 +12,7 @@ VERSION: Final[Version] = info.version API_VERSION: Final[VersionStr] = info.__version__ API_VTAG: Final[str] = info.api_prefix_path_tag -APP_NAME: Final[str] = info.project_name +APP_NAME: Final[str] = info.app_name SUMMARY: Final[str] = info.get_summary() diff --git a/services/catalog/src/simcore_service_catalog/api/rpc/_services.py b/services/catalog/src/simcore_service_catalog/api/rpc/_services.py index 73d5bc3e562f..7b5173a5383d 100644 --- a/services/catalog/src/simcore_service_catalog/api/rpc/_services.py +++ b/services/catalog/src/simcore_service_catalog/api/rpc/_services.py @@ -20,7 +20,6 @@ from models_library.users import UserID from pydantic import TypeAdapter, ValidationError, validate_call from pyinstrument import Profiler -from servicelib.logging_utils import log_decorator from servicelib.rabbitmq import RPCRouter from servicelib.rabbitmq.rpc_interfaces.catalog.errors import ( CatalogForbiddenError, @@ -106,7 +105,6 @@ async def list_services_paginated( ValidationError, ) ) -@log_decorator(_logger, level=logging.DEBUG) @_profile_rpc_call @validate_call(config={"arbitrary_types_allowed": True}) async def get_service( @@ -141,7 +139,6 @@ async def get_service( ValidationError, ) ) -@log_decorator(_logger, level=logging.DEBUG) @validate_call(config={"arbitrary_types_allowed": True}) async def update_service( app: FastAPI, @@ -179,7 +176,6 @@ async def update_service( ValidationError, ) ) -@log_decorator(_logger, level=logging.DEBUG) @validate_call(config={"arbitrary_types_allowed": True}) async def check_for_service( app: FastAPI, @@ -203,7 +199,6 @@ async def check_for_service( @router.expose(reraise_if_error_type=(CatalogForbiddenError, ValidationError)) -@log_decorator(_logger, level=logging.DEBUG) @validate_call(config={"arbitrary_types_allowed": True}) async def batch_get_my_services( app: FastAPI, @@ -233,7 +228,6 @@ async def batch_get_my_services( @router.expose(reraise_if_error_type=(ValidationError,)) -@log_decorator(_logger, level=logging.DEBUG) @validate_call(config={"arbitrary_types_allowed": True}) async def list_my_service_history_latest_first( app: FastAPI, @@ -281,7 +275,6 @@ async def list_my_service_history_latest_first( ValidationError, ) ) -@log_decorator(_logger, level=logging.DEBUG) @validate_call(config={"arbitrary_types_allowed": True}) async def get_service_ports( app: FastAPI, diff --git a/services/catalog/src/simcore_service_catalog/cli.py b/services/catalog/src/simcore_service_catalog/cli.py index 0d4fbf5107bc..9fd453f1f553 100644 --- a/services/catalog/src/simcore_service_catalog/cli.py +++ b/services/catalog/src/simcore_service_catalog/cli.py @@ -30,7 +30,7 @@ def run(): """Runs application""" typer.secho("Sorry, this entrypoint is intentionally disabled. 
Use instead") typer.secho( - "$ uvicorn simcore_service_catalog.main:the_app", + "$ uvicorn --factory simcore_service_catalog.main:app_factory", fg=typer.colors.BLUE, ) diff --git a/services/catalog/src/simcore_service_catalog/clients/director.py b/services/catalog/src/simcore_service_catalog/clients/director.py index 4d9fe37a3dfe..367f9cb0e2e4 100644 --- a/services/catalog/src/simcore_service_catalog/clients/director.py +++ b/services/catalog/src/simcore_service_catalog/clients/director.py @@ -82,7 +82,9 @@ def _unenvelope_or_raise_error( """ body = resp.json() - assert "data" in body or "error" in body # nosec + assert ( + "data" in body or "error" in body + ), f"here is the failing {body=}, {resp.request=}" # nosec data = body.get("data") error = body.get("error") @@ -250,7 +252,6 @@ async def get_service_extras( # discrete resources (custom made ones) --- # check if the service requires GPU support if not invalid_with_msg and _validate_kind(entry, "VRAM"): - result["node_requirements"]["GPU"] = 1 if not invalid_with_msg and _validate_kind(entry, "MPI"): result["node_requirements"]["MPI"] = 1 @@ -277,15 +278,12 @@ async def get_service_extras( ) # get org labels - result.update( - { - sl: labels[dl] - for dl, sl in _ORG_LABELS_TO_SCHEMA_LABELS.items() - if dl in labels - } - ) - - _logger.debug("Following service extras were compiled: %s", pformat(result)) + if service_build_details := { + sl: labels[dl] + for dl, sl in _ORG_LABELS_TO_SCHEMA_LABELS.items() + if dl in labels + }: + result.update({"service_build_details": service_build_details}) return TypeAdapter(ServiceExtras).validate_python(result) diff --git a/services/catalog/src/simcore_service_catalog/core/application.py b/services/catalog/src/simcore_service_catalog/core/application.py index 3f726883066d..1d5797418624 100644 --- a/services/catalog/src/simcore_service_catalog/core/application.py +++ b/services/catalog/src/simcore_service_catalog/core/application.py @@ -1,9 +1,11 @@ import logging +from common_library.json_serialization import json_dumps from fastapi import FastAPI from fastapi.middleware.gzip import GZipMiddleware from models_library.basic_types import BootModeEnum from servicelib.fastapi import timing_middleware +from servicelib.fastapi.lifespan_utils import Lifespan from servicelib.fastapi.monitoring import ( setup_prometheus_instrumentation, ) @@ -27,27 +29,18 @@ _logger = logging.getLogger(__name__) -_LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR -_NOISY_LOGGERS = ( - "aio_pika", - "aiobotocore", - "aiormq", - "botocore", - "httpcore", - "werkzeug", -) - -def create_app() -> FastAPI: - # keep mostly quiet noisy loggers - quiet_level: int = max( - min(logging.root.level + _LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING - ) - for name in _NOISY_LOGGERS: - logging.getLogger(name).setLevel(quiet_level) - - settings = ApplicationSettings.create_from_envs() - _logger.debug(settings.model_dump_json(indent=2)) +def create_app( + *, + settings: ApplicationSettings | None = None, + logging_lifespan: Lifespan | None = None, +) -> FastAPI: + if not settings: + settings = ApplicationSettings.create_from_envs() + _logger.info( + "Application settings: %s", + json_dumps(settings, indent=2, sort_keys=True), + ) app = FastAPI( debug=settings.SC_BOOT_MODE @@ -58,7 +51,7 @@ def create_app() -> FastAPI: openapi_url=f"/api/{API_VTAG}/openapi.json", docs_url="/dev/doc", redoc_url=None, # default disabled - lifespan=events.create_app_lifespan(), + lifespan=events.create_app_lifespan(logging_lifespan=logging_lifespan), ) 
override_fastapi_openapi_method(app) diff --git a/services/catalog/src/simcore_service_catalog/core/background_tasks.py b/services/catalog/src/simcore_service_catalog/core/background_tasks.py index ed5cd27240e9..56a4e338b842 100644 --- a/services/catalog/src/simcore_service_catalog/core/background_tasks.py +++ b/services/catalog/src/simcore_service_catalog/core/background_tasks.py @@ -203,7 +203,7 @@ async def _sync_services_task(app: FastAPI) -> None: await asyncio.sleep(app.state.settings.CATALOG_BACKGROUND_TASK_REST_TIME) - except asyncio.CancelledError: # noqa: PERF203 + except asyncio.CancelledError: # task is stopped _logger.info("registry syncing task cancelled") raise diff --git a/services/catalog/src/simcore_service_catalog/core/events.py b/services/catalog/src/simcore_service_catalog/core/events.py index 8695b10f15ef..673bdf2f80f6 100644 --- a/services/catalog/src/simcore_service_catalog/core/events.py +++ b/services/catalog/src/simcore_service_catalog/core/events.py @@ -3,6 +3,7 @@ from fastapi import FastAPI from fastapi_lifespan_manager import LifespanManager, State +from servicelib.fastapi.lifespan_utils import Lifespan from servicelib.fastapi.monitoring import ( create_prometheus_instrumentationmain_input_state, prometheus_instrumentation_lifespan, @@ -50,9 +51,11 @@ async def _settings_lifespan(app: FastAPI) -> AsyncIterator[State]: } -def create_app_lifespan() -> LifespanManager: +def create_app_lifespan(logging_lifespan: Lifespan | None = None) -> LifespanManager: # WARNING: order matters app_lifespan = LifespanManager() + if logging_lifespan: + app_lifespan.add(logging_lifespan) app_lifespan.add(_settings_lifespan) # - postgres diff --git a/services/catalog/src/simcore_service_catalog/core/settings.py b/services/catalog/src/simcore_service_catalog/core/settings.py index 5581bf4ba995..4237ce0c812c 100644 --- a/services/catalog/src/simcore_service_catalog/core/settings.py +++ b/services/catalog/src/simcore_service_catalog/core/settings.py @@ -3,6 +3,7 @@ from typing import Annotated, Final from common_library.basic_types import DEFAULT_FACTORY +from common_library.logging.logging_utils_filtering import LoggerName, MessageSubstring from models_library.api_schemas_catalog.services_specifications import ( ServiceSpecifications, ) @@ -16,7 +17,6 @@ PositiveInt, TypeAdapter, ) -from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.application import BaseApplicationSettings from settings_library.base import BaseCustomSettings from settings_library.http_client_request import ClientRequestSettings diff --git a/services/catalog/src/simcore_service_catalog/main.py b/services/catalog/src/simcore_service_catalog/main.py index 52bd949a5429..60f5da962d47 100644 --- a/services/catalog/src/simcore_service_catalog/main.py +++ b/services/catalog/src/simcore_service_catalog/main.py @@ -1,24 +1,40 @@ -"""Main application to be deployed in for example uvicorn. 
-""" +"""Main application to be deployed in for example uvicorn.""" import logging +from typing import Final +from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.logging_utils import config_all_loggers +from servicelib.fastapi.logging_lifespan import create_logging_lifespan from simcore_service_catalog.core.application import create_app from simcore_service_catalog.core.settings import ApplicationSettings -_the_settings = ApplicationSettings.create_from_envs() +_logger = logging.getLogger(__name__) -# SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 -logging.basicConfig(level=_the_settings.log_level) # NOSONAR -logging.root.setLevel(_the_settings.log_level) -config_all_loggers( - log_format_local_dev_enabled=_the_settings.CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=_the_settings.CATALOG_LOG_FILTER_MAPPING, - tracing_settings=_the_settings.CATALOG_TRACING, + +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aio_pika", + "aiobotocore", + "aiormq", + "botocore", + "httpcore", + "werkzeug", ) -# SINGLETON FastAPI app -the_app: FastAPI = create_app() +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_lifespan = create_logging_lifespan( + log_format_local_dev_enabled=app_settings.CATALOG_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.CATALOG_LOG_FILTER_MAPPING, + tracing_settings=app_settings.CATALOG_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) + + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + + return create_app(settings=app_settings, logging_lifespan=logging_lifespan) diff --git a/services/catalog/src/simcore_service_catalog/service/access_rights.py b/services/catalog/src/simcore_service_catalog/service/access_rights.py index 435473197982..cac071cdb061 100644 --- a/services/catalog/src/simcore_service_catalog/service/access_rights.py +++ b/services/catalog/src/simcore_service_catalog/service/access_rights.py @@ -24,7 +24,9 @@ _logger = logging.getLogger(__name__) -_LEGACY_SERVICES_DATE: datetime = datetime(year=2020, month=8, day=19, tzinfo=UTC) +_OLD_SERVICES_CUTOFF_DATETIME: datetime = datetime( + year=2020, month=8, day=19, tzinfo=UTC +) class InheritedData(TypedDict): @@ -37,15 +39,42 @@ def _is_frontend_service(service: ServiceMetaDataPublished) -> bool: async def _is_old_service(app: FastAPI, service: ServiceMetaDataPublished) -> bool: + # # NOTE: https://github.com/ITISFoundation/osparc-simcore/pull/6003#discussion_r1658200909 - # get service build date - client = get_director_client(app) + # + service_extras = await get_director_client(app).get_service_extras( + service.key, service.version + ) + + # 1. w/o build details + has_no_build_data = ( + not service_extras or service_extras.service_build_details is None + ) + if has_no_build_data: + _logger.debug( + "Service %s:%s is considered legacy because it has no build details", + service.key, + service.version, + ) + return True + + # 2. 
check if built before cutoff date + assert service_extras.service_build_details + service_build_datetime = arrow.get( + service_extras.service_build_details.build_date + ).datetime - data = await client.get_service_extras(service.key, service.version) - if not data or data.service_build_details is None: + is_older_than_cutoff = service_build_datetime < _OLD_SERVICES_CUTOFF_DATETIME + if is_older_than_cutoff: + _logger.debug( + "Service %s:%s is considered legacy because it was built before %s", + service.key, + service.version, + _OLD_SERVICES_CUTOFF_DATETIME, + ) return True - service_build_data = arrow.get(data.service_build_details.build_date).datetime - return bool(service_build_data < _LEGACY_SERVICES_DATE) + + return False async def evaluate_default_service_ownership_and_rights( diff --git a/services/catalog/src/simcore_service_catalog/service/catalog_services.py b/services/catalog/src/simcore_service_catalog/service/catalog_services.py index d1377bb4db6c..f97b6da792d6 100644 --- a/services/catalog/src/simcore_service_catalog/service/catalog_services.py +++ b/services/catalog/src/simcore_service_catalog/service/catalog_services.py @@ -4,6 +4,7 @@ from contextlib import suppress from typing import Literal, TypeVar +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from models_library.api_schemas_catalog.services import ( LatestServiceGet, MyServiceGet, @@ -22,9 +23,6 @@ from models_library.services_types import ServiceKey, ServiceVersion from models_library.users import UserID from pydantic import HttpUrl -from servicelib.logging_errors import ( - create_troubleshotting_log_kwargs, -) from servicelib.rabbitmq.rpc_interfaces.catalog.errors import ( CatalogForbiddenError, CatalogInconsistentError, @@ -108,7 +106,6 @@ def _to_latest_get_schema( access_rights_db: list[ServiceAccessRightsDB], service_manifest: ServiceMetaDataPublished, ) -> LatestServiceGet: - assert len(service_db.history) == 0 # nosec return LatestServiceGet.model_validate( @@ -239,7 +236,7 @@ async def _get_services_manifests( if missing_services: msg = f"Found {len(missing_services)} services that are in the database but missing in the registry manifest" _logger.warning( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( msg, error=CatalogInconsistentError( missing_services=missing_services, @@ -397,7 +394,6 @@ async def get_catalog_service( service_key: ServiceKey, service_version: ServiceVersion, ) -> ServiceGetV2: - access_rights = await check_catalog_service_permissions( repo=repo, product_name=product_name, @@ -449,7 +445,6 @@ async def update_catalog_service( service_version: ServiceVersion, update: ServiceUpdateV2, ) -> ServiceGetV2: - if is_function_service(service_key): raise CatalogForbiddenError( name=f"function service {service_key}:{service_version}", @@ -482,7 +477,6 @@ async def update_catalog_service( # Updates service_access_rights (they can be added/removed/modified) if update.access_rights: - # before previous_gids = [r.gid for r in access_rights] @@ -602,7 +596,6 @@ async def batch_get_user_services( ] ], ) -> list[MyServiceGet]: - services_access_rights = await repo.batch_get_services_access_rights( key_versions=ids, product_name=product_name ) @@ -612,7 +605,6 @@ async def batch_get_user_services( my_services = [] for service_key, service_version in ids: - # Evaluate user's access-rights to this service key:version access_rights = services_access_rights.get((service_key, service_version), []) my_access_rights = 
ServiceGroupAccessRightsV2(execute=False, write=False) @@ -695,7 +687,6 @@ async def list_user_service_release_history( # result options include_compatibility: bool = False, ) -> tuple[PageTotalCount, list[ServiceRelease]]: - total_count, history = await repo.get_service_history_page( # NOTE: that the service history might be different for each user # since access rights are defined on a version basis (i.e. one use can have access to v1 but ot to v2) diff --git a/services/catalog/tests/unit/conftest.py b/services/catalog/tests/unit/conftest.py index 296cc47bd192..0088fa436a0b 100644 --- a/services/catalog/tests/unit/conftest.py +++ b/services/catalog/tests/unit/conftest.py @@ -15,18 +15,19 @@ import pytest import respx import simcore_service_catalog -import simcore_service_catalog.core.application import simcore_service_catalog.core.events -import simcore_service_catalog.repository -import simcore_service_catalog.repository.events import yaml from asgi_lifespan import LifespanManager from faker import Faker from fastapi import FastAPI, status from fastapi.testclient import TestClient -from models_library.api_schemas_directorv2.services import ServiceExtras +from models_library.api_schemas_directorv2.services import ( + NodeRequirements, + ServiceBuildDetails, + ServiceExtras, +) from packaging.version import Version -from pydantic import EmailStr, TypeAdapter +from pydantic import EmailStr from pytest_mock import MockerFixture, MockType from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict @@ -35,6 +36,7 @@ from simcore_service_catalog.core.settings import ApplicationSettings pytest_plugins = [ + "pytest_simcore.asyncio_event_loops", "pytest_simcore.cli_runner", "pytest_simcore.docker_compose", "pytest_simcore.docker_registry", @@ -42,6 +44,7 @@ "pytest_simcore.environment_configs", "pytest_simcore.faker_products_data", "pytest_simcore.faker_users_data", + "pytest_simcore.logging", "pytest_simcore.postgres_service", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", @@ -177,7 +180,6 @@ def client( assert spy_app.on_shutdown.call_count == 0 with TestClient(app_under_test) as cli: - assert spy_app.on_startup.call_count == 1 assert spy_app.on_shutdown.call_count == 0 @@ -390,7 +392,6 @@ def mocked_director_rest_api_base( assert_all_called=False, assert_all_mocked=True, ) as respx_mock: - # HEATHCHECK assert openapi["paths"].get("/") respx_mock.head("/", name="healthcheck").respond( @@ -410,8 +411,10 @@ def mocked_director_rest_api_base( @pytest.fixture def get_mocked_service_labels() -> Callable[[str, str], dict]: - def _(service_key: str, service_version: str) -> dict: - return { + def _( + service_key: str, service_version: str, *, include_org_labels: bool = True + ) -> dict: + base_labels = { "io.simcore.authors": '{"authors": [{"name": "John Smith", "email": "john@acme.com", "affiliation": "ACME\'IS Foundation"}]}', "io.simcore.contact": '{"contact": "john@acme.com"}', "io.simcore.description": '{"description": "Autonomous Nervous System Network model"}', @@ -426,21 +429,35 @@ def _(service_key: str, service_version: str) -> dict: "xxxxx", service_version ), "maintainer": "johnsmith", - "org.label-schema.build-date": "2023-04-17T08:04:15Z", - "org.label-schema.schema-version": "1.0", - "org.label-schema.vcs-ref": "4d79449a2e79f8a3b3b2e1dd0290af9f3d1a8792", - "org.label-schema.vcs-url": "https://github.com/ITISFoundation/jupyter-math.git", "simcore.service.restart-policy": "no-restart", 
"simcore.service.settings": '[{"name": "Resources", "type": "Resources", "value": {"Limits": {"NanoCPUs": 1000000000, "MemoryBytes": 4194304}, "Reservations": {"NanoCPUs": 4000000000, "MemoryBytes": 2147483648}}}]', } + if include_org_labels: + base_labels.update( + { + "org.label-schema.build-date": "2023-04-17T08:04:15Z", + "org.label-schema.schema-version": "1.0", + "org.label-schema.vcs-ref": "4d79449a2e79f8a3b3b2e1dd0290af9f3d1a8792", + "org.label-schema.vcs-url": "https://github.com/ITISFoundation/jupyter-math.git", + } + ) + + return base_labels + return _ @pytest.fixture def mock_service_extras() -> ServiceExtras: - return TypeAdapter(ServiceExtras).validate_python( - ServiceExtras.model_json_schema()["examples"][0] + return ServiceExtras( + node_requirements=NodeRequirements(CPU=1.0, GPU=None, RAM=4194304, VRAM=None), + service_build_details=ServiceBuildDetails( + build_date="2023-04-17T08:04:15Z", + vcs_ref="4d79449a2e79f8a3b3b2e1dd0290af9f3d1a8792", + vcs_url="https://github.com/ITISFoundation/jupyter-math.git", + ), + container_spec=None, ) diff --git a/services/catalog/tests/unit/test_clients_director.py b/services/catalog/tests/unit/test_clients_director.py index 3c91d9bc52af..d0f8598aa400 100644 --- a/services/catalog/tests/unit/test_clients_director.py +++ b/services/catalog/tests/unit/test_clients_director.py @@ -7,10 +7,12 @@ import urllib.parse +from collections.abc import Callable from typing import Any +import httpx import pytest -from fastapi import FastAPI +from fastapi import FastAPI, status from models_library.services_metadata_published import ServiceMetaDataPublished from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict @@ -32,6 +34,14 @@ def app_environment( ) +@pytest.fixture +def service_key_and_version( + expected_director_rest_api_list_services: list[dict[str, Any]], +) -> tuple[str, str]: + expected_service = expected_director_rest_api_list_services[0] + return expected_service["key"], expected_service["version"] + + async def test_director_client_high_level_api( repository_lifespan_disabled: None, background_task_lifespan_disabled: None, @@ -57,7 +67,6 @@ async def test_director_client_high_level_api( await director_api.get_service(expected_service.key, expected_service.version) == expected_service ) - # TODO: error handling! 
async def test_director_client_low_level_api( @@ -65,14 +74,12 @@ async def test_director_client_low_level_api( background_task_lifespan_disabled: None, rabbitmq_and_rpc_setup_disabled: None, mocked_director_rest_api: MockRouter, - expected_director_rest_api_list_services: list[dict[str, Any]], + service_key_and_version: tuple[str, str], app: FastAPI, ): director_api = get_director_client(app) - expected_service = expected_director_rest_api_list_services[0] - key = expected_service["key"] - version = expected_service["version"] + key, version = service_key_and_version service_labels = await director_api.get( f"/services/{urllib.parse.quote_plus(key)}/{version}/labels" @@ -84,3 +91,74 @@ async def test_director_client_low_level_api( f"/services/{urllib.parse.quote_plus(key)}/{version}" ) assert service + + +async def test_director_client_get_service_extras_with_org_labels( + repository_lifespan_disabled: None, + background_task_lifespan_disabled: None, + rabbitmq_and_rpc_setup_disabled: None, + mocked_director_rest_api: MockRouter, + service_key_and_version: tuple[str, str], + app: FastAPI, +): + director_api = get_director_client(app) + + key, version = service_key_and_version + + service_extras = await director_api.get_service_extras(key, version) + + # Check node requirements are present + assert service_extras.node_requirements is not None + assert service_extras.node_requirements.cpu > 0 + assert service_extras.node_requirements.ram > 0 + + # Check service build details are present (since we have org.label-schema labels) + assert service_extras.service_build_details is not None + assert service_extras.service_build_details.build_date == "2023-04-17T08:04:15Z" + assert ( + service_extras.service_build_details.vcs_ref + == "4d79449a2e79f8a3b3b2e1dd0290af9f3d1a8792" + ) + assert ( + service_extras.service_build_details.vcs_url + == "https://github.com/ITISFoundation/jupyter-math.git" + ) + + +async def test_director_client_get_service_extras_without_org_labels( + repository_lifespan_disabled: None, + background_task_lifespan_disabled: None, + rabbitmq_and_rpc_setup_disabled: None, + mocked_director_rest_api_base: MockRouter, + service_key_and_version: tuple[str, str], + get_mocked_service_labels: Callable[[str, str, bool], dict], + app: FastAPI, +): + # Setup mock without org.label-schema labels + service_key, service_version = service_key_and_version + + # Mock the labels endpoint without org labels + @mocked_director_rest_api_base.get( + path__regex=r"^/services/(?P[/\w-]+)/(?P[0-9\.]+)/labels$", + name="get_service_labels_no_org", + ) + def _get_service_labels_no_org(request, service_key, service_version): + return httpx.Response( + status_code=status.HTTP_200_OK, + json={ + "data": get_mocked_service_labels( + service_key, service_version, include_org_labels=False + ) + }, + ) + + director_api = get_director_client(app) + service_extras = await director_api.get_service_extras(service_key, service_version) + + # Check node requirements are present + assert service_extras.node_requirements is not None + assert service_extras.node_requirements.cpu > 0 + assert service_extras.node_requirements.ram > 0 + + # Check service build details are NOT present (since we don't have org.label-schema labels) + assert service_extras.service_build_details is None diff --git a/services/catalog/tests/unit/test_core_settings.py b/services/catalog/tests/unit/test_core_settings.py index 9f94c6c35882..15476b4a37c3 100644 --- a/services/catalog/tests/unit/test_core_settings.py +++ 
b/services/catalog/tests/unit/test_core_settings.py @@ -3,19 +3,13 @@ # pylint: disable=unused-variable # pylint: disable=too-many-arguments - from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_catalog.core.settings import ApplicationSettings -def test_valid_web_application_settings(app_environment: EnvVarsDict): - """ - We can validate actual .env files (also refered as `repo.config` files) by passing them via the CLI - - $ ln -s /path/to/osparc-config/deployments/mydeploy.com/repo.config .secrets - $ pytest --external-envfile=.secrets --pdb tests/unit/test_core_settings.py +def test_valid_application_settings(app_environment: EnvVarsDict): + assert app_environment - """ settings = ApplicationSettings() # type: ignore assert settings diff --git a/services/catalog/tests/unit/test_utils_service_extras.py b/services/catalog/tests/unit/test_utils_service_extras.py index 15db550e774c..d49d40a13956 100644 --- a/services/catalog/tests/unit/test_utils_service_extras.py +++ b/services/catalog/tests/unit/test_utils_service_extras.py @@ -6,7 +6,9 @@ import pytest from fastapi import FastAPI, status from httpx import AsyncClient -from models_library.api_schemas_directorv2.services import ServiceExtras +from models_library.api_schemas_directorv2.services import ( + ServiceExtras, +) from pydantic import TypeAdapter from respx import MockRouter diff --git a/services/catalog/tests/unit/with_dbs/conftest.py b/services/catalog/tests/unit/with_dbs/conftest.py index 15bc8ac5b74b..a37aaf479302 100644 --- a/services/catalog/tests/unit/with_dbs/conftest.py +++ b/services/catalog/tests/unit/with_dbs/conftest.py @@ -22,13 +22,15 @@ from pytest_simcore.helpers.faker_factories import ( random_service_access_rights, random_service_meta_data, - random_user, ) from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.postgres_tools import ( PostgresTestConfig, insert_and_get_row_lifespan, ) +from pytest_simcore.helpers.postgres_users import ( + insert_and_get_user_and_secrets_lifespan, +) from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_postgres_database.models.groups import groups from simcore_postgres_database.models.products import products @@ -36,7 +38,6 @@ services_access_rights, services_meta_data, ) -from simcore_postgres_database.models.users import users from simcore_service_catalog.core.settings import ApplicationSettings from sqlalchemy import sql from sqlalchemy.dialects.postgresql import insert as pg_insert @@ -151,12 +152,9 @@ async def user( injects a user in db """ assert user_id == user["id"] - async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup + async with insert_and_get_user_and_secrets_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup sqlalchemy_async_engine, - table=users, - values=user, - pk_col=users.c.id, - pk_value=user["id"], + **user, ) as row: yield row @@ -165,16 +163,14 @@ async def user( async def other_user( user_id: UserID, sqlalchemy_async_engine: AsyncEngine, - faker: Faker, ) -> AsyncIterator[dict[str, Any]]: - - _other_user = random_user(fake=faker, id=user_id + 1) - async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup + """ + injects a other user in db (!= user) + """ + async with insert_and_get_user_and_secrets_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup sqlalchemy_async_engine, - table=users, - values=_other_user, - pk_col=users.c.id, - 
pk_value=_other_user["id"], + name="other_user", + id=user_id + 1, ) as row: yield row diff --git a/services/clusters-keeper/.env-devel b/services/clusters-keeper/.env-devel index 1c103bc8dd29..cd6039eec611 100644 --- a/services/clusters-keeper/.env-devel +++ b/services/clusters-keeper/.env-devel @@ -6,7 +6,7 @@ EC2_CLUSTERS_KEEPER_ACCESS_KEY_ID=XXXXXXXXXX PRIMARY_EC2_INSTANCES_ALLOWED_TYPES='{"t2.medium":"ami_id": "XXXXXXXXXX", "custom_boot_scripts": ["whoami"]}}' PRIMARY_EC2_INSTANCES_KEY_NAME=XXXXXXXXXX PRIMARY_EC2_INSTANCES_SECURITY_GROUP_IDS=XXXXXXXXXX -PRIMARY_EC2_INSTANCES_SUBNET_ID=XXXXXXXXXX +PRIMARY_EC2_INSTANCES_SUBNET_IDS='["XXXXXXXXXX"]' EC2_CLUSTERS_KEEPER_SECRET_ACCESS_KEY=XXXXXXXXXX CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX="testing" LOG_FORMAT_LOCAL_DEV_ENABLED=True diff --git a/services/clusters-keeper/Dockerfile b/services/clusters-keeper/Dockerfile index 80af41307c2d..a775c6d441f7 100644 --- a/services/clusters-keeper/Dockerfile +++ b/services/clusters-keeper/Dockerfile @@ -2,7 +2,7 @@ # Define arguments in the global scope ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build FROM python:${PYTHON_VERSION}-slim-bookworm AS base-arm64 @@ -34,6 +34,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=private \ set -eux; \ apt-get update; \ apt-get install -y --no-install-recommends \ + fd-find \ gosu \ ca-certificates \ curl \ @@ -108,10 +109,7 @@ RUN uv venv "${VIRTUAL_ENV}" -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools + WORKDIR /build @@ -128,6 +126,9 @@ WORKDIR /build FROM build AS prod-only-deps ENV SC_BUILD_TARGET=prod-only-deps +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy WORKDIR /build/services/clusters-keeper @@ -153,8 +154,6 @@ ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production ENV PYTHONOPTIMIZE=TRUE -# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode -ENV UV_COMPILE_BYTECODE=1 WORKDIR /home/scu # ensure home folder is read/writable for user scu diff --git a/services/clusters-keeper/docker/boot.sh b/services/clusters-keeper/docker/boot.sh index 384133c7a874..c465ac2316f6 100755 --- a/services/clusters-keeper/docker/boot.sh +++ b/services/clusters-keeper/docker/boot.sh @@ -24,7 +24,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/clusters-keeper - uv pip --quiet sync requirements/dev.txt + uv pip --quiet sync --link-mode=copy requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -33,7 +33,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy if command -v uv >/dev/null 2>&1; then - uv pip install debugpy + uv pip install --link-mode=copy debugpy else pip install debugpy fi @@ -48,19 +48,22 @@ SERVER_LOG_LEVEL=$(echo "${APP_LOG_LEVEL}" | tr '[:upper:]' '[:lower:]') echo "$INFO" "Log-level app/server: $APP_LOG_LEVEL/$SERVER_LOG_LEVEL" if [ "${SC_BOOT_MODE}" = "debug" ]; then - reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path "*/src/*" ! 
-path "*.*" -exec echo '--reload-dir {} \' \;) + reload_dir_packages=$(fdfind src /devel/packages --exec echo '--reload-dir {} ' | tr '\n' ' ') exec sh -c " cd services/clusters-keeper/src/simcore_service_clusters_keeper && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${CLUSTERS_KEEPER_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${CLUSTERS_KEEPER_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ - $reload_dir_packages + $reload_dir_packages \ --reload-dir . \ --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_clusters_keeper.main:the_app \ + exec uvicorn \ + --factory simcore_service_clusters_keeper.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/clusters-keeper/docker/entrypoint.sh b/services/clusters-keeper/docker/entrypoint.sh index 66fbfba9200d..651a1ea875f0 100755 --- a/services/clusters-keeper/docker/entrypoint.sh +++ b/services/clusters-keeper/docker/entrypoint.sh @@ -26,6 +26,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE @@ -63,14 +64,12 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; - # change user property of files already around + fdfind --owner ":$SC_USER_ID" --exclude proc --exec-batch chgrp --no-dereference "$CONT_GROUPNAME" . '/' echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fdfind --owner "$SC_USER_ID:" --exclude proc --exec-batch chown --no-dereference "$SC_USER_NAME" . 
'/' fi fi - # Appends docker group if socket is mounted DOCKER_MOUNT=/var/run/docker.sock if stat $DOCKER_MOUNT >/dev/null 2>&1; then diff --git a/services/clusters-keeper/requirements/_base.txt b/services/clusters-keeper/requirements/_base.txt index 08f397566130..3fa4f2f82d9c 100644 --- a/services/clusters-keeper/requirements/_base.txt +++ b/services/clusters-keeper/requirements/_base.txt @@ -26,7 +26,7 @@ aiofiles==24.1.0 # aioboto3 aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -78,6 +78,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi attrs==25.3.0 @@ -123,9 +125,10 @@ certifi==2025.4.26 # httpcore # httpx # requests + # sentry-sdk charset-normalizer==3.4.2 # via requests -click==8.1.8 +click==8.2.1 # via # -c requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # dask @@ -143,12 +146,6 @@ dask==2025.5.0 # -c requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # -r requirements/_base.in # distributed -deprecated==1.2.18 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions distributed==2025.5.0 # via # -c requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt @@ -163,12 +160,14 @@ exceptiongroup==1.3.0 # via aio-pika fast-depends==2.4.12 # via faststream -fastapi==0.115.12 +fastapi==0.116.1 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi-lifespan-manager -fastapi-cli==0.0.7 +fastapi-cli==0.0.8 # via fastapi +fastapi-cloud-cli==0.1.5 + # via fastapi-cli fastapi-lifespan-manager==0.1.4 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in faststream==0.5.41 @@ -229,6 +228,7 @@ httpx==0.28.1 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi + # fastapi-cloud-cli hyperframe==6.1.0 # via h2 idna==3.10 @@ -277,6 +277,12 @@ jmespath==1.0.1 # aiobotocore # boto3 # botocore +jsonref==1.1.0 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.23.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in @@ -307,7 +313,7 @@ multidict==6.4.4 # aiobotocore # aiohttp # yarl -opentelemetry-api==1.33.1 
+opentelemetry-api==1.34.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in @@ -316,6 +322,7 @@ opentelemetry-api==1.33.1 # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-botocore # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx @@ -325,76 +332,82 @@ opentelemetry-api==1.33.1 # opentelemetry-propagator-aws-xray # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.33.1 +opentelemetry-exporter-otlp==1.34.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.33.1 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.33.1 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.33.1 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.54b1 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-botocore # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.54b1 +opentelemetry-instrumentation-aio-pika==0.55b1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-asgi==0.54b1 +opentelemetry-instrumentation-asgi==0.55b1 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-botocore==0.54b1 +opentelemetry-instrumentation-asyncpg==0.55b1 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-botocore==0.55b1 # via -r requirements/../../../packages/aws-library/requirements/_base.in -opentelemetry-instrumentation-fastapi==0.54b1 +opentelemetry-instrumentation-fastapi==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.54b1 +opentelemetry-instrumentation-httpx==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-logging==0.54b1 +opentelemetry-instrumentation-logging==0.55b1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.54b1 +opentelemetry-instrumentation-redis==0.55b1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r 
requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.54b1 +opentelemetry-instrumentation-requests==0.55b1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in opentelemetry-propagator-aws-xray==1.0.2 # via opentelemetry-instrumentation-botocore -opentelemetry-proto==1.33.1 +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.33.1 +opentelemetry-sdk==1.34.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.54b1 +opentelemetry-semantic-conventions==0.55b1 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-botocore # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.54b1 +opentelemetry-util-http==0.55b1 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi @@ -461,7 +474,7 @@ propcache==0.3.1 # via # aiohttp # yarl -protobuf==5.29.4 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto @@ -473,7 +486,7 @@ psutil==7.0.0 # distributed pycryptodome==3.23.0 # via stream-zip -pydantic==2.11.4 +pydantic==2.11.7 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -524,11 +537,12 @@ pydantic==2.11.4 # -r requirements/../../../packages/settings-library/requirements/_base.in # fast-depends # fastapi + # fastapi-cloud-cli # pydantic-extra-types # pydantic-settings pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.4 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -685,9 +699,9 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via opentelemetry-exporter-otlp-proto-http -rich==14.0.0 +rich==14.1.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -695,14 +709,20 @@ rich==14.0.0 # -r requirements/../../../packages/settings-library/requirements/_base.in # rich-toolkit # typer -rich-toolkit==0.14.7 - # via fastapi-cli +rich-toolkit==0.15.0 + # via + # fastapi-cli + # fastapi-cloud-cli +rignore==0.6.4 + # via 
fastapi-cloud-cli rpds-py==0.25.0 # via # jsonschema # referencing s3transfer==0.11.3 # via boto3 +sentry-sdk==2.35.0 + # via fastapi-cloud-cli sh==2.2.2 # via -r requirements/../../../packages/aws-library/requirements/_base.in shellingham==1.5.4 @@ -710,12 +730,14 @@ shellingham==1.5.4 six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via anyio + # via + # anyio + # asgi-lifespan sortedcontainers==2.4.0 # via # -c requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # distributed -starlette==0.46.2 +starlette==0.47.2 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -770,13 +792,14 @@ tqdm==4.67.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.15.4 +typer==0.16.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # fastapi-cli + # fastapi-cloud-cli types-aiobotocore==2.22.0 # via -r requirements/../../../packages/aws-library/requirements/_base.in types-aiobotocore-ec2==2.22.0 @@ -789,18 +812,23 @@ types-awscrt==0.27.2 # via botocore-stubs types-python-dateutil==2.9.0.20250516 # via arrow -typing-extensions==4.13.2 +typing-extensions==4.14.1 # via # aiodebug # anyio # exceptiongroup # fastapi # faststream + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pydantic # pydantic-core # pydantic-extra-types # rich-toolkit + # starlette # typer # types-aiobotocore # types-aiobotocore-ec2 @@ -809,7 +837,7 @@ typing-extensions==4.13.2 # typing-inspection typing-inspection==0.4.0 # via pydantic -urllib3==2.4.0 +urllib3==2.5.0 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -839,10 +867,12 @@ urllib3==2.4.0 # botocore # distributed # requests + # sentry-sdk uvicorn==0.34.2 # via # fastapi # fastapi-cli + # fastapi-cloud-cli uvloop==0.21.0 # via uvicorn watchfiles==1.0.5 @@ -852,7 +882,6 @@ websockets==15.0.1 wrapt==1.17.2 # via # aiobotocore - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-httpx diff --git a/services/clusters-keeper/requirements/_test.txt b/services/clusters-keeper/requirements/_test.txt index 41eacfca34a6..d0809661686c 100644 --- a/services/clusters-keeper/requirements/_test.txt +++ b/services/clusters-keeper/requirements/_test.txt @@ -6,7 +6,7 @@ aiohappyeyeballs==2.6.1 # via # -c requirements/_base.txt 
# aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -26,7 +26,9 @@ anyio==4.9.0 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in attrs==25.3.0 # via # -c requirements/_base.txt @@ -66,7 +68,7 @@ charset-normalizer==3.4.2 # via # -c requirements/_base.txt # requests -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # flask @@ -95,7 +97,7 @@ flask==3.1.1 # via # flask-cors # moto -flask-cors==6.0.0 +flask-cors==6.0.1 # via moto frozenlist==1.6.0 # via @@ -197,7 +199,9 @@ parse==1.20.2 pathable==0.4.4 # via jsonschema-path pluggy==1.6.0 - # via pytest + # via + # pytest + # pytest-cov ply==3.11 # via jsonpath-ng propcache==0.3.1 @@ -213,7 +217,7 @@ py-partiql-parser==0.6.1 # via moto pycparser==2.22 # via cffi -pydantic==2.11.4 +pydantic==2.11.7 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -222,19 +226,23 @@ pydantic-core==2.33.2 # via # -c requirements/_base.txt # pydantic +pygments==2.19.1 + # via + # -c requirements/_base.txt + # pytest pyparsing==3.2.3 # via moto -pytest==8.3.5 +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio # pytest-cov # pytest-mock -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in -pytest-cov==6.1.1 +pytest-cov==6.2.1 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -269,7 +277,7 @@ referencing==0.35.1 # jsonschema-specifications regex==2024.11.6 # via cfn-lint -requests==2.32.3 +requests==2.32.4 # via # -c requirements/_base.txt # docker @@ -291,7 +299,7 @@ s3transfer==0.11.3 # via # -c requirements/_base.txt # boto3 -setuptools==80.7.1 +setuptools==80.9.0 # via moto six==1.17.0 # via @@ -311,7 +319,7 @@ sympy==1.14.0 # via cfn-lint types-pyyaml==6.0.12.20250516 # via -r requirements/_test.in -typing-extensions==4.13.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # anyio @@ -326,7 +334,7 @@ typing-inspection==0.4.0 # pydantic tzdata==2025.2 # via faker -urllib3==2.4.0 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/services/clusters-keeper/requirements/_tools.txt b/services/clusters-keeper/requirements/_tools.txt index c76d3992bbee..cebc096a0295 100644 --- a/services/clusters-keeper/requirements/_tools.txt +++ b/services/clusters-keeper/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -28,7 +28,7 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.1.0 # via @@ -43,7 +43,9 @@ packaging==25.0 # black # build pathspec==0.12.1 - # via black + # via + # black + # mypy pip==25.1.1 # via pip-tools pip-tools==7.4.1 @@ -70,13 +72,13 @@ pyyaml==6.0.2 # watchdog ruff==0.11.10 # via -r requirements/../../../requirements/devenv.txt -setuptools==80.7.1 +setuptools==80.9.0 # via # -c requirements/_test.txt # pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.13.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git 
a/services/clusters-keeper/src/simcore_service_clusters_keeper/cli.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/cli.py index b65355463c47..66a71546a00b 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/cli.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/cli.py @@ -19,6 +19,6 @@ def run(): """Runs application""" typer.secho("Sorry, this entrypoint is intentionally disabled. Use instead") typer.secho( - "$ uvicorn simcore_service_clusters_keeper.main:the_app", + "$ uvicorn --factory simcore_service_clusters_keeper.main:app_factory", fg=typer.colors.BLUE, ) diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/application.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/application.py index bbda1b456a4a..1c8c7e448c0a 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/application.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/application.py @@ -26,26 +26,10 @@ from ..rpc.rpc_routes import setup_rpc_routes from .settings import ApplicationSettings -_LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR -_NOISY_LOGGERS = ( - "aiobotocore", - "aio_pika", - "aiormq", - "botocore", - "werkzeug", -) - _logger = logging.getLogger(__name__) def create_app(settings: ApplicationSettings) -> FastAPI: - # keep mostly quiet noisy loggers - quiet_level: int = max( - min(logging.root.level + _LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING - ) - for name in _NOISY_LOGGERS: - logging.getLogger(name).setLevel(quiet_level) - _logger.info("app settings: %s", settings.model_dump_json(indent=1)) app = FastAPI( diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py index 525148fa257e..463ac51189e8 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/core/settings.py @@ -4,6 +4,7 @@ from aws_library.ec2 import EC2InstanceBootSpecific, EC2Tags from common_library.basic_types import DEFAULT_FACTORY +from common_library.logging.logging_utils_filtering import LoggerName, MessageSubstring from fastapi import FastAPI from models_library.basic_types import ( BootModeEnum, @@ -23,7 +24,7 @@ field_validator, ) from pydantic_settings import SettingsConfigDict -from servicelib.logging_utils_filtering import LoggerName, MessageSubstring +from servicelib.logging_utils import LogLevelInt from settings_library.base import BaseCustomSettings from settings_library.docker_registry import RegistrySettings from settings_library.ec2 import EC2Settings @@ -117,8 +118,8 @@ class WorkersEC2InstancesSettings(BaseCustomSettings): " this is required to start a new EC2 instance", ), ] - WORKERS_EC2_INSTANCES_SUBNET_ID: Annotated[ - str, + WORKERS_EC2_INSTANCES_SUBNET_IDS: Annotated[ + list[str], Field( min_length=1, description="A subnet is a range of IP addresses in your VPC " @@ -185,8 +186,8 @@ class PrimaryEC2InstancesSettings(BaseCustomSettings): " this is required to start a new EC2 instance", ), ] - PRIMARY_EC2_INSTANCES_SUBNET_ID: Annotated[ - str, + PRIMARY_EC2_INSTANCES_SUBNET_IDS: Annotated[ + list[str], Field( min_length=1, description="A subnet is a range of IP addresses in your VPC " @@ -426,13 +427,28 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): ), ] + CLUSTERS_KEEPER_DASK_NPROCS: Annotated[ + int, + Field( + 
description="overrides the default number of worker processes in the dask-sidecars, setting it to negative values will use dask defaults (see description in 'dask worker --help')", + ), + ] + CLUSTERS_KEEPER_DASK_NTHREADS: Annotated[ NonNegativeInt, Field( - description="overrides the default number of threads in the dask-sidecars, setting it to 0 will use the default (see description in dask-sidecar)", + description="overrides the default number of threads per process in the dask-sidecars, setting it to 0 will use the default (see description in dask-sidecar)", ), ] + CLUSTERS_KEEPER_DASK_NTHREADS_MULTIPLIER: Annotated[ + PositiveInt, + Field( + description="multiplier for the default number of threads per process in the dask-sidecars, (see description in dask-sidecar)", + le=10, + ), + ] = 1 + CLUSTERS_KEEPER_DASK_WORKER_SATURATION: Annotated[ NonNegativeFloat | Literal["inf"], Field( @@ -454,8 +470,8 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): ] @cached_property - def LOG_LEVEL(self) -> LogLevel: # noqa: N802 - return self.CLUSTERS_KEEPER_LOGLEVEL + def log_level(self) -> LogLevelInt: + return cast(LogLevelInt, self.CLUSTERS_KEEPER_LOGLEVEL) @field_validator("CLUSTERS_KEEPER_LOGLEVEL", mode="before") @classmethod diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/data/docker-compose.yml b/services/clusters-keeper/src/simcore_service_clusters_keeper/data/docker-compose.yml index 6ba13f58eac9..761d3029c454 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/data/docker-compose.yml +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/data/docker-compose.yml @@ -56,8 +56,9 @@ services: - cluster environment: DASK_LOG_FORMAT_LOCAL_DEV_ENABLED: 1 - DASK_NPROCS: 1 + DASK_NPROCS: ${DASK_NPROCS} DASK_NTHREADS: ${DASK_NTHREADS} + DASK_NTHREADS_MULTIPLIER: ${DASK_NTHREADS_MULTIPLIER} DASK_SCHEDULER_URL: tls://dask-scheduler:8786 DASK_SIDECAR_NON_USABLE_RAM: 0 DASK_SIDECAR_NUM_NON_USABLE_CPUS: 0 @@ -111,7 +112,7 @@ services: EC2_INSTANCES_MAX_INSTANCES: ${WORKERS_EC2_INSTANCES_MAX_INSTANCES} EC2_INSTANCES_NAME_PREFIX: ${EC2_INSTANCES_NAME_PREFIX} EC2_INSTANCES_SECURITY_GROUP_IDS: ${WORKERS_EC2_INSTANCES_SECURITY_GROUP_IDS} - EC2_INSTANCES_SUBNET_ID: ${WORKERS_EC2_INSTANCES_SUBNET_ID} + EC2_INSTANCES_SUBNET_IDS: ${WORKERS_EC2_INSTANCES_SUBNET_IDS} EC2_INSTANCES_TIME_BEFORE_DRAINING: ${WORKERS_EC2_INSTANCES_TIME_BEFORE_DRAINING} EC2_INSTANCES_TIME_BEFORE_TERMINATION: ${WORKERS_EC2_INSTANCES_TIME_BEFORE_TERMINATION} LOG_FORMAT_LOCAL_DEV_ENABLED: 1 @@ -164,7 +165,7 @@ services: resources: limits: memory: 512M - cpus: "0.5" + cpus: "1.0" prometheus: image: prom/prometheus:v2.51.0@sha256:5ccad477d0057e62a7cd1981ffcc43785ac10c5a35522dc207466ff7e7ec845f diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/main.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/main.py index b2844bde6afb..d4e4bdf99ee4 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/main.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/main.py @@ -1,22 +1,39 @@ -"""Main application to be deployed by uvicorn (or equivalent) server - -""" +"""Main application to be deployed by uvicorn (or equivalent) server""" import logging +from typing import Final +from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.logging_utils import config_all_loggers +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from 
simcore_service_clusters_keeper.core.application import create_app from simcore_service_clusters_keeper.core.settings import ApplicationSettings -the_settings = ApplicationSettings.create_from_envs() -logging.basicConfig(level=the_settings.log_level) -logging.root.setLevel(the_settings.log_level) -config_all_loggers( - log_format_local_dev_enabled=the_settings.CLUSTERS_KEEPER_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=the_settings.CLUSTERS_KEEPER_LOG_FILTER_MAPPING, - tracing_settings=the_settings.CLUSTERS_KEEPER_TRACING, +_logger = logging.getLogger(__name__) + +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aiobotocore", + "aio_pika", + "aiormq", + "botocore", + "werkzeug", ) -# SINGLETON FastAPI app -the_app: FastAPI = create_app(the_settings) + +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = create_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.CLUSTERS_KEEPER_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.CLUSTERS_KEEPER_LOG_FILTER_MAPPING, + tracing_settings=app_settings.CLUSTERS_KEEPER_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) + + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters.py index 89860549fd3a..a95c879a2a34 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters.py @@ -46,10 +46,10 @@ async def _get_primary_ec2_params( app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES.PRIMARY_EC2_INSTANCES_ALLOWED_TYPES.items() ) ) - ec2_instance_types: list[ - EC2InstanceType - ] = await ec2_client.get_ec2_instance_capabilities( - instance_type_names={ec2_type_name} + ec2_instance_types: list[EC2InstanceType] = ( + await ec2_client.get_ec2_instance_capabilities( + instance_type_names={ec2_type_name} + ) ) assert ec2_instance_types # nosec assert len(ec2_instance_types) == 1 # nosec @@ -77,7 +77,7 @@ async def create_cluster( ami_id=ec2_instance_boot_specs.ami_id, key_name=app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES.PRIMARY_EC2_INSTANCES_KEY_NAME, security_group_ids=app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES.PRIMARY_EC2_INSTANCES_SECURITY_GROUP_IDS, - subnet_id=app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES.PRIMARY_EC2_INSTANCES_SUBNET_ID, + subnet_ids=app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES.PRIMARY_EC2_INSTANCES_SUBNET_IDS, iam_instance_profile=app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES.PRIMARY_EC2_INSTANCES_ATTACHED_IAM_PROFILE, ) new_ec2_instance_data: list[EC2InstanceData] = await ec2_client.launch_instances( diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_task.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_task.py index c540d7b160f9..70e80c550f80 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_task.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_task.py @@ -2,8 +2,8 @@ import logging from collections.abc import Awaitable, Callable 
+from common_library.async_tools import cancel_wait_task from fastapi import FastAPI -from servicelib.async_utils import cancel_wait_task from servicelib.background_task import create_periodic_task from servicelib.redis import exclusive diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/redis.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/redis.py index 8e2d5b71e339..595d41a4a55b 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/redis.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/redis.py @@ -19,6 +19,7 @@ async def on_startup() -> None: app.state.redis_client_sdk = RedisClientSDK( redis_locks_dsn, client_name=APP_NAME ) + await app.state.redis_client_sdk.setup() async def on_shutdown() -> None: redis_client_sdk: None | RedisClientSDK = app.state.redis_client_sdk diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py index d2820ef2b882..653cb6f0ecdc 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/utils/clusters.py @@ -89,20 +89,22 @@ def _convert_to_env_dict(entries: dict[str, Any]) -> str: f"CLUSTERS_KEEPER_EC2_ENDPOINT={app_settings.CLUSTERS_KEEPER_EC2_ACCESS.EC2_ENDPOINT or 'null'}", f"CLUSTERS_KEEPER_EC2_REGION_NAME={app_settings.CLUSTERS_KEEPER_EC2_ACCESS.EC2_REGION_NAME}", f"CLUSTERS_KEEPER_EC2_SECRET_ACCESS_KEY={app_settings.CLUSTERS_KEEPER_EC2_ACCESS.EC2_SECRET_ACCESS_KEY}", - f"DASK_NTHREADS={app_settings.CLUSTERS_KEEPER_DASK_NTHREADS or ''}", + f"DASK_NPROCS={app_settings.CLUSTERS_KEEPER_DASK_NPROCS}", + f"DASK_NTHREADS={app_settings.CLUSTERS_KEEPER_DASK_NTHREADS}", + f"DASK_NTHREADS_MULTIPLIER={app_settings.CLUSTERS_KEEPER_DASK_NTHREADS_MULTIPLIER}", f"DASK_TLS_CA_FILE={_HOST_TLS_CA_FILE_PATH}", f"DASK_TLS_CERT={_HOST_TLS_CERT_FILE_PATH}", f"DASK_TLS_KEY={_HOST_TLS_KEY_FILE_PATH}", f"DASK_WORKER_SATURATION={app_settings.CLUSTERS_KEEPER_DASK_WORKER_SATURATION}", f"DOCKER_IMAGE_TAG={app_settings.CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG}", f"EC2_INSTANCES_NAME_PREFIX={cluster_machines_name_prefix}", - f"LOG_LEVEL={app_settings.LOG_LEVEL}", + f"LOG_LEVEL={app_settings.log_level}", f"WORKERS_EC2_INSTANCES_ALLOWED_TYPES={_convert_to_env_dict(app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_ALLOWED_TYPES)}", f"WORKERS_EC2_INSTANCES_CUSTOM_TAGS={_convert_to_env_dict(app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_CUSTOM_TAGS | additional_custom_tags)}", f"WORKERS_EC2_INSTANCES_KEY_NAME={app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_KEY_NAME}", f"WORKERS_EC2_INSTANCES_MAX_INSTANCES={app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_MAX_INSTANCES}", f"WORKERS_EC2_INSTANCES_SECURITY_GROUP_IDS={_convert_to_env_list(app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_SECURITY_GROUP_IDS)}", - f"WORKERS_EC2_INSTANCES_SUBNET_ID={app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_SUBNET_ID}", + f"WORKERS_EC2_INSTANCES_SUBNET_IDS={_convert_to_env_list(app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_SUBNET_IDS)}", f"WORKERS_EC2_INSTANCES_TIME_BEFORE_DRAINING={app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_TIME_BEFORE_DRAINING}", 
f"WORKERS_EC2_INSTANCES_TIME_BEFORE_TERMINATION={app_settings.CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES.WORKERS_EC2_INSTANCES_TIME_BEFORE_TERMINATION}", f"AUTOSCALING_RABBITMQ={_convert_to_env_dict(model_dump_with_secrets(app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES.PRIMARY_EC2_INSTANCES_RABBIT, show_secrets=True)) if app_settings.CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES.PRIMARY_EC2_INSTANCES_RABBIT else 'null'}", diff --git a/services/clusters-keeper/tests/manual/README.md b/services/clusters-keeper/tests/manual/README.md index 4ef8e0bd72c6..6fd9ce5efb68 100644 --- a/services/clusters-keeper/tests/manual/README.md +++ b/services/clusters-keeper/tests/manual/README.md @@ -70,14 +70,14 @@ PRIMARY_EC2_INSTANCES_ALLOWED_TYPES='{"t2.medium":"ami_id": "XXXXXXXX", "custom_ PRIMARY_EC2_INSTANCES_KEY_NAME=XXXXXXX PRIMARY_EC2_INSTANCES_MAX_INSTANCES=10 PRIMARY_EC2_INSTANCES_SECURITY_GROUP_IDS="[\"XXXXXXX\"]" -PRIMARY_EC2_INSTANCES_SUBNET_ID=XXXXXXX +PRIMARY_EC2_INSTANCES_SUBNET_IDS="[\"XXXXXXX\"]" CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES={} WORKERS_EC2_INSTANCES_ALLOWED_TYPES='{"g4dn.xlarge": {"ami_id": "XXXXXXXX", "custom_boot_scripts": ["whoami"], "pre_pull_images": ["ubuntu:latest"]}}' WORKERS_EC2_INSTANCES_KEY_NAME=XXXXXXX WORKERS_EC2_INSTANCES_MAX_INSTANCES=10 WORKERS_EC2_INSTANCES_SECURITY_GROUP_IDS="[\"XXXXXXX\"]" -WORKERS_EC2_INSTANCES_SUBNET_ID=XXXXXXX +WORKERS_EC2_INSTANCES_SUBNET_IDS="[\"XXXXXXX\"]" WORKERS_EC2_INSTANCES_TIME_BEFORE_DRAINING="00:00:20" WORKERS_EC2_INSTANCES_TIME_BEFORE_TERMINATION="00:03:00" WORKERS_EC2_INSTANCES_CUSTOM_TAGS='{"osparc-tag": "some fun tag value"}' diff --git a/services/clusters-keeper/tests/unit/conftest.py b/services/clusters-keeper/tests/unit/conftest.py index a80776951ded..f6c4283f77f9 100644 --- a/services/clusters-keeper/tests/unit/conftest.py +++ b/services/clusters-keeper/tests/unit/conftest.py @@ -3,7 +3,6 @@ # pylint:disable=redefined-outer-name import importlib.resources -import json import random from collections.abc import AsyncIterator, Awaitable, Callable, Iterator from pathlib import Path @@ -17,6 +16,7 @@ import yaml from asgi_lifespan import LifespanManager from aws_library.ec2 import EC2InstanceBootSpecific +from common_library.json_serialization import json_dumps from faker import Faker from fakeredis.aioredis import FakeRedis from fastapi import FastAPI @@ -39,6 +39,7 @@ from types_aiobotocore_ec2.literals import InstanceTypeType pytest_plugins = [ + "pytest_simcore.asyncio_event_loops", "pytest_simcore.aws_ec2_service", "pytest_simcore.aws_server", "pytest_simcore.docker", @@ -47,6 +48,7 @@ "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", "pytest_simcore.faker_users_data", + "pytest_simcore.logging", "pytest_simcore.rabbit_service", "pytest_simcore.repository_paths", "pytest_simcore.simcore_service_library_fixtures", @@ -128,26 +130,28 @@ def app_environment( "CLUSTERS_KEEPER_SSM_SECRET_ACCESS_KEY": faker.pystr(), "CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES": "{}", "CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX": faker.pystr(), + "CLUSTERS_KEEPER_DASK_NPROCS": f"{faker.pyint()}", "CLUSTERS_KEEPER_DASK_NTHREADS": f"{faker.pyint(min_value=0)}", + "CLUSTERS_KEEPER_DASK_NTHREADS_MULTIPLIER": f"{faker.pyint(min_value=1, max_value=10)}", "CLUSTERS_KEEPER_DASK_WORKER_SATURATION": f"{faker.pyfloat(min_value=0.1)}", "CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH": "{}", "PRIMARY_EC2_INSTANCES_KEY_NAME": faker.pystr(), - "PRIMARY_EC2_INSTANCES_SECURITY_GROUP_IDS": json.dumps( + "PRIMARY_EC2_INSTANCES_SECURITY_GROUP_IDS": 
json_dumps( faker.pylist(allowed_types=(str,)) ), - "PRIMARY_EC2_INSTANCES_SUBNET_ID": faker.pystr(), - "PRIMARY_EC2_INSTANCES_ALLOWED_TYPES": json.dumps( + "PRIMARY_EC2_INSTANCES_SUBNET_IDS": json_dumps( + faker.pylist(allowed_types=(str,)) + ), + "PRIMARY_EC2_INSTANCES_ALLOWED_TYPES": json_dumps( { random.choice( # noqa: S311 ec2_instances - ): EC2InstanceBootSpecific.model_config["json_schema_extra"][ - "examples" - ][ + ): EC2InstanceBootSpecific.model_json_schema()["examples"][ 1 ] # NOTE: we use example with custom script } ), - "PRIMARY_EC2_INSTANCES_CUSTOM_TAGS": json.dumps( + "PRIMARY_EC2_INSTANCES_CUSTOM_TAGS": json_dumps( {"osparc-tag": "the pytest tag is here"} ), "PRIMARY_EC2_INSTANCES_ATTACHED_IAM_PROFILE": "", # must be empty since we would need to add it to moto as well @@ -157,22 +161,22 @@ def app_environment( "PRIMARY_EC2_INSTANCES_PROMETHEUS_USERNAME": faker.user_name(), "PRIMARY_EC2_INSTANCES_PROMETHEUS_PASSWORD": faker.password(), "CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES": "{}", - "WORKERS_EC2_INSTANCES_ALLOWED_TYPES": json.dumps( + "WORKERS_EC2_INSTANCES_ALLOWED_TYPES": json_dumps( { ec2_type_name: random.choice( # noqa: S311 - EC2InstanceBootSpecific.model_config["json_schema_extra"][ - "examples" - ] + EC2InstanceBootSpecific.model_json_schema()["examples"] ) for ec2_type_name in ec2_instances } ), - "WORKERS_EC2_INSTANCES_SECURITY_GROUP_IDS": json.dumps( + "WORKERS_EC2_INSTANCES_SECURITY_GROUP_IDS": json_dumps( + faker.pylist(allowed_types=(str,)) + ), + "WORKERS_EC2_INSTANCES_SUBNET_IDS": json_dumps( faker.pylist(allowed_types=(str,)) ), - "WORKERS_EC2_INSTANCES_SUBNET_ID": faker.pystr(), "WORKERS_EC2_INSTANCES_KEY_NAME": faker.pystr(), - "WORKERS_EC2_INSTANCES_CUSTOM_TAGS": json.dumps( + "WORKERS_EC2_INSTANCES_CUSTOM_TAGS": json_dumps( {"osparc-tag": "the pytest worker tag value is here"} ), }, @@ -192,10 +196,10 @@ def mocked_primary_ec2_instances_envs( monkeypatch, { "PRIMARY_EC2_INSTANCES_KEY_NAME": "osparc-pytest", - "PRIMARY_EC2_INSTANCES_SECURITY_GROUP_IDS": json.dumps( + "PRIMARY_EC2_INSTANCES_SECURITY_GROUP_IDS": json_dumps( [aws_security_group_id] ), - "PRIMARY_EC2_INSTANCES_SUBNET_ID": aws_subnet_id, + "PRIMARY_EC2_INSTANCES_SUBNET_IDS": json_dumps([aws_subnet_id]), }, ) return app_environment | envs diff --git a/services/clusters-keeper/tests/unit/test_core_settings.py b/services/clusters-keeper/tests/unit/test_core_settings.py index 021d7f4f1077..914f097cc979 100644 --- a/services/clusters-keeper/tests/unit/test_core_settings.py +++ b/services/clusters-keeper/tests/unit/test_core_settings.py @@ -4,12 +4,15 @@ import json -import random +import secrets import pytest from aws_library.ec2 import EC2InstanceBootSpecific from pydantic import ValidationError -from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import ( + EnvVarsDict, + setenvs_from_dict, +) from simcore_service_clusters_keeper.core.settings import ApplicationSettings from types_aiobotocore_ec2.literals import InstanceTypeType @@ -50,10 +53,8 @@ def test_multiple_primary_ec2_instances_raises( { "PRIMARY_EC2_INSTANCES_ALLOWED_TYPES": json.dumps( { - ec2_type_name: random.choice( # noqa: S311 - EC2InstanceBootSpecific.model_config["json_schema_extra"][ - "examples" - ] + ec2_type_name: secrets.choice( + EC2InstanceBootSpecific.model_json_schema()["examples"] ) for ec2_type_name in ec2_instances } @@ -110,3 +111,17 @@ def test_valid_primary_custom_tags( {"PRIMARY_EC2_INSTANCES_CUSTOM_TAGS": json.dumps(valid_tag)}, ) 
ApplicationSettings.create_from_envs() + + +def test_valid_application_settings( + monkeypatch: pytest.MonkeyPatch, + app_environment: EnvVarsDict, +): + # Mock + assert app_environment + + # Test + settings = ApplicationSettings() # type: ignore + assert settings + + assert settings == ApplicationSettings.create_from_envs() diff --git a/services/clusters-keeper/tests/unit/test_main.py b/services/clusters-keeper/tests/unit/test_main.py index 96d7fb8507d2..13cd355678c8 100644 --- a/services/clusters-keeper/tests/unit/test_main.py +++ b/services/clusters-keeper/tests/unit/test_main.py @@ -7,6 +7,6 @@ def test_main_app(app_environment: EnvVarsDict): - from simcore_service_clusters_keeper.main import the_app, the_settings + from simcore_service_clusters_keeper.main import app_factory - assert the_app.state.settings == the_settings + app_factory() diff --git a/services/dask-sidecar/Dockerfile b/services/dask-sidecar/Dockerfile index ae85188d40e4..c468ad87cae5 100644 --- a/services/dask-sidecar/Dockerfile +++ b/services/dask-sidecar/Dockerfile @@ -2,10 +2,10 @@ # Define arguments in the global scope ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build # we docker image is built based on debian -FROM --platform=${TARGETPLATFORM} python:${PYTHON_VERSION}-slim-bookworm AS base +FROM python:${PYTHON_VERSION}-slim-bookworm AS base ARG TARGETPLATFORM ARG BUILDPLATFORM RUN echo "I am running on $BUILDPLATFORM, building for $TARGETPLATFORM" > /log @@ -26,6 +26,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=private \ set -eux \ && apt-get update \ && apt-get install -y --no-install-recommends \ + fd-find \ iputils-ping \ curl \ gosu \ @@ -92,10 +93,7 @@ RUN uv venv "${VIRTUAL_ENV}" -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools + WORKDIR /build @@ -110,6 +108,9 @@ WORKDIR /build FROM build AS prod-only-deps ENV SC_BUILD_TARGET=prod-only-deps +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy WORKDIR /build/services/dask-sidecar @@ -134,8 +135,6 @@ ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production ENV PYTHONOPTIMIZE=TRUE -# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode -ENV UV_COMPILE_BYTECODE=1 WORKDIR /home/scu # ensure home folder is read/writable for user scu diff --git a/services/dask-sidecar/docker/boot.sh b/services/dask-sidecar/docker/boot.sh index 30cb7397189c..5a01d008c936 100755 --- a/services/dask-sidecar/docker/boot.sh +++ b/services/dask-sidecar/docker/boot.sh @@ -25,7 +25,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then python --version | sed 's/^/ /' command -v python | sed 's/^/ /' cd services/dask-sidecar - uv pip --quiet sync requirements/dev.txt + uv pip --quiet sync --link-mode=copy requirements/dev.txt cd - print_info "PIP :" uv pip list @@ -34,7 +34,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy if command -v uv >/dev/null 2>&1; then - uv pip install debugpy + uv pip install --link-mode=copy debugpy else pip install debugpy fi @@ -167,18 +167,31 @@ else DASK_NTHREADS=${DASK_NTHREADS:="$num_cpus"} DASK_MEMORY_LIMIT=${DASK_MEMORY_LIMIT:="$ram"} DASK_WORKER_NAME=${DASK_WORKER_NAME:="dask-sidecar_$(hostname)_$(date +'%Y-%m-%d_%T')_$$"} + # If DASK_NTHREADS_MULTIPLIER is defined, multiply DASK_NTHREADS (round to nearest int, min 1) + if [ -n "${DASK_NTHREADS_MULTIPLIER:-}" ]; then + # check 
DASK_NTHREADS_MULTIPLIER is a number + if awk -v m="$DASK_NTHREADS_MULTIPLIER" 'BEGIN{ if (m+0==m) exit 0; else exit 1 }'; then + # multiply and round to nearest int, min 1 + new_nthreads=$(awk -v n="$DASK_NTHREADS" -v m="$DASK_NTHREADS_MULTIPLIER" 'BEGIN{ r=n*m; if(r<1) r=1; printf("%d", (r==int(r)?int(r):int(r+0.5))) }') + DASK_NTHREADS="$new_nthreads" + print_info "DASK_NTHREADS multiplied by ${DASK_NTHREADS_MULTIPLIER} -> ${DASK_NTHREADS}" + else + print_info "DASK_NTHREADS_MULTIPLIER is not numeric: ${DASK_NTHREADS_MULTIPLIER}" + fi + fi + # # 'daemonic processes are not allowed to have children' arises when running the sidecar.cli # because multi-processing library is used by the sidecar and the nanny does not like it # setting --no-nanny fixes this: see https://github.com/dask/distributed/issues/2142 - print_info "Starting as a dask worker "${DASK_WORKER_VERSION}" -> "${DASK_SCHEDULER_URL}" ..." - print_info "Worker resources set as: "$resources"" + print_info "Starting as a dask worker ${DASK_WORKER_VERSION} -> ${DASK_SCHEDULER_URL} ..." + print_info "Worker resources set as: $resources" if [ "${SC_BOOT_MODE}" = "debug" ]; then exec watchmedo auto-restart --recursive --pattern="*.py;*/src/*" --ignore-patterns="*test*;pytest_simcore/*;setup.py;*ignore*" --ignore-directories -- \ dask worker "${DASK_SCHEDULER_URL}" \ --local-directory /tmp/dask-sidecar \ --preload simcore_service_dask_sidecar.worker \ - --nworkers ${DASK_NPROCS} \ + --nworkers "${DASK_NPROCS}" \ --nthreads "${DASK_NTHREADS}" \ --dashboard-address 8787 \ --memory-limit "${DASK_MEMORY_LIMIT}" \ @@ -188,7 +201,7 @@ else exec dask worker "${DASK_SCHEDULER_URL}" \ --local-directory /tmp/dask-sidecar \ --preload simcore_service_dask_sidecar.worker \ - --nworkers ${DASK_NPROCS} \ + --nworkers "${DASK_NPROCS}" \ --nthreads "${DASK_NTHREADS}" \ --dashboard-address 8787 \ --memory-limit "${DASK_MEMORY_LIMIT}" \ diff --git a/services/dask-sidecar/docker/entrypoint.sh b/services/dask-sidecar/docker/entrypoint.sh index f69fd1a71d86..b65860a3ef65 100755 --- a/services/dask-sidecar/docker/entrypoint.sh +++ b/services/dask-sidecar/docker/entrypoint.sh @@ -58,10 +58,9 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" echo "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; - # change user property of files already around + fdfind --owner ":$SC_USER_ID" --exclude proc --exec-batch chgrp --no-dereference "$CONT_GROUPNAME" . '/' echo "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fdfind --owner "$SC_USER_ID:" --exclude proc --exec-batch chown --no-dereference "$SC_USER_NAME" . 
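
For clarity, a small Python sketch (illustrative only; the service keeps this logic in awk inside boot.sh) of the arithmetic applied above when DASK_NTHREADS_MULTIPLIER is set: multiply the thread count, round to the nearest integer, and never drop below one thread.

def scaled_nthreads(nthreads: int, multiplier: float) -> int:
    # mirrors the awk expression for positive inputs: round half up, clamp to >= 1
    scaled = nthreads * multiplier
    rounded = int(scaled) if scaled == int(scaled) else int(scaled + 0.5)
    return max(rounded, 1)


assert scaled_nthreads(4, 1.5) == 6
assert scaled_nthreads(3, 0.5) == 2   # 1.5 rounds up to 2
assert scaled_nthreads(1, 0.25) == 1  # clamped to the minimum of one thread
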
'/' fi fi diff --git a/services/dask-sidecar/requirements/_base.txt b/services/dask-sidecar/requirements/_base.txt index d38bf44ab711..a5aa284705f8 100644 --- a/services/dask-sidecar/requirements/_base.txt +++ b/services/dask-sidecar/requirements/_base.txt @@ -16,7 +16,7 @@ aiofiles==24.1.0 # -r requirements/_base.in aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -92,7 +92,7 @@ certifi==2025.4.26 # requests charset-normalizer==3.4.2 # via requests -click==8.1.8 +click==8.2.1 # via # dask # distributed @@ -109,12 +109,6 @@ dask==2025.5.0 # -r requirements/../../../packages/dask-task-models-library/requirements/_base.in # -r requirements/_base.in # distributed -deprecated==1.2.18 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions distributed==2025.5.0 # via dask dnspython==2.7.0 @@ -179,6 +173,11 @@ jmespath==1.0.1 # via # aiobotocore # botocore +jsonref==1.1.0 + # via + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.23.0 # via # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in @@ -212,59 +211,64 @@ numpy==2.2.6 # bokeh # contourpy # pandas -opentelemetry-api==1.33.1 +opentelemetry-api==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.33.1 +opentelemetry-exporter-otlp==1.34.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.33.1 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.33.1 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.33.1 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.54b1 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.54b1 +opentelemetry-instrumentation-aio-pika==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-logging==0.54b1 +opentelemetry-instrumentation-asyncpg==0.55b1 # via -r 
requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.54b1 +opentelemetry-instrumentation-logging==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.54b1 +opentelemetry-instrumentation-redis==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.33.1 +opentelemetry-instrumentation-requests==0.55b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.33.1 +opentelemetry-sdk==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.54b1 +opentelemetry-semantic-conventions==0.55b1 # via # opentelemetry-instrumentation + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.54b1 +opentelemetry-util-http==0.55b1 # via opentelemetry-instrumentation-requests orjson==3.10.18 # via @@ -318,7 +322,7 @@ propcache==0.3.1 # via # aiohttp # yarl -protobuf==5.29.4 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto @@ -328,7 +332,7 @@ psutil==7.0.0 # distributed pycryptodome==3.23.0 # via stream-zip -pydantic==2.11.4 +pydantic==2.11.7 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -371,7 +375,7 @@ pydantic==2.11.4 # pydantic-settings pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.4 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -494,9 +498,9 @@ referencing==0.35.1 # jsonschema-specifications repro-zipfile==0.4.0 # via -r requirements/_base.in -requests==2.32.3 +requests==2.32.4 # via opentelemetry-exporter-otlp-proto-http -rich==14.0.0 +rich==14.1.0 # via # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -534,20 +538,24 @@ tornado==6.5 # distributed tqdm==4.67.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.15.4 +typer==0.16.1 # via # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in types-python-dateutil==2.9.0.20250516 # via arrow -typing-extensions==4.13.2 +typing-extensions==4.14.1 # via # aiodebug # anyio # exceptiongroup # faststream + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # 
opentelemetry-semantic-conventions # pydantic # pydantic-core # pydantic-extra-types @@ -557,7 +565,7 @@ typing-inspection==0.4.0 # via pydantic tzdata==2025.2 # via pandas -urllib3==2.4.0 +urllib3==2.5.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -583,7 +591,6 @@ urllib3==2.4.0 wrapt==1.17.2 # via # aiobotocore - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-redis diff --git a/services/dask-sidecar/requirements/_dask-distributed.txt b/services/dask-sidecar/requirements/_dask-distributed.txt index 35f16cb2d4e8..0e803796f9c4 100644 --- a/services/dask-sidecar/requirements/_dask-distributed.txt +++ b/services/dask-sidecar/requirements/_dask-distributed.txt @@ -2,7 +2,7 @@ blosc==1.11.3 # via # -c requirements/./_base.txt # -r requirements/_dask-distributed.in -click==8.1.8 +click==8.2.1 # via # -c requirements/./_base.txt # dask @@ -90,7 +90,7 @@ tornado==6.5 # via # -c requirements/./_base.txt # distributed -urllib3==2.4.0 +urllib3==2.5.0 # via # -c requirements/./_base.txt # distributed diff --git a/services/dask-sidecar/requirements/_test.txt b/services/dask-sidecar/requirements/_test.txt index 91ad970abd03..c90d37e7c988 100644 --- a/services/dask-sidecar/requirements/_test.txt +++ b/services/dask-sidecar/requirements/_test.txt @@ -12,7 +12,7 @@ aiohappyeyeballs==2.6.1 # via # -c requirements/_base.txt # aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -73,7 +73,7 @@ charset-normalizer==3.4.2 # via # -c requirements/_base.txt # requests -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # flask @@ -97,7 +97,7 @@ flask==3.1.1 # via # flask-cors # moto -flask-cors==6.0.0 +flask-cors==6.0.1 # via moto frozenlist==1.6.0 # via @@ -182,7 +182,9 @@ packaging==25.0 pathable==0.4.4 # via jsonschema-path pluggy==1.6.0 - # via pytest + # via + # pytest + # pytest-cov ply==3.11 # via jsonpath-ng pprintpp==0.4.0 @@ -196,7 +198,7 @@ py-partiql-parser==0.6.1 # via moto pycparser==2.22 # via cffi -pydantic==2.11.4 +pydantic==2.11.7 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -207,11 +209,15 @@ pydantic-core==2.33.2 # pydantic pyftpdlib==2.0.1 # via pytest-localftpserver +pygments==2.19.1 + # via + # -c requirements/_base.txt + # pytest pyopenssl==25.1.0 # via pytest-localftpserver pyparsing==3.2.3 # via moto -pytest==8.3.5 +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio @@ -221,9 +227,9 @@ pytest==8.3.5 # pytest-localftpserver # pytest-mock # pytest-sugar -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in -pytest-cov==6.1.1 +pytest-cov==6.2.1 # via -r requirements/_test.in pytest-icdiff==0.9 # via -r requirements/_test.in @@ -231,7 +237,7 @@ pytest-instafail==0.5.0 # via -r requirements/_test.in pytest-localftpserver==1.3.2 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-sugar==1.0.0 # via -r requirements/_test.in @@ -262,7 +268,7 @@ referencing==0.35.1 # jsonschema-specifications regex==2024.11.6 # via cfn-lint -requests==2.32.3 +requests==2.32.4 # via # -c requirements/_base.txt # docker @@ -280,7 +286,7 @@ rpds-py==0.25.0 # referencing s3transfer==0.11.3 # 
via boto3 -setuptools==80.7.1 +setuptools==80.9.0 # via moto six==1.17.0 # via @@ -305,7 +311,7 @@ types-awscrt==0.27.2 # via botocore-stubs types-s3transfer==0.12.0 # via types-aioboto3 -typing-extensions==4.13.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # aws-sam-translator @@ -325,7 +331,7 @@ tzdata==2025.2 # via # -c requirements/_base.txt # faker -urllib3==2.4.0 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/services/dask-sidecar/requirements/_tools.txt b/services/dask-sidecar/requirements/_tools.txt index c76d3992bbee..cebc096a0295 100644 --- a/services/dask-sidecar/requirements/_tools.txt +++ b/services/dask-sidecar/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -28,7 +28,7 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.1.0 # via @@ -43,7 +43,9 @@ packaging==25.0 # black # build pathspec==0.12.1 - # via black + # via + # black + # mypy pip==25.1.1 # via pip-tools pip-tools==7.4.1 @@ -70,13 +72,13 @@ pyyaml==6.0.2 # watchdog ruff==0.11.10 # via -r requirements/../../../requirements/devenv.txt -setuptools==80.7.1 +setuptools==80.9.0 # via # -c requirements/_test.txt # pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.13.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/_meta.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/_meta.py index 36e8a5c664b8..9e9e10a311fe 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/_meta.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/_meta.py @@ -1,7 +1,4 @@ -""" Application's metadata - -""" - +"""Application's metadata""" from typing import Final @@ -9,7 +6,7 @@ from models_library.basic_types import VersionStr from servicelib.utils_meta import PackageInfo -info: Final = PackageInfo(package_name="simcore_service_dask_sidecar") +info: Final = PackageInfo(package_name="simcore-service-dask-sidecar") __version__: Final[VersionStr] = info.__version__ diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/docker_utils.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/docker_utils.py index 7b34ef409d23..b4b7aad93366 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/docker_utils.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/docker_utils.py @@ -434,6 +434,9 @@ async def managed_monitor_container_log_task( # noqa: PLR0913 # pylint: disable await monitoring_task +_AIODOCKER_PULLING_TIMEOUT_S: Final[int] = 60 * _MINUTE + + async def pull_image( docker_client: Docker, docker_auth: DockerBasicAuth, @@ -448,6 +451,7 @@ async def pull_image( "username": docker_auth.username, "password": docker_auth.password.get_secret_value(), }, + timeout=_AIODOCKER_PULLING_TIMEOUT_S, ): await log_publishing_cb( f"Pulling {service_key}:{service_version}: {pull_progress}...", diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/rabbitmq_worker_plugin.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/rabbitmq_worker_plugin.py index ba4936284d7b..ef288fea483b 100644 --- 
a/services/dask-sidecar/src/simcore_service_dask_sidecar/rabbitmq_worker_plugin.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/rabbitmq_worker_plugin.py @@ -6,7 +6,7 @@ from typing import Final import distributed -from servicelib.async_utils import cancel_wait_task +from common_library.async_tools import cancel_wait_task from servicelib.logging_utils import log_catch, log_context from servicelib.rabbitmq import RabbitMQClient, wait_till_rabbitmq_responsive from servicelib.rabbitmq._models import RabbitMessage diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py index e0a3e41d3a58..ae5b7d504064 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/settings.py @@ -1,9 +1,11 @@ +from functools import cached_property from pathlib import Path -from typing import Annotated, Any +from typing import Annotated, Any, cast +from common_library.logging.logging_utils_filtering import LoggerName, MessageSubstring from models_library.basic_types import LogLevel from pydantic import AliasChoices, Field, field_validator -from servicelib.logging_utils_filtering import LoggerName, MessageSubstring +from servicelib.logging_utils import LogLevelInt from settings_library.application import BaseApplicationSettings from settings_library.rabbit import RabbitSettings from settings_library.utils_logging import MixinLoggingSettings @@ -61,6 +63,10 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): RabbitSettings | None, Field(json_schema_extra={"auto_default_from_env": True}) ] + @cached_property + def log_level(self) -> LogLevelInt: + return cast(LogLevelInt, self.DASK_SIDECAR_LOGLEVEL) + @field_validator("DASK_SIDECAR_LOGLEVEL", mode="before") @classmethod def _validate_loglevel(cls, value: Any) -> str: diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/utils/logs.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/utils/logs.py index 74b158de9e2e..76eaef674313 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/utils/logs.py +++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/utils/logs.py @@ -1,20 +1,21 @@ -import logging +from typing import Final -from servicelib.logging_utils import config_all_loggers +from servicelib.logging_utils import setup_loggers from ..settings import ApplicationSettings +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aio_pika", + "aiormq", + "werkzeug", +) + def setup_app_logging(settings: ApplicationSettings) -> None: - # set up logging - logging.basicConfig(level=settings.DASK_SIDECAR_LOGLEVEL.value) - logging.root.setLevel(level=settings.DASK_SIDECAR_LOGLEVEL.value) - # NOTE: Dask attaches a StreamHandler to the logger in distributed - # removing them solves dual propagation of logs - for handler in logging.getLogger("distributed").handlers: - logging.getLogger("distributed").removeHandler(handler) - config_all_loggers( + setup_loggers( log_format_local_dev_enabled=settings.DASK_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=settings.DASK_LOG_FILTER_MAPPING, tracing_settings=None, # no tracing for dask sidecar + log_base_level=settings.log_level, + noisy_loggers=_NOISY_LOGGERS, ) diff --git a/services/dask-sidecar/tests/unit/conftest.py b/services/dask-sidecar/tests/unit/conftest.py index 2e3fb246f884..e4bc735025c7 100644 --- a/services/dask-sidecar/tests/unit/conftest.py +++ 
b/services/dask-sidecar/tests/unit/conftest.py @@ -42,6 +42,7 @@ "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", "pytest_simcore.faker_users_data", + "pytest_simcore.logging", "pytest_simcore.rabbit_service", "pytest_simcore.repository_paths", ] diff --git a/services/datcore-adapter/Dockerfile b/services/datcore-adapter/Dockerfile index 716f194c97a3..5b655edbf595 100644 --- a/services/datcore-adapter/Dockerfile +++ b/services/datcore-adapter/Dockerfile @@ -2,7 +2,7 @@ # Define arguments in the global scope ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build FROM python:${PYTHON_VERSION}-slim-bookworm AS base-arm64 @@ -31,6 +31,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=private \ set -eux && \ apt-get update && \ apt-get install -y --no-install-recommends \ + fd-find \ gosu \ && apt-get clean -y \ && rm -rf /var/lib/apt/lists/* \ @@ -90,10 +91,7 @@ RUN uv venv "${VIRTUAL_ENV}" -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools + WORKDIR /build @@ -111,6 +109,10 @@ FROM build AS prod-only-deps ENV SC_BUILD_TARGET=prod-only-deps +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy + WORKDIR /build/services/datcore-adapter RUN \ @@ -135,8 +137,6 @@ ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production ENV PYTHONOPTIMIZE=TRUE -# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode -ENV UV_COMPILE_BYTECODE=1 WORKDIR /home/scu diff --git a/services/datcore-adapter/docker/boot.sh b/services/datcore-adapter/docker/boot.sh index 187ea506ba8f..d4c20cad1ab7 100755 --- a/services/datcore-adapter/docker/boot.sh +++ b/services/datcore-adapter/docker/boot.sh @@ -24,7 +24,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/datcore-adapter - uv pip --quiet sync requirements/dev.txt + uv pip --quiet sync --link-mode=copy requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -33,7 +33,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy if command -v uv >/dev/null 2>&1; then - uv pip install debugpy + uv pip install --link-mode=copy debugpy else pip install debugpy fi @@ -45,19 +45,22 @@ SERVER_LOG_LEVEL=$(echo "${APP_LOG_LEVEL}" | tr '[:upper:]' '[:lower:]') echo "$INFO" "Log-level app/server: $APP_LOG_LEVEL/$SERVER_LOG_LEVEL" if [ "${SC_BOOT_MODE}" = "debug" ]; then - reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path "*/src/*" ! -path "*.*" -exec echo '--reload-dir {} \' \;) + reload_dir_packages=$(fdfind src /devel/packages --exec echo '--reload-dir {} ' | tr '\n' ' ') exec sh -c " cd services/datcore-adapter/src/simcore_service_datcore_adapter && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DATCORE_ADAPTER_REMOTE_DEBUG_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DATCORE_ADAPTER_REMOTE_DEBUG_PORT} -m \ + uvicorn \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ - $reload_dir_packages + $reload_dir_packages \ --reload-dir . 
\ --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_datcore_adapter.main:the_app \ + exec uvicorn \ + --factory simcore_service_datcore_adapter.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/datcore-adapter/docker/entrypoint.sh b/services/datcore-adapter/docker/entrypoint.sh index 25153a6b2a2a..357d8b604d98 100755 --- a/services/datcore-adapter/docker/entrypoint.sh +++ b/services/datcore-adapter/docker/entrypoint.sh @@ -19,6 +19,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE @@ -56,10 +57,9 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; - # change user property of files already around + fdfind --owner ":$SC_USER_ID" --exclude proc --exec-batch chgrp --no-dereference "$CONT_GROUPNAME" . '/' echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fdfind --owner "$SC_USER_ID:" --exclude proc --exec-batch chown --no-dereference "$SC_USER_NAME" . '/' fi fi diff --git a/services/datcore-adapter/requirements/_base.txt b/services/datcore-adapter/requirements/_base.txt index 3017b02e2a16..04f44f397123 100644 --- a/services/datcore-adapter/requirements/_base.txt +++ b/services/datcore-adapter/requirements/_base.txt @@ -14,7 +14,7 @@ aiofiles==23.2.1 # -r requirements/_base.in aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -47,6 +47,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi attrs==23.2.0 @@ -77,19 +79,14 @@ certifi==2024.2.2 # httpcore # httpx # requests + # sentry-sdk charset-normalizer==3.3.2 # via requests -click==8.1.7 +click==8.2.1 # via # rich-toolkit # typer # uvicorn -deprecated==1.2.14 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions dnspython==2.6.1 # via email-validator email-validator==2.1.1 @@ -98,15 +95,18 @@ email-validator==2.1.1 # pydantic fast-depends==2.4.12 # via faststream -fastapi==0.115.12 +fastapi==0.116.1 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi-lifespan-manager -fastapi-cli==0.0.7 + # fastapi-pagination +fastapi-cli==0.0.8 # via fastapi +fastapi-cloud-cli==0.1.5 + # via fastapi-cli fastapi-lifespan-manager==0.1.4 # via -r 
requirements/../../../packages/service-library/requirements/_fastapi.in -fastapi-pagination==0.12.31 +fastapi-pagination==0.14.0 # via -r requirements/_base.in faststream==0.5.31 # via -r requirements/../../../packages/service-library/requirements/_base.in @@ -114,13 +114,13 @@ frozenlist==1.4.1 # via # aiohttp # aiosignal -googleapis-common-protos==1.65.0 +googleapis-common-protos==1.70.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http grpcio==1.66.0 # via opentelemetry-exporter-otlp-proto-grpc -h11==0.14.0 +h11==0.16.0 # via # httpcore # uvicorn @@ -128,7 +128,7 @@ h2==4.1.0 # via httpx hpack==4.0.0 # via h2 -httpcore==1.0.5 +httpcore==1.0.9 # via httpx httptools==0.6.4 # via uvicorn @@ -148,6 +148,7 @@ httpx==0.27.0 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi + # fastapi-cloud-cli hyperframe==6.0.1 # via h2 idna==3.6 @@ -178,6 +179,10 @@ jmespath==1.0.1 # via # boto3 # botocore +jsonref==1.1.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.21.1 # via # -r requirements/../../../packages/models-library/requirements/_base.in @@ -194,7 +199,7 @@ multidict==6.0.5 # via # aiohttp # yarl -opentelemetry-api==1.26.0 +opentelemetry-api==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -202,6 +207,7 @@ opentelemetry-api==1.26.0 # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-logging @@ -209,58 +215,63 @@ opentelemetry-api==1.26.0 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.26.0 +opentelemetry-exporter-otlp==1.34.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.26.0 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.26.0 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.26.0 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.47b0 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.47b0 +opentelemetry-instrumentation-aio-pika==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-asgi==0.47b0 +opentelemetry-instrumentation-asgi==0.55b1 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-fastapi==0.47b0 +opentelemetry-instrumentation-asyncpg==0.55b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in 
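
The boot scripts above now start uvicorn with --factory, so each service module only needs to expose a callable that builds the application. A minimal sketch of that pattern (hypothetical module, not the actual service code); settings and logging are then evaluated per worker process at startup rather than at import time.

from fastapi import FastAPI


def app_factory() -> FastAPI:
    # hypothetical minimal factory: the real services build settings, logging
    # and routers here before returning the application instance
    app = FastAPI()

    @app.get("/")
    async def healthcheck() -> dict[str, str]:
        return {"status": "ok"}

    return app


# roughly what docker/boot.sh now runs:
#   uvicorn --factory simcore_service_datcore_adapter.main:app_factory --host 0.0.0.0
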
+opentelemetry-instrumentation-fastapi==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.47b0 +opentelemetry-instrumentation-httpx==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-logging==0.47b0 +opentelemetry-instrumentation-logging==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.47b0 +opentelemetry-instrumentation-redis==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.47b0 +opentelemetry-instrumentation-requests==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.26.0 +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.26.0 +opentelemetry-sdk==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.47b0 +opentelemetry-semantic-conventions==0.55b1 # via + # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.47b0 +opentelemetry-util-http==0.55b1 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi @@ -288,6 +299,8 @@ orjson==3.10.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in +packaging==25.0 + # via opentelemetry-instrumentation pamqp==3.3.0 # via aiormq prometheus-client==0.20.0 @@ -296,7 +309,7 @@ propcache==0.3.1 # via # aiohttp # yarl -protobuf==4.25.4 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto @@ -304,7 +317,7 @@ psutil==6.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in pycryptodome==3.21.0 # via stream-zip -pydantic==2.10.2 +pydantic==2.11.7 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -332,12 +345,13 @@ pydantic==2.10.2 # -r requirements/_base.in # fast-depends # fastapi + # fastapi-cloud-cli # fastapi-pagination # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.1 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.9.0 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -347,7 +361,7 @@ pydantic-extra-types==2.9.0 # -r 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -pydantic-settings==2.6.1 +pydantic-settings==2.7.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -428,24 +442,28 @@ referencing==0.29.3 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via opentelemetry-exporter-otlp-proto-http -rich==13.7.1 +rich==14.1.0 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # rich-toolkit # typer -rich-toolkit==0.14.7 - # via fastapi-cli +rich-toolkit==0.15.0 + # via + # fastapi-cli + # fastapi-cloud-cli +rignore==0.6.4 + # via fastapi-cloud-cli rpds-py==0.18.0 # via # jsonschema # referencing s3transfer==0.12.0 # via boto3 -setuptools==74.0.0 - # via opentelemetry-instrumentation +sentry-sdk==2.35.0 + # via fastapi-cloud-cli shellingham==1.5.4 # via typer six==1.16.0 @@ -453,8 +471,9 @@ six==1.16.0 sniffio==1.3.1 # via # anyio + # asgi-lifespan # httpx -starlette==0.41.0 +starlette==0.47.2 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -477,26 +496,36 @@ toolz==0.12.1 # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.66.2 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.12.3 +typer==0.16.1 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # fastapi-cli + # fastapi-cloud-cli types-python-dateutil==2.9.0.20240316 # via arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # aiodebug # aiodocker # fastapi # fastapi-pagination # faststream + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pydantic # pydantic-core + # pydantic-extra-types # rich-toolkit + # starlette # typer -urllib3==2.2.3 + # typing-inspection +typing-inspection==0.4.1 + # via pydantic +urllib3==2.5.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -512,10 +541,12 @@ urllib3==2.2.3 # -c requirements/../../../requirements/constraints.txt # botocore # requests + # sentry-sdk uvicorn==0.34.2 # via # fastapi # fastapi-cli + # fastapi-cloud-cli uvloop==0.21.0 # via uvicorn watchfiles==0.21.0 @@ -524,9 +555,9 @@ websockets==12.0 # via uvicorn wrapt==1.16.0 # via - # deprecated # 
opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika + # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis yarl==1.20.0 # via diff --git a/services/datcore-adapter/requirements/_test.txt b/services/datcore-adapter/requirements/_test.txt index 4ff7b32ba9a1..0d1aa35b6912 100644 --- a/services/datcore-adapter/requirements/_test.txt +++ b/services/datcore-adapter/requirements/_test.txt @@ -3,7 +3,9 @@ anyio==4.3.0 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in boto3-stubs==1.37.4 # via -r requirements/_test.in botocore-stubs==1.37.4 @@ -29,11 +31,11 @@ execnet==2.1.1 # via pytest-xdist faker==36.1.1 # via -r requirements/_test.in -h11==0.14.0 +h11==0.16.0 # via # -c requirements/_base.txt # httpcore -httpcore==1.0.5 +httpcore==1.0.9 # via # -c requirements/_base.txt # httpx @@ -52,15 +54,22 @@ idna==3.6 # requests iniconfig==2.0.0 # via pytest -packaging==24.2 +packaging==25.0 # via + # -c requirements/_base.txt # pytest # pytest-sugar pluggy==1.5.0 - # via pytest + # via + # pytest + # pytest-cov pprintpp==0.4.0 # via pytest-icdiff -pytest==8.3.5 +pygments==2.17.2 + # via + # -c requirements/_base.txt + # pytest +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio @@ -70,23 +79,23 @@ pytest==8.3.5 # pytest-mock # pytest-sugar # pytest-xdist -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in pytest-icdiff==0.9 # via -r requirements/_test.in pytest-instafail==0.5.0 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in pytest-sugar==1.0.0 # via -r requirements/_test.in -pytest-xdist==3.6.1 +pytest-xdist==3.8.0 # via -r requirements/_test.in -requests==2.32.3 +requests==2.32.4 # via # -c requirements/_base.txt # -r requirements/_test.in @@ -104,13 +113,13 @@ types-awscrt==0.23.10 # via botocore-stubs types-s3transfer==0.11.3 # via boto3-stubs -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # boto3-stubs tzdata==2025.1 # via faker -urllib3==2.2.3 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/services/datcore-adapter/requirements/_tools.txt b/services/datcore-adapter/requirements/_tools.txt index 68ae37614adf..ec152500172a 100644 --- a/services/datcore-adapter/requirements/_tools.txt +++ b/services/datcore-adapter/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.7 +click==8.2.1 # via # -c requirements/_base.txt # black @@ -27,21 +27,24 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # black # mypy nodeenv==1.9.1 # via pre-commit -packaging==24.2 +packaging==25.0 # via + # -c requirements/_base.txt # -c requirements/_test.txt # black # build pathspec==0.12.1 - # via black + # via + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -67,13 +70,11 @@ pyyaml==6.0.1 # watchdog ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==74.0.0 - # via - # -c requirements/_base.txt - # pip-tools +setuptools==80.9.0 + # via pip-tools tomlkit==0.13.2 # via pylint 
-typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/cli.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/cli.py index 60839168e97b..790065733801 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/cli.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/cli.py @@ -20,6 +20,6 @@ def run() -> None: """Runs application""" typer.secho("Sorry, this entrypoint is intentionally disabled. Use instead") typer.secho( - f"$ uvicorn {PROJECT_NAME}.main:the_app", + f"$ uvicorn --factory {PROJECT_NAME}.main:app_factory", fg=typer.colors.BLUE, ) diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py index 50fd0b8b8884..397a686e7463 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/application.py @@ -27,27 +27,10 @@ ) from .settings import ApplicationSettings -LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR -NOISY_LOGGERS = ( - "aiocache", - "botocore", - "hpack", -) - _logger = logging.getLogger(__name__) def create_app(settings: ApplicationSettings) -> FastAPI: - # keep mostly quiet noisy loggers - quiet_level: int = max( - min(logging.root.level + LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING - ) - - for name in NOISY_LOGGERS: - logging.getLogger(name).setLevel(quiet_level) - - _logger.debug("App settings:\n%s", settings.model_dump_json(indent=1)) - app = FastAPI( debug=settings.SC_BOOT_MODE in [BootModeEnum.DEBUG, BootModeEnum.DEVELOPMENT, BootModeEnum.LOCAL], diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py index 98f091c76e93..0b5089375123 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/core/settings.py @@ -1,10 +1,10 @@ from typing import Annotated from common_library.basic_types import DEFAULT_FACTORY +from common_library.logging.logging_utils_filtering import LoggerName, MessageSubstring from models_library.basic_types import LogLevel from pydantic import AliasChoices, Field, TypeAdapter, field_validator from pydantic.networks import AnyUrl -from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.application import BaseApplicationSettings from settings_library.base import BaseCustomSettings from settings_library.tracing import TracingSettings diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py index 7bd6a7871631..0a46179524df 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/main.py @@ -1,22 +1,37 @@ """Main application to be deployed in for example uvicorn""" import logging +from typing import Final +from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.logging_utils import config_all_loggers +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from simcore_service_datcore_adapter.core.application import create_app from 
simcore_service_datcore_adapter.core.settings import ApplicationSettings -_the_settings = ApplicationSettings.create_from_envs() +_logger = logging.getLogger(__name__) -# SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 -logging.basicConfig(level=_the_settings.log_level) # NOSONAR -logging.root.setLevel(_the_settings.log_level) -config_all_loggers( - log_format_local_dev_enabled=_the_settings.DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=_the_settings.DATCORE_ADAPTER_LOG_FILTER_MAPPING, - tracing_settings=_the_settings.DATCORE_ADAPTER_TRACING, +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aiocache", + "botocore", + "hpack", ) -# SINGLETON FastAPI app -the_app: FastAPI = create_app(_the_settings) + +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = create_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.DATCORE_ADAPTER_LOG_FILTER_MAPPING, + tracing_settings=app_settings.DATCORE_ADAPTER_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) + + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app diff --git a/services/datcore-adapter/tests/unit/conftest.py b/services/datcore-adapter/tests/unit/conftest.py index 6090efe85ae2..976d86ad3341 100644 --- a/services/datcore-adapter/tests/unit/conftest.py +++ b/services/datcore-adapter/tests/unit/conftest.py @@ -25,8 +25,10 @@ from starlette.testclient import TestClient pytest_plugins = [ + "pytest_simcore.asyncio_event_loops", "pytest_simcore.cli_runner", "pytest_simcore.environment_configs", + "pytest_simcore.logging", "pytest_simcore.repository_paths", "pytest_simcore.pytest_global_environs", ] @@ -69,9 +71,9 @@ def pennsieve_mock_dataset_packages(mocks_dir: Path) -> dict[str, Any]: def minimal_app( app_environment: None, ) -> FastAPI: - from simcore_service_datcore_adapter.main import the_app + from simcore_service_datcore_adapter.main import app_factory - return the_app + return app_factory() @pytest.fixture() diff --git a/services/datcore-adapter/tests/unit/test_core_settings.py b/services/datcore-adapter/tests/unit/test_core_settings.py index 6ab82562ad26..31496dee4eb7 100644 --- a/services/datcore-adapter/tests/unit/test_core_settings.py +++ b/services/datcore-adapter/tests/unit/test_core_settings.py @@ -2,41 +2,17 @@ # pylint: disable=unused-argument # pylint: disable=redefined-outer-name - -import pytest from pytest_simcore.helpers.monkeypatch_envs import ( EnvVarsDict, - delenvs_from_dict, - setenvs_from_dict, ) from simcore_service_datcore_adapter.core.settings import ApplicationSettings -@pytest.fixture -def app_environment( - monkeypatch: pytest.MonkeyPatch, - app_environment: EnvVarsDict, - external_envfile_dict: EnvVarsDict, -) -> EnvVarsDict: - """ - NOTE: To run against repo.config in osparc-config repo - - ln -s /path/to/osparc-config/deployments/mydeploy.com/repo.config .secrets - pytest --external-envfile=.secrets tests/unit/test_core_settings.py - - """ - if external_envfile_dict: - delenvs_from_dict(monkeypatch, app_environment, raising=False) - return setenvs_from_dict( - monkeypatch, - {**external_envfile_dict}, - ) - return app_environment - - -def test_unit_app_environment(app_environment: EnvVarsDict): +def 
test_valid_application_settings(app_environment: EnvVarsDict): assert app_environment - settings = ApplicationSettings.create_from_envs() - print("captured settings: \n", settings.model_dump_json(indent=2)) + + settings = ApplicationSettings() # type: ignore + assert settings + assert settings == ApplicationSettings.create_from_envs() assert settings.PENNSIEVE diff --git a/services/director-v2/.env-devel b/services/director-v2/.env-devel index 33425caf3031..83b9a460ac08 100644 --- a/services/director-v2/.env-devel +++ b/services/director-v2/.env-devel @@ -29,7 +29,6 @@ DIRECTOR_V2_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS='{}' LOG_LEVEL=DEBUG -POSTGRES_ENDPOINT=postgres:5432 POSTGRES_USER=test POSTGRES_PASSWORD=test POSTGRES_DB=test diff --git a/services/director-v2/Dockerfile b/services/director-v2/Dockerfile index ac093716b765..ab57f8d54726 100644 --- a/services/director-v2/Dockerfile +++ b/services/director-v2/Dockerfile @@ -2,7 +2,7 @@ # Define arguments in the global scope ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build FROM python:${PYTHON_VERSION}-slim-bookworm AS base-arm64 @@ -31,6 +31,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=private \ set -eux && \ apt-get update && \ apt-get install -y --no-install-recommends \ + fd-find \ gosu \ && apt-get clean -y \ && rm -rf /var/lib/apt/lists/* \ @@ -90,10 +91,7 @@ RUN uv venv "${VIRTUAL_ENV}" -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools + WORKDIR /build @@ -110,6 +108,9 @@ WORKDIR /build FROM build AS prod-only-deps ENV SC_BUILD_TARGET=prod-only-deps +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy WORKDIR /build/services/director-v2 @@ -135,8 +136,6 @@ ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production ENV PYTHONOPTIMIZE=TRUE -# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode -ENV UV_COMPILE_BYTECODE=1 WORKDIR /home/scu diff --git a/services/director-v2/Makefile b/services/director-v2/Makefile index 030084bcb4e1..0332ca3a657e 100644 --- a/services/director-v2/Makefile +++ b/services/director-v2/Makefile @@ -18,7 +18,7 @@ openapi.json: .env @set -o allexport; \ source .env; \ set +o allexport; \ - python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(the_app.openapi(), indent=2) )" > $@ + python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app_factory().openapi(), indent=2) )" > $@ DOCKER_API_VERSION ?= 1.41 @@ -65,7 +65,7 @@ down down-extra: ## stops extra stack run-devel: .env up-extra ## starts app with extra stack # start app (within $<) in devel mode - uvicorn $(APP_PACKAGE_NAME).__main__:the_app \ + uvicorn --factory $(APP_PACKAGE_NAME).__main__:app_factory \ --reload --reload-dir $(SRC_DIR) \ --port=8000 --host=0.0.0.0 diff --git a/services/director-v2/VERSION b/services/director-v2/VERSION index 276cbf9e2858..197c4d5c2d7c 100644 --- a/services/director-v2/VERSION +++ b/services/director-v2/VERSION @@ -1 +1 @@ -2.3.0 +2.4.0 diff --git a/services/director-v2/docker-compose-extra.yml b/services/director-v2/docker-compose-extra.yml index 5923b000384c..ad9ba2af2be2 100644 --- a/services/director-v2/docker-compose-extra.yml +++ b/services/director-v2/docker-compose-extra.yml @@ -1,6 +1,6 @@ services: postgres: - image: postgres:14.5-alpine@sha256:db802f226b620fc0b8adbeca7859eb203c8d3c9ce5d84870fadee05dea8f50ce + image: 
"postgres:14.8-alpine@sha256:150dd39ccb7ae6c7ba6130c3582c39a30bb5d3d22cb08ad0ba37001e3f829abc" init: true environment: - POSTGRES_USER=${POSTGRES_USER:-test} @@ -24,7 +24,7 @@ services: "log_line_prefix=[%p] [%a] [%c] [%x] " ] rabbit: - image: itisfoundation/rabbitmq:3.13.7-management + image: itisfoundation/rabbitmq:4.1.2-management init: true environment: - RABBITMQ_DEFAULT_USER=${RABBIT_USER} diff --git a/services/director-v2/docker/boot.sh b/services/director-v2/docker/boot.sh index 1af7ab240de5..b8d637b23ec3 100755 --- a/services/director-v2/docker/boot.sh +++ b/services/director-v2/docker/boot.sh @@ -24,7 +24,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/director-v2 - uv pip --quiet sync requirements/dev.txt + uv pip --quiet sync --link-mode=copy requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -33,7 +33,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy if command -v uv >/dev/null 2>&1; then - uv pip install debugpy + uv pip install --link-mode=copy debugpy else pip install debugpy fi @@ -47,19 +47,22 @@ SERVER_LOG_LEVEL=$(echo "${APP_LOG_LEVEL}" | tr '[:upper:]' '[:lower:]') echo "$INFO" "Log-level app/server: $APP_LOG_LEVEL/$SERVER_LOG_LEVEL" if [ "${SC_BOOT_MODE}" = "debug" ]; then - reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path "*/src/*" ! -path "*.*" -exec echo '--reload-dir {} \' \;) + reload_dir_packages=$(fdfind src /devel/packages --exec echo '--reload-dir {} ' | tr '\n' ' ') exec sh -c " cd services/director-v2/src/simcore_service_director_v2 && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DIRECTOR_V2_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DIRECTOR_V2_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ - $reload_dir_packages + $reload_dir_packages \ --reload-dir . \ --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_director_v2.main:the_app \ + exec uvicorn \ + --factory simcore_service_director_v2.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/director-v2/docker/entrypoint.sh b/services/director-v2/docker/entrypoint.sh index 42995c6ae780..9d5a38625a13 100755 --- a/services/director-v2/docker/entrypoint.sh +++ b/services/director-v2/docker/entrypoint.sh @@ -26,6 +26,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE @@ -63,10 +64,9 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; - # change user property of files already around + fdfind --owner ":$SC_USER_ID" --exclude proc --exec-batch chgrp --no-dereference "$CONT_GROUPNAME" . '/' echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fdfind --owner "$SC_USER_ID:" --exclude proc --exec-batch chown --no-dereference "$SC_USER_NAME" . 
'/' fi fi diff --git a/services/director-v2/openapi.json b/services/director-v2/openapi.json index 5f57b836564b..701fd9b5b544 100644 --- a/services/director-v2/openapi.json +++ b/services/director-v2/openapi.json @@ -3,7 +3,7 @@ "info": { "title": "simcore-service-director-v2", "description": "Orchestrates the pipeline of services defined by the user", - "version": "2.3.0" + "version": "2.4.0" }, "servers": [ { @@ -65,9 +65,9 @@ "tags": [ "computations" ], - "summary": "Create Computation", + "summary": "Create Or Update Or Start Computation", "description": "Create and optionally start a new computation", - "operationId": "create_computation_v2_computations_post", + "operationId": "create_or_update_or_start_computation_v2_computations_post", "requestBody": { "content": { "application/json": { @@ -1348,6 +1348,19 @@ } ], "description": "contains information about the wallet used to bill the running service" + }, + "collection_run_id": { + "anyOf": [ + { + "type": "string", + "format": "uuid" + }, + { + "type": "null" + } + ], + "title": "Collection Run Id", + "description": "In case start_pipeline is True, this is the collection run id to which the comp run belongs." } }, "type": "object", @@ -1669,8 +1682,16 @@ "address": { "anyOf": [ { - "type": "string", - "pattern": "^\\${1,2}(?:\\{)?OSPARC_VARIABLE_[A-Za-z0-9_]+(?:\\})?(:-.+)?$" + "oneOf": [ + { + "type": "string", + "pattern": "^\\${1,2}(?:\\{)?OSPARC_VARIABLE_[A-Za-z0-9_]+(?:\\})?(:-.+)?$" + }, + { + "type": "string", + "pattern": "^\\${1,4}(?:\\{)?OSPARC_VARIABLE_[A-Za-z0-9_]+(?:\\})?(:-.+)?$" + } + ] }, { "type": "string" @@ -1689,8 +1710,16 @@ "minimum": 0 }, { - "type": "string", - "pattern": "^\\${1,2}(?:\\{)?OSPARC_VARIABLE_[A-Za-z0-9_]+(?:\\})?(:-.+)?$" + "oneOf": [ + { + "type": "string", + "pattern": "^\\${1,2}(?:\\{)?OSPARC_VARIABLE_[A-Za-z0-9_]+(?:\\})?(:-.+)?$" + }, + { + "type": "string", + "pattern": "^\\${1,4}(?:\\{)?OSPARC_VARIABLE_[A-Za-z0-9_]+(?:\\})?(:-.+)?$" + } + ] } ], "title": "Port" @@ -2316,8 +2345,16 @@ "hostname": { "anyOf": [ { - "type": "string", - "pattern": "^\\${1,2}(?:\\{)?OSPARC_VARIABLE_[A-Za-z0-9_]+(?:\\})?(:-.+)?$" + "oneOf": [ + { + "type": "string", + "pattern": "^\\${1,2}(?:\\{)?OSPARC_VARIABLE_[A-Za-z0-9_]+(?:\\})?(:-.+)?$" + }, + { + "type": "string", + "pattern": "^\\${1,4}(?:\\{)?OSPARC_VARIABLE_[A-Za-z0-9_]+(?:\\})?(:-.+)?$" + } + ] }, { "type": "string" @@ -2336,8 +2373,16 @@ "minimum": 0 }, { - "type": "string", - "pattern": "^\\${1,2}(?:\\{)?OSPARC_VARIABLE_[A-Za-z0-9_]+(?:\\})?(:-.+)?$" + "oneOf": [ + { + "type": "string", + "pattern": "^\\${1,2}(?:\\{)?OSPARC_VARIABLE_[A-Za-z0-9_]+(?:\\})?(:-.+)?$" + }, + { + "type": "string", + "pattern": "^\\${1,4}(?:\\{)?OSPARC_VARIABLE_[A-Za-z0-9_]+(?:\\})?(:-.+)?$" + } + ] }, { "$ref": "#/components/schemas/_PortRange" @@ -2360,6 +2405,58 @@ "title": "NATRule", "description": "Content of \"simcore.service.containers-allowed-outgoing-permit-list\" label" }, + "NodeShareState": { + "properties": { + "locked": { + "type": "boolean", + "title": "Locked", + "description": "True if the node is locked, False otherwise" + }, + "current_user_groupids": { + "anyOf": [ + { + "items": { + "type": "integer", + "exclusiveMinimum": true, + "minimum": 0 + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Current User Groupids", + "description": "Group(s) that currently have access to the node (or locked it)" + }, + "status": { + "anyOf": [ + { + "$ref": "#/components/schemas/NodeShareStatus" + }, + { + "type": "null" + } + ], + "description": 
"Reason why the node is locked, None if not locked" + } + }, + "additionalProperties": false, + "type": "object", + "required": [ + "locked" + ], + "title": "NodeShareState" + }, + "NodeShareStatus": { + "type": "string", + "enum": [ + "OPENING", + "OPENED", + "CLOSING" + ], + "title": "NodeShareStatus" + }, "NodeState": { "properties": { "modified": { @@ -2397,6 +2494,17 @@ "title": "Progress", "description": "current progress of the task if available (None if not started or not a computational task)", "default": 0 + }, + "lock_state": { + "anyOf": [ + { + "$ref": "#/components/schemas/NodeShareState" + }, + { + "type": "null" + } + ], + "description": "the node's lock state" } }, "additionalProperties": false, @@ -2770,6 +2878,12 @@ ], "title": "Service Message", "description": "additional information related to service state" + }, + "is_collaborative": { + "type": "boolean", + "title": "Is Collaborative", + "description": "True if service allows collaboration (multi-tenant access)", + "default": false } }, "type": "object", @@ -2789,24 +2903,29 @@ "type": "string", "enum": [ "UNKNOWN", - "PUBLISHED", "NOT_STARTED", + "PUBLISHED", "PENDING", + "WAITING_FOR_CLUSTER", "WAITING_FOR_RESOURCES", "STARTED", "SUCCESS", "FAILED", - "ABORTED", - "WAITING_FOR_CLUSTER" + "ABORTED" ], "title": "RunningState", - "description": "State of execution of a project's computational workflow\n\nSEE StateType for task state" + "description": "State of execution of a project's computational workflow\n\nSEE StateType for task state\n\n# Computational backend states explained:\n- UNKNOWN - The backend doesn't know about the task anymore, it has disappeared from the system or it was never created (eg. when we are asking for the task)\n- NOT_STARTED - Default state when the task is created\n- PUBLISHED - The task has been submitted to the computational backend (click on \"Run\" button in the UI)\n- PENDING - Task has been transferred to the Dask scheduler and is waiting for a worker to pick it up (director-v2 --> Dask scheduler)\n - But! it is also transition state (ex. 
PENDING -> WAITING_FOR_CLUSTER -> PENDING -> WAITING_FOR_RESOURCES -> PENDING -> STARTED)\n- WAITING_FOR_CLUSTER - No cluster (Dask scheduler) is available to run the task; waiting for one to become available\n- WAITING_FOR_RESOURCES - No worker (Dask worker) is available to run the task; waiting for one to become available\n- STARTED - A worker has picked up the task and is executing it\n- SUCCESS - Task finished successfully\n- FAILED - Task finished with an error\n- ABORTED - Task was aborted before completion" }, "SchedulerData": { "properties": { "paths_mapping": { "$ref": "#/components/schemas/PathMappingsLabel" }, + "is_collaborative": { + "type": "boolean", + "title": "Is Collaborative", + "default": false + }, "simcore.service.compose-spec": { "anyOf": [ { @@ -3386,8 +3505,16 @@ "minimum": 0 }, { - "type": "string", - "pattern": "^\\${1,2}(?:\\{)?OSPARC_VARIABLE_[A-Za-z0-9_]+(?:\\})?(:-.+)?$" + "oneOf": [ + { + "type": "string", + "pattern": "^\\${1,2}(?:\\{)?OSPARC_VARIABLE_[A-Za-z0-9_]+(?:\\})?(:-.+)?$" + }, + { + "type": "string", + "pattern": "^\\${1,4}(?:\\{)?OSPARC_VARIABLE_[A-Za-z0-9_]+(?:\\})?(:-.+)?$" + } + ] } ], "title": "Lower" @@ -3402,8 +3529,16 @@ "minimum": 0 }, { - "type": "string", - "pattern": "^\\${1,2}(?:\\{)?OSPARC_VARIABLE_[A-Za-z0-9_]+(?:\\})?(:-.+)?$" + "oneOf": [ + { + "type": "string", + "pattern": "^\\${1,2}(?:\\{)?OSPARC_VARIABLE_[A-Za-z0-9_]+(?:\\})?(:-.+)?$" + }, + { + "type": "string", + "pattern": "^\\${1,4}(?:\\{)?OSPARC_VARIABLE_[A-Za-z0-9_]+(?:\\})?(:-.+)?$" + } + ] } ], "title": "Upper" diff --git a/services/director-v2/requirements/_base.in b/services/director-v2/requirements/_base.in index 7a30881a3319..3b5a52e9513f 100644 --- a/services/director-v2/requirements/_base.in +++ b/services/director-v2/requirements/_base.in @@ -25,7 +25,7 @@ aiodocker networkx ordered-set orjson -pydantic[dotenv] +pydantic python-socketio redis rich diff --git a/services/director-v2/requirements/_base.txt b/services/director-v2/requirements/_base.txt index 2eded29c1068..62ee25b2d610 100644 --- a/services/director-v2/requirements/_base.txt +++ b/services/director-v2/requirements/_base.txt @@ -25,7 +25,7 @@ aiofiles==24.1.0 # -r requirements/../../../packages/simcore-sdk/requirements/_base.in aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -89,6 +89,8 @@ arrow==1.3.0 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi asyncpg==0.30.0 @@ -141,9 +143,10 @@ certifi==2025.4.26 # httpcore # httpx # requests + # sentry-sdk charset-normalizer==3.4.2 # via requests -click==8.1.8 +click==8.2.1 # via # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # dask @@ -161,12 +164,6 @@ dask==2025.5.0 # -r 
requirements/../../../packages/dask-task-models-library/requirements/_base.in # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # distributed -deprecated==1.2.18 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions distributed==2025.5.0 # via # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt @@ -181,12 +178,14 @@ exceptiongroup==1.3.0 # via aio-pika fast-depends==2.4.12 # via faststream -fastapi==0.115.12 +fastapi==0.116.1 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi-lifespan-manager -fastapi-cli==0.0.7 +fastapi-cli==0.0.8 # via fastapi +fastapi-cloud-cli==0.1.5 + # via fastapi-cli fastapi-lifespan-manager==0.1.4 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in faststream==0.5.41 @@ -263,7 +262,9 @@ httpx==0.28.1 # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # -r requirements/../../../packages/simcore-sdk/requirements/_base.in # fastapi + # fastapi-cloud-cli hyperframe==6.1.0 # via h2 idna==3.10 @@ -317,6 +318,13 @@ jinja2==3.1.6 # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # distributed # fastapi +jsonref==1.1.0 + # via + # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.23.0 # via # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in @@ -392,7 +400,7 @@ networkx==3.4.2 # via -r requirements/_base.in numpy==2.2.6 # via -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt -opentelemetry-api==1.33.1 +opentelemetry-api==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in @@ -409,19 +417,19 @@ opentelemetry-api==1.33.1 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.33.1 +opentelemetry-exporter-otlp==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.33.1 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.33.1 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.33.1 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via 
opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.54b1 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi @@ -431,44 +439,44 @@ opentelemetry-instrumentation==0.54b1 # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.54b1 +opentelemetry-instrumentation-aio-pika==0.55b1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-asgi==0.54b1 +opentelemetry-instrumentation-asgi==0.55b1 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-asyncpg==0.54b1 +opentelemetry-instrumentation-asyncpg==0.55b1 # via - # -r requirements/../../../packages/postgres-database/requirements/_base.in - # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-instrumentation-fastapi==0.54b1 + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-fastapi==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.54b1 +opentelemetry-instrumentation-httpx==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-logging==0.54b1 +opentelemetry-instrumentation-logging==0.55b1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.54b1 +opentelemetry-instrumentation-redis==0.55b1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.54b1 +opentelemetry-instrumentation-requests==0.55b1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.33.1 +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.33.1 +opentelemetry-sdk==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.54b1 +opentelemetry-semantic-conventions==0.55b1 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi @@ -478,7 +486,7 @@ opentelemetry-semantic-conventions==0.54b1 # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.54b1 +opentelemetry-util-http==0.55b1 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi @@ -568,7 +576,7 @@ propcache==0.3.1 # via # aiohttp 
# yarl -protobuf==5.29.4 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto @@ -582,7 +590,7 @@ psycopg2-binary==2.9.10 # via sqlalchemy pycryptodome==3.23.0 # via stream-zip -pydantic==2.11.4 +pydantic==2.11.7 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -654,11 +662,12 @@ pydantic==2.11.4 # -r requirements/_base.in # fast-depends # fastapi + # fastapi-cloud-cli # pydantic-extra-types # pydantic-settings pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.4 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -866,9 +875,9 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via opentelemetry-exporter-otlp-proto-http -rich==14.0.0 +rich==14.1.0 # via # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -878,12 +887,18 @@ rich==14.0.0 # -r requirements/_base.in # rich-toolkit # typer -rich-toolkit==0.14.6 - # via fastapi-cli +rich-toolkit==0.15.0 + # via + # fastapi-cli + # fastapi-cloud-cli +rignore==0.6.4 + # via fastapi-cloud-cli rpds-py==0.25.0 # via # jsonschema # referencing +sentry-sdk==2.35.0 + # via fastapi-cloud-cli shellingham==1.5.4 # via typer simple-websocket==1.1.0 @@ -891,7 +906,9 @@ simple-websocket==1.1.0 six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via anyio + # via + # anyio + # asgi-lifespan sortedcontainers==2.4.0 # via # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt @@ -936,7 +953,7 @@ sqlalchemy==1.4.54 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in # alembic -starlette==0.46.2 +starlette==0.47.2 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -1004,7 +1021,7 @@ tqdm==4.67.1 # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in -typer==0.15.4 +typer==0.16.1 # via # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -1012,9 +1029,10 @@ typer==0.15.4 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in # fastapi-cli + # fastapi-cloud-cli types-python-dateutil==2.9.0.20250516 # via arrow -typing-extensions==4.13.2 +typing-extensions==4.14.1 # via # aiodebug # alembic @@ -1024,17 +1042,22 @@ typing-extensions==4.13.2 # faststream # flexcache # flexparser + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pint # pydantic # pydantic-core # pydantic-extra-types # rich-toolkit + # starlette # typer # typing-inspection typing-inspection==0.4.0 # via pydantic -urllib3==2.4.0 +urllib3==2.5.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/dask-task-models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -1073,10 +1096,12 @@ urllib3==2.4.0 # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # distributed # requests + # sentry-sdk uvicorn==0.34.2 # via # fastapi # fastapi-cli + # fastapi-cloud-cli uvloop==0.21.0 # via uvicorn watchfiles==1.0.5 @@ -1085,7 +1110,6 @@ websockets==15.0.1 # via uvicorn wrapt==1.17.2 # via - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-httpx diff --git a/services/director-v2/requirements/_test.in b/services/director-v2/requirements/_test.in index 2fb831189bae..34b5327e2538 100644 --- a/services/director-v2/requirements/_test.in +++ b/services/director-v2/requirements/_test.in @@ -17,6 +17,7 @@ async-asgi-testclient # replacement for fastapi.testclient.TestClient [see b) be dask[distributed,diagnostics] docker Faker +fakeredis[lua] flaky pytest pytest-asyncio diff --git a/services/director-v2/requirements/_test.txt b/services/director-v2/requirements/_test.txt index b556d2b115ef..b00bc02280e0 100644 --- a/services/director-v2/requirements/_test.txt +++ b/services/director-v2/requirements/_test.txt @@ -14,7 +14,7 @@ aiohappyeyeballs==2.6.1 # via # -c requirements/_base.txt # aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -38,7 +38,9 @@ anyio==4.9.0 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in async-asgi-testclient==1.4.11 # via -r requirements/_test.in attrs==25.3.0 @@ -66,7 +68,7 @@ charset-normalizer==3.4.2 # via # -c requirements/_base.txt # requests -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # dask @@ -99,6 +101,8 @@ execnet==2.1.1 # via pytest-xdist faker==37.3.0 # via -r requirements/_test.in +fakeredis==2.30.3 + # via -r requirements/_test.in flaky==3.8.1 # via -r requirements/_test.in frozenlist==1.6.0 @@ -159,6 +163,8 @@ locket==1.0.0 # -c requirements/_base.txt # distributed # partd +lupa==2.5 + # via fakeredis mako==1.3.10 # via # -c requirements/../../../requirements/constraints.txt @@ -180,7 +186,7 @@ multidict==6.4.4 # aiohttp # async-asgi-testclient # yarl -mypy==1.15.0 +mypy==1.16.1 # via sqlalchemy mypy-extensions==1.1.0 # via mypy @@ -210,10 +216,14 @@ partd==1.4.2 # via # -c requirements/_base.txt # dask +pathspec==0.12.1 + # via mypy pillow==11.2.1 # via bokeh pluggy==1.6.0 - # via pytest + # via + # pytest + # pytest-cov pprintpp==0.4.0 # via 
pytest-icdiff propcache==0.3.1 @@ -225,7 +235,11 @@ psutil==7.0.0 # via # -c requirements/_base.txt # distributed -pytest==8.3.5 +pygments==2.19.1 + # via + # -c requirements/_base.txt + # pytest +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio @@ -234,19 +248,19 @@ pytest==8.3.5 # pytest-icdiff # pytest-mock # pytest-xdist -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in -pytest-cov==6.1.1 +pytest-cov==6.2.1 # via -r requirements/_test.in -pytest-docker==3.2.1 +pytest-docker==3.2.3 # via -r requirements/_test.in pytest-icdiff==0.9 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in -pytest-xdist==3.6.1 +pytest-xdist==3.8.0 # via -r requirements/_test.in python-dateutil==2.9.0.post0 # via @@ -263,7 +277,12 @@ pyyaml==6.0.2 # bokeh # dask # distributed -requests==2.32.3 +redis==6.1.0 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # fakeredis +requests==2.32.4 # via # -c requirements/_base.txt # async-asgi-testclient @@ -285,6 +304,7 @@ sortedcontainers==2.4.0 # via # -c requirements/_base.txt # distributed + # fakeredis sqlalchemy==1.4.54 # via # -c requirements/../../../requirements/constraints.txt @@ -314,7 +334,7 @@ types-psycopg2==2.9.21.20250516 # via -r requirements/_test.in types-pyyaml==6.0.12.20250516 # via -r requirements/_test.in -typing-extensions==4.13.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # alembic @@ -326,7 +346,7 @@ tzdata==2025.2 # via # faker # pandas -urllib3==2.4.0 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/services/director-v2/requirements/_tools.txt b/services/director-v2/requirements/_tools.txt index 19dcf22bed60..06a3d2c9da2f 100644 --- a/services/director-v2/requirements/_tools.txt +++ b/services/director-v2/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -28,7 +28,7 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -46,7 +46,10 @@ packaging==25.0 # black # build pathspec==0.12.1 - # via black + # via + # -c requirements/_test.txt + # black + # mypy pip==25.1.1 # via pip-tools pip-tools==7.4.1 @@ -74,11 +77,11 @@ pyyaml==6.0.2 # watchdog ruff==0.11.10 # via -r requirements/../../../requirements/devenv.txt -setuptools==80.7.1 +setuptools==80.9.0 # via pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.13.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/services/director-v2/setup.cfg b/services/director-v2/setup.cfg index f84ced2849b5..20165c1866de 100644 --- a/services/director-v2/setup.cfg +++ b/services/director-v2/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 2.3.0 +current_version = 2.4.0 commit = True message = services/director-v2 version: {current_version} → {new_version} tag = False diff --git a/services/director-v2/src/simcore_service_director_v2/_meta.py b/services/director-v2/src/simcore_service_director_v2/_meta.py index 4ebfef7135c9..5d213c0857ce 100644 --- a/services/director-v2/src/simcore_service_director_v2/_meta.py +++ 
b/services/director-v2/src/simcore_service_director_v2/_meta.py @@ -1,6 +1,5 @@ -""" Application's metadata +"""Application's metadata.""" -""" from typing import Final from models_library.basic_types import VersionStr @@ -15,5 +14,5 @@ VERSION: Final[Version] = info.version API_VERSION: Final[VersionStr] = info.__version__ API_VTAG: Final[str] = info.api_prefix_path_tag -APP_NAME: Final[str] = PROJECT_NAME +APP_NAME: Final[str] = info.app_name SUMMARY: Final[str] = info.get_summary() diff --git a/services/director-v2/src/simcore_service_director_v2/api/dependencies/database.py b/services/director-v2/src/simcore_service_director_v2/api/dependencies/database.py index 949ef83bbdf6..806c122fcc8d 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/dependencies/database.py +++ b/services/director-v2/src/simcore_service_director_v2/api/dependencies/database.py @@ -26,17 +26,11 @@ def get_base_repository(engine: AsyncEngine, repo_type: type[RepoType]) -> RepoT # now the current solution is to acquire connection when needed. # Get pool metrics - checkedin = engine.pool.checkedin() # type: ignore # connections available in pool - checkedout = engine.pool.checkedout() # type: ignore # connections in use + in_use = engine.pool.checkedout() # type: ignore # connections in use total_size = engine.pool.size() # type: ignore # current total connections - if (checkedin < 2) and (total_size > 1): # noqa: PLR2004 - logger.warning( - "Database connection pool near limits: total=%d, in_use=%d, available=%d", - total_size, - checkedout, - checkedin, - ) + if (total_size > 1) and (in_use > (total_size - 2)): + logger.warning("Database connection pool near limits: %s", engine.pool.status()) return repo_type(db_engine=engine) diff --git a/services/director-v2/src/simcore_service_director_v2/api/errors/http_error.py b/services/director-v2/src/simcore_service_director_v2/api/errors/http_error.py index 5edfb25aa200..ddf050d91a93 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/errors/http_error.py +++ b/services/director-v2/src/simcore_service_director_v2/api/errors/http_error.py @@ -1,10 +1,15 @@ -from typing import Awaitable, Callable +import logging +from collections.abc import Awaitable, Callable +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from fastapi import HTTPException from fastapi.encoders import jsonable_encoder +from servicelib.status_codes_utils import is_5xx_server_error from starlette.requests import Request from starlette.responses import JSONResponse +_logger = logging.getLogger(__name__) + async def http_error_handler(_: Request, exc: Exception) -> JSONResponse: assert isinstance(exc, HTTPException) @@ -24,8 +29,18 @@ def make_http_error_handler_for_exception( SEE https://docs.python.org/3/library/exceptions.html#concrete-exceptions """ - async def _http_error_handler(_: Request, exc: Exception) -> JSONResponse: + async def _http_error_handler(request: Request, exc: Exception) -> JSONResponse: assert isinstance(exc, exception_cls) # nosec + + if is_5xx_server_error(status_code): + _logger.exception( + **create_troubleshooting_log_kwargs( + f"HTTP error handler caught an {exception_cls.__name__} exception and responds with {status_code} status code", + error=exc, + error_context={"request": request, "status_code": status_code}, + ) + ) + return JSONResponse( content=jsonable_encoder({"errors": [str(exc)]}), status_code=status_code ) diff --git 
a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py index 9f096911030a..80d16237dcd7 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/computations.py @@ -53,11 +53,11 @@ ComputationalRunNotFoundError, ComputationalSchedulerError, ConfigurationError, + PipelineTaskMissingError, PricingPlanUnitNotFoundError, ProjectNotFoundError, WalletNotEnoughCreditsError, ) -from ...models.comp_pipelines import CompPipelineAtDB from ...models.comp_runs import CompRunsAtDB, ProjectMetadataDict, RunMetadataDict from ...models.comp_tasks import CompTaskAtDB from ...modules.catalog import CatalogClient @@ -70,6 +70,7 @@ from ...modules.db.repositories.users import UsersRepository from ...modules.resource_usage_tracker_client import ResourceUsageTrackerClient from ...utils import computations as utils +from ...utils.computations_tasks import validate_pipeline from ...utils.dags import ( compute_pipeline_details, compute_pipeline_started_timestamp, @@ -83,7 +84,6 @@ from ..dependencies.database import get_repository from ..dependencies.rabbitmq import rabbitmq_rpc_client from ..dependencies.rut_client import get_rut_client -from .computations_tasks import analyze_pipeline _PIPELINE_ABORT_TIMEOUT_S: Final[timedelta] = timedelta(seconds=30) @@ -93,16 +93,20 @@ async def _check_pipeline_not_running_or_raise_409( - comp_tasks_repo: CompTasksRepository, computation: ComputationCreate + comp_runs_repo: CompRunsRepository, + computation: ComputationCreate, ) -> None: - pipeline_state = utils.get_pipeline_state_from_task_states( - await comp_tasks_repo.list_computational_tasks(computation.project_id) - ) - if utils.is_pipeline_running(pipeline_state): - raise HTTPException( - status_code=status.HTTP_409_CONFLICT, - detail=f"Project {computation.project_id} already started, current state is {pipeline_state}", + with contextlib.suppress(ComputationalRunNotFoundError): + last_run = await comp_runs_repo.get_latest_run_by_project( + project_id=computation.project_id ) + pipeline_state = last_run.result + + if utils.is_pipeline_running(pipeline_state): + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, + detail=f"Project {computation.project_id} already started, current state is {pipeline_state}", + ) async def _check_pipeline_startable( @@ -220,6 +224,11 @@ async def _try_start_pipeline( wallet_id = computation.wallet_info.wallet_id wallet_name = computation.wallet_info.wallet_name + if computation.collection_run_id is None: + raise HTTPException( + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, + detail=f"Project {computation.project_id} has no collection run ID", + ) await run_new_pipeline( app, user_id=computation.user_id, @@ -241,6 +250,7 @@ async def _try_start_pipeline( ) or {}, use_on_demand_clusters=computation.use_on_demand_clusters, + collection_run_id=computation.collection_run_id, ) @@ -268,7 +278,8 @@ async def _try_start_pipeline( ) # NOTE: in case of a burst of calls to that endpoint, we might end up in a weird state. @run_sequentially_in_context(target_args=["computation.project_id"]) -async def create_computation( # noqa: PLR0913 # pylint: disable=too-many-positional-arguments +# NOTE: This endpoint is historically used for CREATE, UPDATE or START a computation! 
+async def create_or_update_or_start_computation( # noqa: PLR0913 # pylint: disable=too-many-positional-arguments computation: ComputationCreate, request: Request, project_repo: Annotated[ @@ -301,7 +312,7 @@ async def create_computation( # noqa: PLR0913 # pylint: disable=too-many-positi project: ProjectAtDB = await project_repo.get_project(computation.project_id) # check if current state allow to modify the computation - await _check_pipeline_not_running_or_raise_409(comp_tasks_repo, computation) + await _check_pipeline_not_running_or_raise_409(comp_runs_repo, computation) # create the complete DAG graph complete_dag = create_complete_dag(project.workbench) @@ -352,20 +363,14 @@ async def create_computation( # noqa: PLR0913 # pylint: disable=too-many-positi projects_metadata_repo=projects_metadata_repo, ) - # filter the tasks by the effective pipeline - filtered_tasks = [ - t - for t in comp_tasks - if f"{t.node_id}" in set(minimal_computational_dag.nodes()) - ] - pipeline_state = utils.get_pipeline_state_from_task_states(filtered_tasks) - # get run details if any last_run: CompRunsAtDB | None = None + pipeline_state = RunningState.NOT_STARTED with contextlib.suppress(ComputationalRunNotFoundError): - last_run = await comp_runs_repo.get( - user_id=computation.user_id, project_id=computation.project_id + last_run = await comp_runs_repo.get_latest_run_by_project( + project_id=computation.project_id ) + pipeline_state = last_run.result return ComputationGet( id=computation.project_id, @@ -448,20 +453,15 @@ async def get_computation( # check that project actually exists await project_repo.get_project(project_id) - pipeline_dag, all_tasks, filtered_tasks = await analyze_pipeline( - project_id, comp_pipelines_repo, comp_tasks_repo - ) - - pipeline_state: RunningState = utils.get_pipeline_state_from_task_states( - filtered_tasks - ) - - _logger.debug( - "Computational task status by %s for %s has %s", - f"{user_id=}", - f"{project_id=}", - f"{pipeline_state=}", - ) + try: + pipeline_dag, all_tasks, _filtered_tasks = await validate_pipeline( + project_id, comp_pipelines_repo, comp_tasks_repo + ) + except PipelineTaskMissingError as exc: + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, + detail="The tasks referenced by the pipeline are missing", + ) from exc # create the complete DAG graph complete_dag = create_complete_dag_from_tasks(all_tasks) @@ -471,8 +471,17 @@ async def get_computation( # get run details if any last_run: CompRunsAtDB | None = None + pipeline_state = RunningState.NOT_STARTED with contextlib.suppress(ComputationalRunNotFoundError): - last_run = await comp_runs_repo.get(user_id=user_id, project_id=project_id) + last_run = await comp_runs_repo.get_latest_run_by_project(project_id=project_id) + pipeline_state = last_run.result + + _logger.debug( + "Computational task status by %s for %s has %s", + f"{user_id=}", + f"{project_id=}", + f"{pipeline_state=}", + ) self_url = request.url.remove_query_params("user_id") return ComputationGet( @@ -527,31 +536,24 @@ async def stop_computation( # check the project exists await project_repo.get_project(project_id) # get the project pipeline - pipeline_at_db: CompPipelineAtDB = await comp_pipelines_repo.get_pipeline( - project_id - ) - pipeline_dag: nx.DiGraph = pipeline_at_db.get_graph() + pipeline_at_db = await comp_pipelines_repo.get_pipeline(project_id) + pipeline_dag = pipeline_at_db.get_graph() # get the project task states tasks: list[CompTaskAtDB] = await comp_tasks_repo.list_tasks(project_id) # create the complete DAG 
graph complete_dag = create_complete_dag_from_tasks(tasks) - # filter the tasks by the effective pipeline - filtered_tasks = [ - t for t in tasks if f"{t.node_id}" in set(pipeline_dag.nodes()) - ] - pipeline_state = utils.get_pipeline_state_from_task_states(filtered_tasks) - - if utils.is_pipeline_running(pipeline_state): - await stop_pipeline( - request.app, user_id=computation_stop.user_id, project_id=project_id - ) - - # get run details if any + # stop the pipeline if it is running last_run: CompRunsAtDB | None = None + pipeline_state = RunningState.UNKNOWN with contextlib.suppress(ComputationalRunNotFoundError): - last_run = await comp_runs_repo.get( - user_id=computation_stop.user_id, project_id=project_id + last_run = await comp_runs_repo.get_latest_run_by_project( + project_id=project_id ) + pipeline_state = last_run.result + if utils.is_pipeline_running(last_run.result): + await stop_pipeline( + request.app, user_id=computation_stop.user_id, project_id=project_id + ) return ComputationGet( id=project_id, @@ -593,15 +595,20 @@ async def delete_computation( comp_tasks_repo: Annotated[ CompTasksRepository, Depends(get_repository(CompTasksRepository)) ], + comp_runs_repo: Annotated[ + CompRunsRepository, Depends(get_repository(CompRunsRepository)) + ], ) -> None: try: # get the project project: ProjectAtDB = await project_repo.get_project(project_id) # check if current state allow to stop the computation - comp_tasks: list[CompTaskAtDB] = await comp_tasks_repo.list_computational_tasks( - project_id - ) - pipeline_state = utils.get_pipeline_state_from_task_states(comp_tasks) + pipeline_state = RunningState.UNKNOWN + with contextlib.suppress(ComputationalRunNotFoundError): + last_run = await comp_runs_repo.get_latest_run_by_project( + project_id=project_id + ) + pipeline_state = last_run.result if utils.is_pipeline_running(pipeline_state): if not computation_stop.force: raise HTTPException( @@ -633,12 +640,10 @@ def return_last_value(retry_state: Any) -> Any: before_sleep=before_sleep_log(_logger, logging.INFO), ) async def check_pipeline_stopped() -> bool: - comp_tasks: list[CompTaskAtDB] = ( - await comp_tasks_repo.list_computational_tasks(project_id) - ) - pipeline_state = utils.get_pipeline_state_from_task_states( - comp_tasks, + last_run = await comp_runs_repo.get_latest_run_by_project( + project_id=project_id ) + pipeline_state = last_run.result return utils.is_pipeline_stopped(pipeline_state) # wait for the pipeline to be stopped diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/computations_tasks.py b/services/director-v2/src/simcore_service_director_v2/api/routes/computations_tasks.py index 45f24d13835b..a97b2b60f65b 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/computations_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/computations_tasks.py @@ -7,9 +7,8 @@ """ import logging -from typing import Annotated, NamedTuple +from typing import Annotated -import networkx as nx from fastapi import APIRouter, Depends, HTTPException from models_library.api_schemas_directorv2.computations import ( TaskLogFileGet, @@ -22,11 +21,11 @@ from servicelib.utils import logged_gather from starlette import status -from ...models.comp_pipelines import CompPipelineAtDB -from ...models.comp_tasks import CompTaskAtDB +from ...core.errors import PipelineTaskMissingError from ...modules.db.repositories.comp_pipelines import CompPipelinesRepository from ...modules.db.repositories.comp_tasks import CompTasksRepository 
from ...utils import dask as dask_utils +from ...utils.computations_tasks import validate_pipeline from ..dependencies.database import get_repository log = logging.getLogger(__name__) @@ -37,48 +36,6 @@ # HELPERS ------------------------------------------------------------------- -class PipelineInfo(NamedTuple): - # NOTE: kept old names for legacy but should rename for clarity - pipeline_dag: nx.DiGraph - all_tasks: list[CompTaskAtDB] # all nodes in pipeline - filtered_tasks: list[CompTaskAtDB] # nodes that actually run i.e. part of the dag - - -async def analyze_pipeline( - project_id: ProjectID, - comp_pipelines_repo: CompPipelinesRepository, - comp_tasks_repo: CompTasksRepository, -) -> PipelineInfo: - """ - Loads and validates data from pipelines and tasks tables and - reports it back as PipelineInfo - """ - - # NOTE: Here it is assumed the project exists in comp_tasks/comp_pipeline - # get the project pipeline - pipeline_at_db: CompPipelineAtDB = await comp_pipelines_repo.get_pipeline( - project_id - ) - pipeline_dag: nx.DiGraph = pipeline_at_db.get_graph() - - # get the project task states - all_tasks: list[CompTaskAtDB] = await comp_tasks_repo.list_tasks(project_id) - - # filter the tasks by the effective pipeline - filtered_tasks = [ - t for t in all_tasks if f"{t.node_id}" in set(pipeline_dag.nodes()) - ] - - # check that we have the expected tasks - if len(filtered_tasks) != len(pipeline_dag): - raise HTTPException( - status_code=status.HTTP_409_CONFLICT, - detail="The tasks referenced by the pipeline are missing", - ) - - return PipelineInfo(pipeline_dag, all_tasks, filtered_tasks) - - # ROUTES HANDLERS -------------------------------------------------------------- @@ -101,7 +58,13 @@ async def get_all_tasks_log_files( Each log is only available when the corresponding task is done """ # gets computation task ids - info = await analyze_pipeline(project_id, comp_pipelines_repo, comp_tasks_repo) + try: + info = await validate_pipeline(project_id, comp_pipelines_repo, comp_tasks_repo) + except PipelineTaskMissingError as exc: + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, + detail="The tasks referenced by the pipeline are missing", + ) from exc iter_task_ids = (t.node_id for t in info.filtered_tasks) tasks_logs_files: list[TaskLogFileGet] = await logged_gather( diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_scheduler.py b/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_scheduler.py index dadfdc3cdfc6..53aaac235044 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_scheduler.py @@ -4,18 +4,17 @@ from fastapi import APIRouter, Depends, HTTPException, status from models_library.projects_nodes_io import NodeID from pydantic import BaseModel, PositiveInt -from servicelib.fastapi.long_running_tasks.client import ( +from servicelib.fastapi.long_running_tasks._manager import FastAPILongRunningManager +from servicelib.fastapi.long_running_tasks.server import get_long_running_manager +from servicelib.long_running_tasks import lrt_api +from servicelib.long_running_tasks.errors import TaskAlreadyRunningError +from servicelib.long_running_tasks.models import ( ProgressMessage, ProgressPercent, -) -from servicelib.fastapi.long_running_tasks.server import ( - TaskAlreadyRunningError, TaskId, TaskProgress, - TasksManager, - get_tasks_manager, - start_task, ) +from servicelib.long_running_tasks.task 
import TaskRegistry from tenacity import retry from tenacity.before_sleep import before_sleep_log from tenacity.retry import retry_if_result @@ -94,32 +93,39 @@ async def update_service_observation( ) async def delete_service_containers( node_uuid: NodeID, - tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)], + long_running_manager: Annotated[ + FastAPILongRunningManager, Depends(get_long_running_manager) + ], dynamic_sidecars_scheduler: Annotated[ DynamicSidecarsScheduler, Depends(get_dynamic_sidecar_scheduler) ], ): async def _task_remove_service_containers( - task_progress: TaskProgress, node_uuid: NodeID + progress: TaskProgress, node_uuid: NodeID ) -> None: async def _progress_callback( message: ProgressMessage, percent: ProgressPercent | None, _: TaskId ) -> None: - task_progress.update(message=message, percent=percent) + await progress.update(message=message, percent=percent) await dynamic_sidecars_scheduler.remove_service_containers( node_uuid=node_uuid, progress_callback=_progress_callback ) + TaskRegistry.register(_task_remove_service_containers) + try: - return start_task( - tasks_manager, - task=_task_remove_service_containers, # type: ignore[arg-type] + return await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + _task_remove_service_containers.__name__, unique=True, node_uuid=node_uuid, ) except TaskAlreadyRunningError as e: raise HTTPException(status.HTTP_409_CONFLICT, detail=f"{e}") from e + finally: + TaskRegistry.unregister(_task_remove_service_containers) @router.get( @@ -152,33 +158,40 @@ async def get_service_state( ) async def save_service_state( node_uuid: NodeID, - tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)], + long_running_manager: Annotated[ + FastAPILongRunningManager, Depends(get_long_running_manager) + ], dynamic_sidecars_scheduler: Annotated[ DynamicSidecarsScheduler, Depends(get_dynamic_sidecar_scheduler) ], ): async def _task_save_service_state( - task_progress: TaskProgress, + progress: TaskProgress, node_uuid: NodeID, ) -> None: async def _progress_callback( message: ProgressMessage, percent: ProgressPercent | None, _: TaskId ) -> None: - task_progress.update(message=message, percent=percent) + await progress.update(message=message, percent=percent) await dynamic_sidecars_scheduler.save_service_state( node_uuid=node_uuid, progress_callback=_progress_callback ) + TaskRegistry.register(_task_save_service_state) + try: - return start_task( - tasks_manager, - task=_task_save_service_state, # type: ignore[arg-type] + return await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + _task_save_service_state.__name__, unique=True, node_uuid=node_uuid, ) except TaskAlreadyRunningError as e: raise HTTPException(status.HTTP_409_CONFLICT, detail=f"{e}") from e + finally: + TaskRegistry.unregister(_task_save_service_state) @router.post( @@ -194,32 +207,39 @@ async def _progress_callback( ) async def push_service_outputs( node_uuid: NodeID, - tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)], + long_running_manager: Annotated[ + FastAPILongRunningManager, Depends(get_long_running_manager) + ], dynamic_sidecars_scheduler: Annotated[ DynamicSidecarsScheduler, Depends(get_dynamic_sidecar_scheduler) ], ): async def _task_push_service_outputs( - task_progress: TaskProgress, node_uuid: NodeID + progress: TaskProgress, node_uuid: NodeID ) -> None: async def _progress_callback( message: ProgressMessage, percent: ProgressPercent | 
None, _: TaskId ) -> None: - task_progress.update(message=message, percent=percent) + await progress.update(message=message, percent=percent) await dynamic_sidecars_scheduler.push_service_outputs( node_uuid=node_uuid, progress_callback=_progress_callback ) + TaskRegistry.register(_task_push_service_outputs) + try: - return start_task( - tasks_manager, - task=_task_push_service_outputs, # type: ignore[arg-type] + return await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + _task_push_service_outputs.__name__, unique=True, node_uuid=node_uuid, ) except TaskAlreadyRunningError as e: raise HTTPException(status.HTTP_409_CONFLICT, detail=f"{e}") from e + finally: + TaskRegistry.unregister(_task_push_service_outputs) @router.delete( @@ -235,27 +255,34 @@ async def _progress_callback( ) async def delete_service_docker_resources( node_uuid: NodeID, - tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)], + long_running_manager: Annotated[ + FastAPILongRunningManager, Depends(get_long_running_manager) + ], dynamic_sidecars_scheduler: Annotated[ DynamicSidecarsScheduler, Depends(get_dynamic_sidecar_scheduler) ], ): async def _task_cleanup_service_docker_resources( - task_progress: TaskProgress, node_uuid: NodeID + progress: TaskProgress, node_uuid: NodeID ) -> None: await dynamic_sidecars_scheduler.remove_service_sidecar_proxy_docker_networks_and_volumes( - task_progress=task_progress, node_uuid=node_uuid + task_progress=progress, node_uuid=node_uuid ) + TaskRegistry.register(_task_cleanup_service_docker_resources) + try: - return start_task( - tasks_manager, - task=_task_cleanup_service_docker_resources, # type: ignore[arg-type] + return await lrt_api.start_task( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + _task_cleanup_service_docker_resources.__name__, unique=True, node_uuid=node_uuid, ) except TaskAlreadyRunningError as e: raise HTTPException(status.HTTP_409_CONFLICT, detail=f"{e}") from e + finally: + TaskRegistry.unregister(_task_cleanup_service_docker_resources) @router.post( diff --git a/services/director-v2/src/simcore_service_director_v2/api/rpc/_computations.py b/services/director-v2/src/simcore_service_director_v2/api/rpc/_computations.py index eeb270c46cbb..25d2a133ec57 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/rpc/_computations.py +++ b/services/director-v2/src/simcore_service_director_v2/api/rpc/_computations.py @@ -1,11 +1,15 @@ # pylint: disable=too-many-arguments from fastapi import FastAPI from models_library.api_schemas_directorv2.comp_runs import ( + ComputationCollectionRunRpcGetPage, + ComputationCollectionRunTaskRpcGet, + ComputationCollectionRunTaskRpcGetPage, ComputationRunRpcGetPage, ComputationTaskRpcGet, ComputationTaskRpcGetPage, ) from models_library.api_schemas_directorv2.computations import TaskLogFileGet +from models_library.computations import CollectionRunID from models_library.products import ProductName from models_library.projects import ProjectID from models_library.rest_ordering import OrderBy @@ -15,9 +19,15 @@ from servicelib.utils import limited_gather from ...core.errors import ComputationalRunNotFoundError +from ...models.comp_run_snapshot_tasks import ( + CompRunSnapshotTaskDBGet, +) from ...models.comp_runs import CompRunsAtDB from ...models.comp_tasks import ComputationTaskForRpcDBGet from ...modules.db.repositories.comp_runs import CompRunsRepository +from ...modules.db.repositories.comp_runs_snapshot_tasks import ( + 
CompRunsSnapshotTasksRepository, +) from ...modules.db.repositories.comp_tasks import CompTasksRepository from ...utils import dask as dask_utils @@ -85,8 +95,44 @@ async def list_computations_iterations_page( ) +@router.expose(reraise_if_error_type=()) +async def list_computation_collection_runs_page( + app: FastAPI, + *, + product_name: ProductName, + user_id: UserID, + project_ids: list[ProjectID] | None, + filter_only_running: bool = False, + # pagination + offset: int = 0, + limit: int = 20, +) -> ComputationCollectionRunRpcGetPage: + comp_runs_repo = CompRunsRepository.instance(db_engine=app.state.engine) + + collection_run_ids: list[CollectionRunID] | None = None + if filter_only_running is True: + collection_run_ids = await comp_runs_repo.list_all_collection_run_ids_for_user_currently_running_computations( + product_name=product_name, user_id=user_id + ) + if collection_run_ids == []: + return ComputationCollectionRunRpcGetPage(items=[], total=0) + + total, comp_runs_output = await comp_runs_repo.list_group_by_collection_run_id( + product_name=product_name, + user_id=user_id, + project_ids_or_none=project_ids, + collection_run_ids_or_none=collection_run_ids, + offset=offset, + limit=limit, + ) + return ComputationCollectionRunRpcGetPage( + items=comp_runs_output, + total=total, + ) + + async def _fetch_task_log( - user_id: UserID, task: ComputationTaskForRpcDBGet + user_id: UserID, task: CompRunSnapshotTaskDBGet | ComputationTaskForRpcDBGet ) -> TaskLogFileGet | None: if not task.state.is_running(): return await dask_utils.get_task_log_file( @@ -182,3 +228,63 @@ async def list_computations_latest_iteration_tasks_page( items=comp_tasks_output, total=total, ) + + +@router.expose(reraise_if_error_type=()) +async def list_computation_collection_run_tasks_page( + app: FastAPI, + *, + product_name: ProductName, + user_id: UserID, + collection_run_id: CollectionRunID, + # pagination + offset: int = 0, + limit: int = 20, + # ordering + order_by: OrderBy | None = None, +) -> ComputationCollectionRunTaskRpcGetPage: + comp_runs_snapshot_tasks_repo = CompRunsSnapshotTasksRepository.instance( + db_engine=app.state.engine + ) + + total, comp_tasks = ( + await comp_runs_snapshot_tasks_repo.list_computation_collection_run_tasks( + product_name=product_name, + user_id=user_id, + collection_run_id=collection_run_id, + offset=offset, + limit=limit, + order_by=order_by, + ) + ) + + # Run all log fetches concurrently + log_files = await limited_gather( + *[_fetch_task_log(user_id, task) for task in comp_tasks], + limit=20, + ) + + comp_tasks_output = [ + ComputationCollectionRunTaskRpcGet( + project_uuid=task.project_uuid, + node_id=task.node_id, + state=task.state, + progress=task.progress, + image=task.image, + started_at=task.started_at, + ended_at=task.ended_at, + log_download_link=log_file.download_link if log_file else None, + service_run_id=ServiceRunID.get_resource_tracking_run_id_for_computational( + user_id, + task.project_uuid, + task.node_id, + task.iteration, + ), + ) + for task, log_file in zip(comp_tasks, log_files, strict=True) + ] + + return ComputationCollectionRunTaskRpcGetPage( + items=comp_tasks_output, + total=total, + ) diff --git a/services/director-v2/src/simcore_service_director_v2/api/rpc/_computations_tasks.py b/services/director-v2/src/simcore_service_director_v2/api/rpc/_computations_tasks.py new file mode 100644 index 000000000000..203f2134946c --- /dev/null +++ b/services/director-v2/src/simcore_service_director_v2/api/rpc/_computations_tasks.py @@ -0,0 +1,48 @@ +from 
pathlib import Path + +from fastapi import FastAPI +from models_library.api_schemas_directorv2.computations import TaskLogFileIdGet +from models_library.projects import ProjectID +from servicelib.rabbitmq import RPCRouter +from servicelib.rabbitmq.rpc_interfaces.director_v2.errors import ( + ComputationalTaskMissingError, +) +from simcore_sdk.node_ports_common import data_items_utils + +from ...constants import LOGS_FILE_NAME +from ...core.errors import PipelineNotFoundError, PipelineTaskMissingError +from ...modules.db.repositories.comp_pipelines import CompPipelinesRepository +from ...modules.db.repositories.comp_tasks import CompTasksRepository +from ...utils.computations_tasks import validate_pipeline + +router = RPCRouter() + + +@router.expose(reraise_if_error_type=(ComputationalTaskMissingError,)) +async def get_computation_task_log_file_ids( + app: FastAPI, + project_id: ProjectID, +) -> list[TaskLogFileIdGet]: + comp_pipelines_repo = CompPipelinesRepository.instance(db_engine=app.state.engine) + comp_tasks_repo = CompTasksRepository.instance(db_engine=app.state.engine) + + try: + info = await validate_pipeline( + project_id=project_id, + comp_pipelines_repo=comp_pipelines_repo, + comp_tasks_repo=comp_tasks_repo, + ) + except (PipelineNotFoundError, PipelineTaskMissingError) as exc: + raise ComputationalTaskMissingError(project_id=project_id) from exc + + iter_task_ids = (t.node_id for t in info.filtered_tasks) + + return [ + TaskLogFileIdGet( + task_id=node_id, + file_id=data_items_utils.create_simcore_file_id( + Path(LOGS_FILE_NAME), f"{project_id}", f"{node_id}" + ), + ) + for node_id in iter_task_ids + ] diff --git a/services/director-v2/src/simcore_service_director_v2/api/rpc/routes.py b/services/director-v2/src/simcore_service_director_v2/api/rpc/routes.py index ad6bdba28c75..c7cd84acd052 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/rpc/routes.py +++ b/services/director-v2/src/simcore_service_director_v2/api/rpc/routes.py @@ -8,16 +8,12 @@ from servicelib.rabbitmq import RPCRouter from ...modules.rabbitmq import get_rabbitmq_rpc_server -from . import ( - _computations, -) +from . 
import _computations, _computations_tasks _logger = logging.getLogger(__name__) -ROUTERS: list[RPCRouter] = [ - _computations.router, -] +ROUTERS: list[RPCRouter] = [_computations.router, _computations_tasks.router] def setup_rpc_api_routes(app: FastAPI) -> None: diff --git a/services/director-v2/src/simcore_service_director_v2/cli/_close_and_save_service.py b/services/director-v2/src/simcore_service_director_v2/cli/_close_and_save_service.py index fb8f70bf62f4..820a6104a836 100644 --- a/services/director-v2/src/simcore_service_director_v2/cli/_close_and_save_service.py +++ b/services/director-v2/src/simcore_service_director_v2/cli/_close_and_save_service.py @@ -15,12 +15,14 @@ ) from servicelib.fastapi.http_client_thin import UnexpectedStatusError from servicelib.fastapi.long_running_tasks.client import ( - Client, + HttpClient, + periodic_task_result, + setup, +) +from servicelib.long_running_tasks.models import ( ProgressMessage, ProgressPercent, TaskId, - periodic_task_result, - setup, ) from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type @@ -45,7 +47,7 @@ async def _minimal_app() -> AsyncIterator[FastAPI]: async def _track_and_display( - client: Client, + client: HttpClient, task_id: TaskId, update_interval: PositiveFloat, task_timeout: PositiveFloat, @@ -103,7 +105,7 @@ async def async_close_and_save_service( f"{node_id}", is_disabled=True ) - client = Client( + client = HttpClient( app=app, async_client=thin_dv2_localhost_client.client, base_url=f"{TypeAdapter(AnyHttpUrl).validate_python(thin_dv2_localhost_client.BASE_ADDRESS)}", diff --git a/services/director-v2/src/simcore_service_director_v2/cli/_core.py b/services/director-v2/src/simcore_service_director_v2/cli/_core.py index bc7cc0958986..f857949b2742 100644 --- a/services/director-v2/src/simcore_service_director_v2/cli/_core.py +++ b/services/director-v2/src/simcore_service_director_v2/cli/_core.py @@ -16,7 +16,6 @@ from rich.live import Live from rich.table import Table from servicelib.services_utils import get_service_from_key -from simcore_service_director_v2.modules.catalog import CatalogClient from tenacity.asyncio import AsyncRetrying from tenacity.stop import stop_after_attempt from tenacity.wait import wait_random_exponential @@ -25,6 +24,7 @@ from ..core.settings import AppSettings from ..models.dynamic_services_scheduler import DynamicSidecarNamesHelper from ..modules import db, director_v0, dynamic_sidecar +from ..modules.catalog import CatalogClient from ..modules.db.repositories.projects import ProjectsRepository from ..modules.dynamic_sidecar import api_client from ..modules.projects_networks import requires_dynamic_sidecar diff --git a/services/director-v2/src/simcore_service_director_v2/constants.py b/services/director-v2/src/simcore_service_director_v2/constants.py index d4a5690d9bbf..a5a3a8f72d4d 100644 --- a/services/director-v2/src/simcore_service_director_v2/constants.py +++ b/services/director-v2/src/simcore_service_director_v2/constants.py @@ -15,13 +15,17 @@ # - local # - itisfoundation # - 10.0.0.0:8473 (IP & Port) -DYNAMIC_SIDECAR_DOCKER_IMAGE_RE = ( +DYNAMIC_SIDECAR_DOCKER_IMAGE_RE: Final[str] = ( r"^(([_a-zA-Z0-9:.-]+)/)?(dynamic-sidecar):([_a-zA-Z0-9.-]+)$" ) -REGEX_DY_SERVICE_SIDECAR = rf"^{DYNAMIC_SIDECAR_SERVICE_PREFIX}_[a-zA-Z0-9-_]*" -REGEX_DY_SERVICE_PROXY = rf"^{DYNAMIC_PROXY_SERVICE_PREFIX}_[a-zA-Z0-9-_]*" +LOGS_FILE_NAME: Final[str] = "logs.zip" -UNDEFINED_STR_METADATA = "undefined-metadata" -UNDEFINED_DOCKER_LABEL = "undefined-label" 
-UNDEFINED_API_BASE_URL = "https://api.local" +REGEX_DY_SERVICE_SIDECAR: Final[str] = ( + rf"^{DYNAMIC_SIDECAR_SERVICE_PREFIX}_[a-zA-Z0-9-_]*" +) +REGEX_DY_SERVICE_PROXY: Final[str] = rf"^{DYNAMIC_PROXY_SERVICE_PREFIX}_[a-zA-Z0-9-_]*" + +UNDEFINED_STR_METADATA: Final[str] = "undefined-metadata" +UNDEFINED_DOCKER_LABEL: Final[str] = "undefined-label" +UNDEFINED_API_BASE_URL: Final[str] = "https://api.local" diff --git a/services/director-v2/src/simcore_service_director_v2/core/application.py b/services/director-v2/src/simcore_service_director_v2/core/application.py index 5031c74a6185..9b68f4b09a42 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/application.py +++ b/services/director-v2/src/simcore_service_director_v2/core/application.py @@ -1,7 +1,12 @@ import logging +from typing import Final +from common_library.json_serialization import json_dumps from fastapi import FastAPI, HTTPException, status from fastapi.exceptions import RequestValidationError +from fastapi_lifespan_manager import LifespanManager +from servicelib.fastapi.lifespan_utils import Lifespan +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from servicelib.fastapi.openapi import ( get_common_oas_options, override_fastapi_openapi_method, @@ -11,7 +16,6 @@ initialize_fastapi_app_tracing, setup_tracing, ) -from servicelib.logging_utils import config_all_loggers from .._meta import API_VERSION, API_VTAG, APP_NAME, PROJECT_NAME, SUMMARY from ..api.entrypoints import api_router @@ -30,6 +34,7 @@ dynamic_services, dynamic_sidecar, instrumentation, + long_running_tasks, notifier, rabbitmq, redis, @@ -49,6 +54,13 @@ _logger = logging.getLogger(__name__) +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aio_pika", + "aiormq", + "httpcore", + "httpx", +) + def _set_exception_handlers(app: FastAPI): app.add_exception_handler(HTTPException, http_error_handler) @@ -94,56 +106,65 @@ def _set_exception_handlers(app: FastAPI): ) -_LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR -_NOISY_LOGGERS = ( - "aio_pika", - "aiormq", - "httpcore", -) +def create_app_lifespan(logging_lifespan: Lifespan | None = None) -> LifespanManager: + app_lifespan = LifespanManager() + if logging_lifespan: + app_lifespan.add(logging_lifespan) + return app_lifespan -def create_base_app(settings: AppSettings | None = None) -> FastAPI: - if settings is None: - settings = AppSettings.create_from_envs() - assert settings # nosec +def create_base_app( + app_settings: AppSettings | None = None, +) -> FastAPI: + if app_settings is None: + app_settings = AppSettings.create_from_envs() - logging.basicConfig(level=settings.LOG_LEVEL.value) - logging.root.setLevel(settings.LOG_LEVEL.value) - config_all_loggers( - log_format_local_dev_enabled=settings.DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=settings.DIRECTOR_V2_LOG_FILTER_MAPPING, - tracing_settings=settings.DIRECTOR_V2_TRACING, + logging_shutdown_event = create_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.DIRECTOR_V2_LOG_FILTER_MAPPING, + tracing_settings=app_settings.DIRECTOR_V2_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, ) - _logger.debug(settings.model_dump_json(indent=2)) - # keep mostly quiet noisy loggers - quiet_level: int = max( - min(logging.root.level + _LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, 
sort_keys=True), ) - for name in _NOISY_LOGGERS: - logging.getLogger(name).setLevel(quiet_level) + assert app_settings # nosec - assert settings.SC_BOOT_MODE # nosec + assert app_settings.SC_BOOT_MODE # nosec app = FastAPI( - debug=settings.SC_BOOT_MODE.is_devel_mode(), + debug=app_settings.SC_BOOT_MODE.is_devel_mode(), title=PROJECT_NAME, description=SUMMARY, version=API_VERSION, openapi_url=f"/api/{API_VTAG}/openapi.json", - **get_common_oas_options(is_devel_mode=settings.SC_BOOT_MODE.is_devel_mode()), + **get_common_oas_options( + is_devel_mode=app_settings.SC_BOOT_MODE.is_devel_mode() + ), ) override_fastapi_openapi_method(app) - app.state.settings = settings + app.state.settings = app_settings app.include_router(api_router) + + app.add_event_handler("shutdown", logging_shutdown_event) + return app -def init_app(settings: AppSettings | None = None) -> FastAPI: +def create_app( # noqa: C901, PLR0912 + settings: AppSettings | None = None, +) -> FastAPI: app = create_base_app(settings) if settings is None: settings = app.state.settings + _logger.info( + "Application settings: %s", + json_dumps(settings, indent=2, sort_keys=True), + ) assert settings # nosec substitutions.setup(app) @@ -200,6 +221,7 @@ def init_app(settings: AppSettings | None = None) -> FastAPI: dynamic_sidecar.setup(app) socketio.setup(app) notifier.setup(app) + long_running_tasks.setup(app) if ( settings.DIRECTOR_V2_COMPUTATIONAL_BACKEND.COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED diff --git a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/__init__.py b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/__init__.py index c3ed002edd6d..91c9972a61f7 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/__init__.py +++ b/services/director-v2/src/simcore_service_director_v2/core/dynamic_services_settings/__init__.py @@ -13,7 +13,9 @@ class DynamicServicesSettings(BaseCustomSettings): default=True, description="Enables/Disables the dynamic_sidecar submodule" ) - DYNAMIC_SIDECAR: DynamicSidecarSettings = Field(json_schema_extra={"auto_default_from_env": True}) + DYNAMIC_SIDECAR: DynamicSidecarSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) DYNAMIC_SCHEDULER: DynamicServicesSchedulerSettings = Field( json_schema_extra={"auto_default_from_env": True} @@ -31,4 +33,6 @@ class DynamicServicesSettings(BaseCustomSettings): json_schema_extra={"auto_default_from_env": True} ) - WEBSERVER_SETTINGS: WebServerSettings = Field(json_schema_extra={"auto_default_from_env": True}) + WEBSERVER_AUTH_SETTINGS: WebServerSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) diff --git a/services/director-v2/src/simcore_service_director_v2/core/errors.py b/services/director-v2/src/simcore_service_director_v2/core/errors.py index cd992d5155fd..2e5f556c4ed6 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/errors.py +++ b/services/director-v2/src/simcore_service_director_v2/core/errors.py @@ -67,6 +67,10 @@ class WalletNotEnoughCreditsError(DirectorError): msg_template = "Wallet '{wallet_name}' has {wallet_credit_amount} credits." 
+class PipelineTaskMissingError(DirectorError): + msg_template = "Pipeline associated with project_id {project_id} is missing task(s)" + + # # SCHEDULER ERRORS # diff --git a/services/director-v2/src/simcore_service_director_v2/core/settings.py b/services/director-v2/src/simcore_service_director_v2/core/settings.py index 03f256b01b0e..18b8bdb9af65 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/settings.py +++ b/services/director-v2/src/simcore_service_director_v2/core/settings.py @@ -6,6 +6,8 @@ from functools import cached_property from typing import Annotated, cast +from common_library.basic_types import DEFAULT_FACTORY +from common_library.logging.logging_utils_filtering import LoggerName, MessageSubstring from common_library.pydantic_validators import validate_numeric_string_as_timedelta from fastapi import FastAPI from models_library.basic_types import LogLevel, PortInt @@ -15,6 +17,7 @@ ClusterTypeInModel, NoAuthentication, ) +from models_library.rabbitmq_basic_types import RPCNamespace from pydantic import ( AliasChoices, AnyUrl, @@ -23,7 +26,6 @@ PositiveInt, field_validator, ) -from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.application import BaseApplicationSettings from settings_library.base import BaseCustomSettings from settings_library.catalog import CatalogSettings @@ -50,38 +52,67 @@ class ComputationalBackendSettings(BaseCustomSettings): - COMPUTATIONAL_BACKEND_ENABLED: bool = Field( - default=True, - ) - COMPUTATIONAL_BACKEND_SCHEDULING_CONCURRENCY: PositiveInt = Field( - default=50, - description="defines how many pipelines the application can schedule concurrently", - ) - COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED: bool = Field( - default=True, - ) - COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_URL: AnyUrl = Field( - ..., - description="This is the cluster that will be used by default" - " when submitting computational services (typically " - "tcp://dask-scheduler:8786, tls://dask-scheduler:8786 for the internal cluster", - ) - COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH: ClusterAuthentication = Field( - default=..., - description="this is the cluster authentication that will be used by default", - ) - COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_FILE_LINK_TYPE: FileLinkType = Field( - FileLinkType.S3, - description=f"Default file link type to use with the internal cluster '{list(FileLinkType)}'", - ) - COMPUTATIONAL_BACKEND_DEFAULT_FILE_LINK_TYPE: FileLinkType = Field( - FileLinkType.PRESIGNED, - description=f"Default file link type to use with computational backend '{list(FileLinkType)}'", - ) - COMPUTATIONAL_BACKEND_ON_DEMAND_CLUSTERS_FILE_LINK_TYPE: FileLinkType = Field( - FileLinkType.PRESIGNED, - description=f"Default file link type to use with computational backend on-demand clusters '{list(FileLinkType)}'", - ) + COMPUTATIONAL_BACKEND_ENABLED: bool = True + COMPUTATIONAL_BACKEND_SCHEDULING_CONCURRENCY: Annotated[ + PositiveInt, + Field( + description="defines how many pipelines the application can schedule concurrently" + ), + ] = 50 + COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED: bool = True + COMPUTATIONAL_BACKEND_PER_CLUSTER_MAX_DISTRIBUTED_CONCURRENT_CONNECTIONS: Annotated[ + PositiveInt, + Field( + description="defines how many concurrent connections to each dask scheduler are allowed accross all director-v2 replicas" + ), + ] = 20 + COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_URL: Annotated[ + AnyUrl, + Field( + description="This is the cluster that will be used by default" + " when submitting computational 
services (typically " + "tcp://dask-scheduler:8786, tls://dask-scheduler:8786 for the internal cluster", + ), + ] + COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH: Annotated[ + ClusterAuthentication, + Field( + description="this is the cluster authentication that will be used by default" + ), + ] + COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_FILE_LINK_TYPE: Annotated[ + FileLinkType, + Field( + description=f"Default file link type to use with the internal cluster '{list(FileLinkType)}'" + ), + ] = FileLinkType.S3 + COMPUTATIONAL_BACKEND_DEFAULT_FILE_LINK_TYPE: Annotated[ + FileLinkType, + Field( + description=f"Default file link type to use with computational backend '{list(FileLinkType)}'" + ), + ] = FileLinkType.PRESIGNED + COMPUTATIONAL_BACKEND_ON_DEMAND_CLUSTERS_FILE_LINK_TYPE: Annotated[ + FileLinkType, + Field( + description=f"Default file link type to use with computational backend on-demand clusters '{list(FileLinkType)}'" + ), + ] = FileLinkType.PRESIGNED + COMPUTATIONAL_BACKEND_MAX_WAITING_FOR_CLUSTER_TIMEOUT: Annotated[ + datetime.timedelta, + Field( + description="maximum time a pipeline can wait for a cluster to start" + "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formatting)." + ), + ] = datetime.timedelta(minutes=10) + + COMPUTATIONAL_BACKEND_MAX_WAITING_FOR_RETRIEVING_RESULTS: Annotated[ + datetime.timedelta, + Field( + description="maximum time the computational scheduler waits until retrieving results from the computational backend is failed" + "(default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formatting)." + ), + ] = datetime.timedelta(minutes=10) @cached_property def default_cluster(self) -> BaseCluster: @@ -111,91 +142,107 @@ class AppSettings(BaseApplicationSettings, MixinLoggingSettings): ), ] = LogLevel.INFO - DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( - default=False, - validation_alias=AliasChoices( - "DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED", - "LOG_FORMAT_LOCAL_DEV_ENABLED", + DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED: Annotated[ + bool, + Field( + validation_alias=AliasChoices( + "DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED", + "LOG_FORMAT_LOCAL_DEV_ENABLED", + ), + description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", ), - description="Enables local development log format. WARNING: make sure it is disabled if you want to have structured logs!", - ) - DIRECTOR_V2_LOG_FILTER_MAPPING: dict[LoggerName, list[MessageSubstring]] = Field( - default_factory=dict, - validation_alias=AliasChoices( - "DIRECTOR_V2_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" + ] = False + DIRECTOR_V2_LOG_FILTER_MAPPING: Annotated[ + dict[LoggerName, list[MessageSubstring]], + Field( + default_factory=dict, + validation_alias=AliasChoices( + "DIRECTOR_V2_LOG_FILTER_MAPPING", "LOG_FILTER_MAPPING" + ), + description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", ), - description="is a dictionary that maps specific loggers (such as 'uvicorn.access' or 'gunicorn.access') to a list of log message patterns that should be filtered out.", - ) + ] = DEFAULT_FACTORY DIRECTOR_V2_DEV_FEATURES_ENABLED: bool = False - DIRECTOR_V2_DEV_FEATURE_R_CLONE_MOUNTS_ENABLED: bool = Field( - default=False, - description=( - "Under development feature. If enabled state " - "is saved using rclone docker volumes." 
+ DIRECTOR_V2_DEV_FEATURE_R_CLONE_MOUNTS_ENABLED: Annotated[ + bool, + Field( + description=( + "Under development feature. If enabled state " + "is saved using rclone docker volumes." + ) ), - ) + ] = False # for passing self-signed certificate to spawned services - DIRECTOR_V2_SELF_SIGNED_SSL_SECRET_ID: str = Field( - default="", - description="ID of the docker secret containing the self-signed certificate", - ) - DIRECTOR_V2_SELF_SIGNED_SSL_SECRET_NAME: str = Field( - default="", - description="Name of the docker secret containing the self-signed certificate", - ) - DIRECTOR_V2_SELF_SIGNED_SSL_FILENAME: str = Field( - default="", - description="Filepath to self-signed osparc.crt file *as mounted inside the container*, empty strings disables it", - ) + DIRECTOR_V2_SELF_SIGNED_SSL_SECRET_ID: Annotated[ + str, + Field( + description="ID of the docker secret containing the self-signed certificate" + ), + ] = "" + DIRECTOR_V2_SELF_SIGNED_SSL_SECRET_NAME: Annotated[ + str, + Field( + description="Name of the docker secret containing the self-signed certificate" + ), + ] = "" + DIRECTOR_V2_SELF_SIGNED_SSL_FILENAME: Annotated[ + str, + Field( + description="Filepath to self-signed osparc.crt file *as mounted inside the container*, empty strings disables it" + ), + ] = "" DIRECTOR_V2_PROMETHEUS_INSTRUMENTATION_ENABLED: bool = True DIRECTOR_V2_PROFILING: bool = False - DIRECTOR_V2_REMOTE_DEBUGGING_PORT: PortInt | None = Field(default=None) + DIRECTOR_V2_REMOTE_DEBUGGING_PORT: PortInt | None = None # extras - SWARM_STACK_NAME: str = Field(default="undefined-please-check") - SERVICE_TRACKING_HEARTBEAT: datetime.timedelta = Field( - default=DEFAULT_RESOURCE_USAGE_HEARTBEAT_INTERVAL, - description="Service scheduler heartbeat (everytime a heartbeat is sent into RabbitMQ)" - " (default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)", - ) + SWARM_STACK_NAME: str = "undefined-please-check" + SERVICE_TRACKING_HEARTBEAT: Annotated[ + datetime.timedelta, + Field( + description="Service scheduler heartbeat (everytime a heartbeat is sent into RabbitMQ)" + " (default to seconds, or see https://pydantic-docs.helpmanual.io/usage/types/#datetime-types for string formating)" + ), + ] = DEFAULT_RESOURCE_USAGE_HEARTBEAT_INTERVAL - SIMCORE_SERVICES_NETWORK_NAME: str | None = Field( - default=None, - description="used to find the right network name", - ) - SIMCORE_SERVICES_PREFIX: str | None = Field( - "simcore/services", - description="useful when developing with an alternative registry namespace", - ) + SIMCORE_SERVICES_NETWORK_NAME: Annotated[ + str | None, Field(description="used to find the right network name") + ] = None + SIMCORE_SERVICES_PREFIX: Annotated[ + str | None, + Field( + description="useful when developing with an alternative registry namespace" + ), + ] = "simcore/services" - DIRECTOR_V2_NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS: NonNegativeInt = Field( - default=NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS_DEFAULT_VALUE, - description="forwarded to sidecars which use nodeports", - ) + DIRECTOR_V2_NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS: Annotated[ + NonNegativeInt, Field(description="forwarded to sidecars which use nodeports") + ] = NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS_DEFAULT_VALUE # debug settings - CLIENT_REQUEST: ClientRequestSettings = Field( - json_schema_extra={"auto_default_from_env": True} - ) + CLIENT_REQUEST: Annotated[ + ClientRequestSettings, Field(json_schema_extra={"auto_default_from_env": True}) + ] = DEFAULT_FACTORY # App 
modules settings --------------------- DIRECTOR_V2_STORAGE: Annotated[ StorageSettings, Field(json_schema_extra={"auto_default_from_env": True}) ] - DIRECTOR_V2_NODE_PORTS_STORAGE_AUTH: StorageAuthSettings | None = Field( - json_schema_extra={"auto_default_from_env": True} - ) + DIRECTOR_V2_NODE_PORTS_STORAGE_AUTH: Annotated[ + StorageAuthSettings | None, + Field(json_schema_extra={"auto_default_from_env": True}), + ] = None DIRECTOR_V2_CATALOG: Annotated[ CatalogSettings | None, Field(json_schema_extra={"auto_default_from_env": True}) ] - DIRECTOR_V0: DirectorV0Settings = Field( - json_schema_extra={"auto_default_from_env": True} - ) + DIRECTOR_V0: Annotated[ + DirectorV0Settings, Field(json_schema_extra={"auto_default_from_env": True}) + ] = DEFAULT_FACTORY DYNAMIC_SERVICES: Annotated[ DynamicServicesSettings, @@ -206,35 +253,57 @@ class AppSettings(BaseApplicationSettings, MixinLoggingSettings): PostgresSettings, Field(json_schema_extra={"auto_default_from_env": True}) ] - REDIS: RedisSettings = Field(json_schema_extra={"auto_default_from_env": True}) + REDIS: Annotated[ + RedisSettings, Field(json_schema_extra={"auto_default_from_env": True}) + ] = DEFAULT_FACTORY - DIRECTOR_V2_RABBITMQ: RabbitSettings = Field( - json_schema_extra={"auto_default_from_env": True} - ) + DIRECTOR_V2_RABBITMQ: Annotated[ + RabbitSettings, Field(json_schema_extra={"auto_default_from_env": True}) + ] = DEFAULT_FACTORY - TRAEFIK_SIMCORE_ZONE: str = Field("internal_simcore_stack") + TRAEFIK_SIMCORE_ZONE: str = "internal_simcore_stack" - DIRECTOR_V2_COMPUTATIONAL_BACKEND: ComputationalBackendSettings = Field( - json_schema_extra={"auto_default_from_env": True} - ) + DIRECTOR_V2_COMPUTATIONAL_BACKEND: Annotated[ + ComputationalBackendSettings, + Field(json_schema_extra={"auto_default_from_env": True}), + ] = DEFAULT_FACTORY - DIRECTOR_V2_DOCKER_REGISTRY: RegistrySettings = Field( - json_schema_extra={"auto_default_from_env": True}, - description="settings for the private registry deployed with the platform", - ) - DIRECTOR_V2_DOCKER_HUB_REGISTRY: RegistrySettings | None = Field( - default=None, description="public DockerHub registry settings" - ) + DIRECTOR_V2_DOCKER_REGISTRY: Annotated[ + RegistrySettings, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="settings for the private registry deployed with the platform", + ), + ] = DEFAULT_FACTORY + DIRECTOR_V2_DOCKER_HUB_REGISTRY: Annotated[ + RegistrySettings | None, Field(description="public DockerHub registry settings") + ] = None - DIRECTOR_V2_RESOURCE_USAGE_TRACKER: ResourceUsageTrackerSettings = Field( - json_schema_extra={"auto_default_from_env": True}, - description="resource usage tracker service client's plugin", - ) + DIRECTOR_V2_RESOURCE_USAGE_TRACKER: Annotated[ + ResourceUsageTrackerSettings, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="resource usage tracker service client's plugin", + ), + ] = DEFAULT_FACTORY - DIRECTOR_V2_TRACING: TracingSettings | None = Field( - json_schema_extra={"auto_default_from_env": True}, - description="settings for opentelemetry tracing", - ) + DIRECTOR_V2_TRACING: Annotated[ + TracingSettings | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="settings for opentelemetry tracing", + ), + ] = None + + DIRECTOR_V2_WEBSERVER_RPC_NAMESPACE: Annotated[ + RPCNamespace, + Field(description="Namespace to connect to correct webserver's RPC interface"), + ]
@field_validator("LOG_LEVEL", mode="before") @classmethod diff --git a/services/director-v2/src/simcore_service_director_v2/main.py b/services/director-v2/src/simcore_service_director_v2/main.py index 245fb26285ee..d0ecfb8b16ab 100644 --- a/services/director-v2/src/simcore_service_director_v2/main.py +++ b/services/director-v2/src/simcore_service_director_v2/main.py @@ -1,7 +1,12 @@ -"""Main application to be deployed in for example uvicorn. -""" +"""Main application to be deployed in for example uvicorn.""" + +import logging + from fastapi import FastAPI -from simcore_service_director_v2.core.application import init_app +from simcore_service_director_v2.core.application import create_app + +_logger = logging.getLogger(__name__) + -# SINGLETON FastAPI app -the_app: FastAPI = init_app() +def app_factory() -> FastAPI: + return create_app() diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_run_snapshot_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_run_snapshot_tasks.py index cb781452435a..66945da11d1c 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_run_snapshot_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_run_snapshot_tasks.py @@ -1,6 +1,21 @@ +from contextlib import suppress +from datetime import datetime +from typing import Annotated, Any + +from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID +from models_library.projects_state import RunningState from models_library.resource_tracker import HardwareInfo -from pydantic import ConfigDict, PositiveInt +from pydantic import ( + BaseModel, + BeforeValidator, + ConfigDict, + PositiveInt, + field_validator, +) +from simcore_postgres_database.models.comp_pipeline import StateType +from ..utils.db import DB_TO_RUNNING_STATE from .comp_tasks import BaseCompTaskAtDB, Image @@ -75,3 +90,34 @@ class CompRunSnapshotTaskAtDBGet(BaseCompTaskAtDB): class CompRunSnapshotTaskAtDBCreate(BaseCompTaskAtDB): run_id: PositiveInt + + +def _none_to_zero_float_pre_validator(value: Any): + if value is None: + return 0.0 + return value + + +class CompRunSnapshotTaskDBGet(BaseModel): + snapshot_task_id: PositiveInt + run_id: PositiveInt + project_uuid: ProjectID + node_id: NodeID + state: RunningState + progress: Annotated[float, BeforeValidator(_none_to_zero_float_pre_validator)] + image: dict[str, Any] + started_at: datetime | None + ended_at: datetime | None + iteration: PositiveInt + + @field_validator("state", mode="before") + @classmethod + def convert_result_from_state_type_enum_if_needed(cls, v): + if isinstance(v, str): + # try to convert to a StateType, if it fails the validations will continue + # and pydantic will try to convert it to a RunningState later on + with suppress(ValueError): + v = StateType(v) + if isinstance(v, StateType): + return RunningState(DB_TO_RUNNING_STATE[StateType(v)]) + return v diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py index 9d1513db3c9e..e13607d22441 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_tasks.py @@ -276,3 +276,15 @@ class ComputationTaskForRpcDBGet(BaseModel): image: dict[str, Any] started_at: dt.datetime | None ended_at: dt.datetime | None + +
@field_validator("state", mode="before") + @classmethod + def _convert_from_state_type_enum_if_needed(cls, v): + if isinstance(v, str): + # try to convert to a StateType, if it fails the validations will continue + # and pydantic will try to convert it to a RunninState later on + with suppress(ValueError): + v = StateType(v) + if isinstance(v, StateType): + return RunningState(DB_TO_RUNNING_STATE[v]) + return v diff --git a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py index d0888dd1acf6..92ab096ef66e 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/models/dynamic_services_scheduler.py @@ -406,6 +406,8 @@ def endpoint(self) -> AnyHttpUrl: paths_mapping: PathMappingsLabel # overwrites in DynamicSidecarServiceLabels + is_collaborative: bool = False + user_preferences_path: Path | None = None callbacks_mapping: Annotated[CallbacksMapping, Field(default_factory=dict)] @@ -513,6 +515,7 @@ def from_http_request( "product_name": service.product_name, "product_api_base_url": service.product_api_base_url, "paths_mapping": simcore_service_labels.paths_mapping, + "is_collaborative": simcore_service_labels.is_collaborative, "callbacks_mapping": simcore_service_labels.callbacks_mapping, "compose_spec": json_dumps(simcore_service_labels.compose_spec), "container_http_entry": simcore_service_labels.container_http_entry, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_manager.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_manager.py index dd5ebbf20224..418d3fcde149 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_manager.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_manager.py @@ -1,22 +1,25 @@ +import contextlib import logging from typing import Final import networkx as nx +from common_library.async_tools import cancel_wait_task from fastapi import FastAPI +from models_library.computations import CollectionRunID from models_library.projects import ProjectID from models_library.users import UserID -from servicelib.async_utils import cancel_wait_task from servicelib.background_task import create_periodic_task -from servicelib.exception_utils import silence_exceptions +from servicelib.exception_utils import suppress_exceptions from servicelib.logging_utils import log_context from servicelib.redis import CouldNotAcquireLockError, exclusive from servicelib.utils import limited_gather from sqlalchemy.ext.asyncio import AsyncEngine +from ...core.errors import ComputationalRunNotFoundError from ...models.comp_pipelines import CompPipelineAtDB from ...models.comp_runs import RunMetadataDict from ...models.comp_tasks import CompTaskAtDB -from ...utils.rabbitmq import publish_project_log +from ...utils.rabbitmq import publish_pipeline_scheduling_state, publish_project_log from ..db import get_db_engine from ..db.repositories.comp_pipelines import CompPipelinesRepository from ..db.repositories.comp_runs import CompRunsRepository @@ -43,6 +46,7 @@ async def run_new_pipeline( project_id: ProjectID, run_metadata: RunMetadataDict, use_on_demand_clusters: bool, + collection_run_id: CollectionRunID, ) -> None: """Sets a new pipeline to be scheduled on the computational resources.""" # ensure the pipeline exists and is 
populated with something @@ -57,12 +61,25 @@ async def run_new_pipeline( ) return + with contextlib.suppress(ComputationalRunNotFoundError): + # if the run already exists and is scheduled, do not schedule again. + last_run = await CompRunsRepository.instance(db_engine).get( + user_id=user_id, project_id=project_id + ) + if last_run.result.is_running(): + _logger.warning( + "run for project %s is already running. not scheduling it again.", + f"{project_id=}", + ) + return + new_run = await CompRunsRepository.instance(db_engine).create( user_id=user_id, project_id=project_id, metadata=run_metadata, use_on_demand_clusters=use_on_demand_clusters, dag_adjacency_list=comp_pipeline_at_db.dag_adjacency_list, + collection_run_id=collection_run_id, ) tasks_to_run = await _get_pipeline_tasks_at_db(db_engine, project_id, dag) @@ -92,6 +109,9 @@ async def run_new_pipeline( log=f"Project pipeline scheduled using {'on-demand clusters' if use_on_demand_clusters else 'pre-defined clusters'}, starting soon...", log_level=logging.INFO, ) + await publish_pipeline_scheduling_state( + rabbitmq_client, user_id, project_id, new_run.result + ) async def stop_pipeline( @@ -128,8 +148,7 @@ async def _get_pipeline_at_db( project_id: ProjectID, db_engine: AsyncEngine ) -> CompPipelineAtDB: comp_pipeline_repo = CompPipelinesRepository.instance(db_engine) - pipeline_at_db = await comp_pipeline_repo.get_pipeline(project_id) - return pipeline_at_db + return await comp_pipeline_repo.get_pipeline(project_id) async def _get_pipeline_tasks_at_db( @@ -166,8 +185,12 @@ async def schedule_all_pipelines(app: FastAPI) -> None: ) if possibly_lost_scheduled_pipelines: _logger.error( - "found %d lost pipelines, they will be re-scheduled now", + "found %d lost pipelines, they will be re-scheduled now. 
'%s'", len(possibly_lost_scheduled_pipelines), + ", ".join( + f"{run.project_uuid=}|{run.iteration=}" + for run in possibly_lost_scheduled_pipelines + ), ) rabbitmq_client = get_rabbitmq_client(app) @@ -191,7 +214,10 @@ async def schedule_all_pipelines(app: FastAPI) -> None: async def setup_manager(app: FastAPI) -> None: app.state.scheduler_manager = create_periodic_task( - silence_exceptions((CouldNotAcquireLockError,))(schedule_all_pipelines), + suppress_exceptions( + (CouldNotAcquireLockError,), + reason="Multiple instances of the periodic task `computational scheduler manager` are running.", + )(schedule_all_pipelines), interval=SCHEDULER_INTERVAL, task_name=MODULE_NAME_SCHEDULER, app=app, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_models.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_models.py index 28dca04dc536..4fc5c1831ef6 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_models.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_models.py @@ -1,3 +1,4 @@ +from dataclasses import dataclass from typing import Literal from models_library.projects import ProjectID @@ -5,15 +6,22 @@ from models_library.users import UserID from ...models.comp_runs import Iteration +from ...models.comp_tasks import CompTaskAtDB class SchedulePipelineRabbitMessage(RabbitMessageBase): - channel_name: Literal[ + channel_name: Literal["simcore.services.director-v2.scheduling"] = ( "simcore.services.director-v2.scheduling" - ] = "simcore.services.director-v2.scheduling" + ) user_id: UserID project_id: ProjectID iteration: Iteration def routing_key(self) -> str | None: # pylint: disable=no-self-use # abstract return None + + +@dataclass(frozen=True, slots=True) +class TaskStateTracker: + previous: CompTaskAtDB + current: CompTaskAtDB diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_publisher.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_publisher.py index 42c4b1d79389..f358f5eb3b9c 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_publisher.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_publisher.py @@ -16,9 +16,10 @@ async def request_pipeline_scheduling( project_id: ProjectID, iteration: Iteration, ) -> None: - # NOTE: we should use the transaction and the asyncpg engine here to ensure 100% consistency - # https://github.com/ITISFoundation/osparc-simcore/issues/6818 - # async with transaction_context(get_asyncpg_engine(app)) as connection: + # NOTE: it is important that the DB is set up first before scheduling, in case the worker already schedules before we change the DB + await CompRunsRepository.instance(db_engine).mark_for_scheduling( + user_id=user_id, project_id=project_id, iteration=iteration + ) await rabbitmq_client.publish( SchedulePipelineRabbitMessage.get_channel_name(), SchedulePipelineRabbitMessage( @@ -27,6 +28,3 @@ async def request_pipeline_scheduling( iteration=iteration, ), ) - await CompRunsRepository.instance(db_engine).mark_for_scheduling( - user_id=user_id, project_id=project_id, iteration=iteration - ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_base.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_base.py index e1a37378dd1f..97b1bc1cf22c 100644 --- 
a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_base.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_base.py @@ -21,6 +21,8 @@ import arrow import networkx as nx +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs +from common_library.user_messages import user_message from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, NodeIDStr from models_library.projects_state import RunningState @@ -33,6 +35,7 @@ from servicelib.logging_utils import log_catch, log_context from servicelib.rabbitmq import RabbitMQClient, RabbitMQRPCClient from servicelib.redis import RedisClientSDK +from servicelib.utils import limited_gather from sqlalchemy.ext.asyncio import AsyncEngine from ...constants import UNDEFINED_STR_METADATA @@ -44,7 +47,6 @@ DaskClientAcquisisitonError, InvalidPipelineError, PipelineNotFoundError, - TaskSchedulingError, ) from ...core.settings import ComputationalBackendSettings from ...models.comp_pipelines import CompPipelineAtDB @@ -52,6 +54,7 @@ from ...models.comp_tasks import CompTaskAtDB from ...utils.computations import get_pipeline_state_from_task_states from ...utils.rabbitmq import ( + publish_pipeline_scheduling_state, publish_project_log, publish_service_resource_tracking_heartbeat, publish_service_resource_tracking_started, @@ -60,6 +63,7 @@ from ..db.repositories.comp_pipelines import CompPipelinesRepository from ..db.repositories.comp_runs import CompRunsRepository from ..db.repositories.comp_tasks import CompTasksRepository +from ._models import TaskStateTracker from ._publisher import request_pipeline_scheduling from ._utils import ( COMPLETED_STATES, @@ -73,9 +77,10 @@ _logger = logging.getLogger(__name__) -_Previous = CompTaskAtDB -_Current = CompTaskAtDB -_MAX_WAITING_FOR_CLUSTER_TIMEOUT_IN_MIN: Final[int] = 10 +_MAX_WAITING_TIME_FOR_UNKNOWN_TASKS: Final[datetime.timedelta] = datetime.timedelta( + seconds=30 +) +_PUBLICATION_CONCURRENCY_LIMIT: Final[int] = 10 def _auto_schedule_callback( @@ -111,52 +116,49 @@ async def _async_cb() -> None: @dataclass(frozen=True, slots=True) class SortedTasks: started: list[CompTaskAtDB] - completed: list[CompTaskAtDB] - waiting: list[CompTaskAtDB] - potentially_lost: list[CompTaskAtDB] - - -_MAX_WAITING_TIME_FOR_UNKNOWN_TASKS: Final[datetime.timedelta] = datetime.timedelta( - seconds=30 -) + completed: list[TaskStateTracker] + waiting: list[TaskStateTracker] + potentially_lost: list[TaskStateTracker] async def _triage_changed_tasks( - changed_tasks: list[tuple[_Previous, _Current]], + changed_tasks: list[TaskStateTracker], ) -> SortedTasks: started_tasks = [ - current - for previous, current in changed_tasks - if current.state in RUNNING_STATES + tracker.current + for tracker in changed_tasks + if tracker.current.state in RUNNING_STATES or ( - previous.state in WAITING_FOR_START_STATES - and current.state in COMPLETED_STATES + tracker.previous.state in WAITING_FOR_START_STATES + and tracker.current.state in COMPLETED_STATES ) ] completed_tasks = [ - current for _, current in changed_tasks if current.state in COMPLETED_STATES + tracker + for tracker in changed_tasks + if tracker.current.state in COMPLETED_STATES ] waiting_for_resources_tasks = [ - current - for previous, current in changed_tasks - if current.state in WAITING_FOR_START_STATES + tracker + for tracker in changed_tasks + if tracker.current.state in WAITING_FOR_START_STATES ] lost_tasks = [ - current - for 
previous, current in changed_tasks - if (current.state is RunningState.UNKNOWN) + tracker + for tracker in changed_tasks + if (tracker.current.state is RunningState.UNKNOWN) and ( - (arrow.utcnow().datetime - previous.modified) + (arrow.utcnow().datetime - tracker.previous.modified) > _MAX_WAITING_TIME_FOR_UNKNOWN_TASKS ) ] if lost_tasks: _logger.warning( "%s are currently in unknown state. TIP: If they are running in an external cluster and it is not yet ready, that might explain it. But inform @sanderegg nevertheless!", - [t.node_id for t in lost_tasks], + [t.current.node_id for t in lost_tasks], ) return SortedTasks( @@ -208,10 +210,13 @@ async def _update_run_result_from_tasks( project_id: ProjectID, iteration: Iteration, pipeline_tasks: dict[NodeIDStr, CompTaskAtDB], + current_result: RunningState, ) -> RunningState: pipeline_state_from_tasks = get_pipeline_state_from_task_states( list(pipeline_tasks.values()), ) + if pipeline_state_from_tasks == current_result: + return pipeline_state_from_tasks _logger.debug( "pipeline %s is currently in %s", f"{user_id=}_{project_id=}_{iteration=}", @@ -238,22 +243,46 @@ async def _set_run_result( final_state=(run_result in COMPLETED_STATES), ) - async def _set_schedule_done( + if run_result in COMPLETED_STATES: + # send event to notify the piipeline is done + await publish_project_log( + self.rabbitmq_client, + user_id=user_id, + project_id=project_id, + log=user_message( + f"Project pipeline execution for iteration {iteration} has completed with status: {run_result.value}", + _version=1, + ), + log_level=logging.INFO, + ) + await publish_pipeline_scheduling_state( + self.rabbitmq_client, user_id, project_id, run_result + ) + + async def _set_processing_done( self, user_id: UserID, project_id: ProjectID, iteration: Iteration, ) -> None: - await CompRunsRepository.instance(self.db_engine).mark_as_processed( - user_id=user_id, - project_id=project_id, - iteration=iteration, - ) + with log_context( + _logger, + logging.DEBUG, + msg=f"mark pipeline run for {iteration=} for {user_id=} and {project_id=} as processed", + ): + await CompRunsRepository.instance(self.db_engine).mark_as_processed( + user_id=user_id, + project_id=project_id, + iteration=iteration, + ) async def _set_states_following_failed_to_aborted( - self, project_id: ProjectID, dag: nx.DiGraph, run_id: PositiveInt + self, + project_id: ProjectID, + dag: nx.DiGraph, + tasks: dict[NodeIDStr, CompTaskAtDB], + run_id: PositiveInt, ) -> dict[NodeIDStr, CompTaskAtDB]: - tasks = await self._get_pipeline_tasks(project_id, dag) # Perform a reverse topological sort to ensure tasks are ordered from last to first sorted_node_ids = list(reversed(list(nx.topological_sort(dag)))) tasks = { @@ -293,6 +322,7 @@ async def _send_running_tasks_heartbeat( def _need_heartbeat(task: CompTaskAtDB) -> bool: if task.state not in RUNNING_STATES: return False + if task.last_heartbeat is None: assert task.start # nosec return bool( @@ -308,7 +338,7 @@ def _need_heartbeat(task: CompTaskAtDB) -> bool: project_id, dag ) if running_tasks := [t for t in tasks.values() if _need_heartbeat(t)]: - await asyncio.gather( + await limited_gather( *( publish_service_resource_tracking_heartbeat( self.rabbitmq_client, @@ -317,31 +347,29 @@ def _need_heartbeat(task: CompTaskAtDB) -> bool: ), ) for t in running_tasks - ) + ), + log=_logger, + limit=_PUBLICATION_CONCURRENCY_LIMIT, ) - comp_tasks_repo = CompTasksRepository(self.db_engine) - await asyncio.gather( - *( - comp_tasks_repo.update_project_task_last_heartbeat( - t.project_id, 
t.node_id, run_id, utc_now - ) - for t in running_tasks + comp_tasks_repo = CompTasksRepository.instance(self.db_engine) + for task in running_tasks: + await comp_tasks_repo.update_project_task_last_heartbeat( + project_id, task.node_id, run_id, utc_now ) - ) async def _get_changed_tasks_from_backend( self, user_id: UserID, processing_tasks: list[CompTaskAtDB], comp_run: CompRunsAtDB, - ) -> tuple[list[tuple[_Previous, _Current]], list[CompTaskAtDB]]: + ) -> tuple[list[TaskStateTracker], list[CompTaskAtDB]]: tasks_backend_status = await self._get_tasks_status( user_id, processing_tasks, comp_run ) return ( [ - ( + TaskStateTracker( task, task.model_copy(update={"state": backend_state}), ) @@ -372,7 +400,7 @@ async def _process_started_tasks( utc_now = arrow.utcnow().datetime # resource tracking - await asyncio.gather( + await limited_gather( *( publish_service_resource_tracking_started( self.rabbitmq_client, @@ -434,10 +462,12 @@ async def _process_started_tasks( service_additional_metadata={}, ) for t in tasks - ) + ), + log=_logger, + limit=_PUBLICATION_CONCURRENCY_LIMIT, ) # instrumentation - await asyncio.gather( + await limited_gather( *( publish_service_started_metrics( self.rabbitmq_client, @@ -448,24 +478,22 @@ async def _process_started_tasks( task=t, ) for t in tasks - ) + ), + log=_logger, + limit=_PUBLICATION_CONCURRENCY_LIMIT, ) # update DB comp_tasks_repo = CompTasksRepository(self.db_engine) - await asyncio.gather( - *( - comp_tasks_repo.update_project_tasks_state( - t.project_id, - run_id, - [t.node_id], - t.state, - optional_started=utc_now, - optional_progress=t.progress, - ) - for t in tasks + for task in tasks: + await comp_tasks_repo.update_project_tasks_state( + project_id, + run_id, + [task.node_id], + task.state, + optional_started=utc_now, + optional_progress=task.progress, ) - ) await CompRunsRepository.instance(self.db_engine).mark_as_started( user_id=user_id, project_id=project_id, @@ -474,20 +502,16 @@ async def _process_started_tasks( ) async def _process_waiting_tasks( - self, tasks: list[CompTaskAtDB], run_id: PositiveInt + self, tasks: list[TaskStateTracker], run_id: PositiveInt ) -> None: - comp_tasks_repo = CompTasksRepository(self.db_engine) - await asyncio.gather( - *( - comp_tasks_repo.update_project_tasks_state( - t.project_id, - run_id, - [t.node_id], - t.state, - ) - for t in tasks + comp_tasks_repo = CompTasksRepository.instance(self.db_engine) + for task in tasks: + await comp_tasks_repo.update_project_tasks_state( + task.current.project_id, + run_id, + [task.current.node_id], + task.current.state, ) - ) async def _update_states_from_comp_backend( self, @@ -574,7 +598,7 @@ async def _stop_tasks( async def _process_completed_tasks( self, user_id: UserID, - tasks: list[CompTaskAtDB], + tasks: list[TaskStateTracker], iteration: Iteration, comp_run: CompRunsAtDB, ) -> None: @@ -589,6 +613,10 @@ async def _process_executing_tasks( ) -> None: """process executing tasks from the 3rd party backend""" + @abstractmethod + async def _release_resources(self, comp_run: CompRunsAtDB) -> None: + """release resources used by the scheduler for a given user and project""" + async def apply( self, *, @@ -603,22 +631,30 @@ async def apply( msg=f"scheduling pipeline {user_id=}:{project_id=}:{iteration=}", ): dag: nx.DiGraph = nx.DiGraph() + try: comp_run = await CompRunsRepository.instance(self.db_engine).get( user_id, project_id, iteration ) dag = await self._get_pipeline_dag(project_id) + # 1. 
Update our list of tasks with data from backend (state, results) await self._update_states_from_comp_backend( user_id, project_id, iteration, dag, comp_run ) - # 2. Any task following a FAILED task shall be ABORTED + # 1.1. get the updated tasks NOTE: we need to get them again as some states might have changed + comp_tasks = await self._get_pipeline_tasks(project_id, dag) + # 2. timeout if waiting for cluster has been there for more than X minutes + comp_tasks = await self._timeout_if_waiting_for_cluster_too_long( + user_id, project_id, comp_run, comp_tasks + ) + # 3. Any task following a FAILED task shall be ABORTED comp_tasks = await self._set_states_following_failed_to_aborted( - project_id, dag, comp_run.run_id + project_id, dag, comp_tasks, comp_run.run_id ) - # 3. do we want to stop the pipeline now? + # 4. do we want to stop the pipeline now? if comp_run.cancelled: - await self._schedule_tasks_to_stop( + comp_tasks = await self._schedule_tasks_to_stop( user_id, project_id, comp_tasks, comp_run ) else: @@ -638,10 +674,7 @@ async def apply( iteration=iteration, ), ) - # 4. timeout if waiting for cluster has been there for more than X minutes - comp_tasks = await self._timeout_if_waiting_for_cluster_too_long( - user_id, project_id, comp_run.run_id, comp_tasks - ) + # 5. send a heartbeat await self._send_running_tasks_heartbeat( user_id, project_id, comp_run.run_id, iteration, dag @@ -649,55 +682,89 @@ async def apply( # 6. Update the run result pipeline_result = await self._update_run_result_from_tasks( - user_id, project_id, iteration, comp_tasks + user_id, project_id, iteration, comp_tasks, comp_run.result ) # 7. Are we done scheduling that pipeline? if not dag.nodes() or pipeline_result in COMPLETED_STATES: + await self._release_resources(comp_run) # there is nothing left, the run is completed, we're done here _logger.info( "pipeline %s scheduling completed with result %s", f"{project_id=}", f"{pipeline_result=}", ) - except PipelineNotFoundError: - _logger.warning( - "pipeline %s does not exist in comp_pipeline table, it will be removed from scheduler", - f"{project_id=}", + except PipelineNotFoundError as exc: + _logger.exception( + **create_troubleshooting_log_kwargs( + f"pipeline {project_id} is missing from `comp_pipelines` DB table, something is corrupted. Aborting scheduling", + error=exc, + error_context={ + "user_id": f"{user_id}", + "project_id": f"{project_id}", + "iteration": f"{iteration}", + }, + tip="Check that the project still exists", + ) ) + + # NOTE: no need to update task states here as pipeline is already broken await self._set_run_result( - user_id, project_id, iteration, RunningState.ABORTED + user_id, project_id, iteration, RunningState.FAILED ) except InvalidPipelineError as exc: - _logger.warning( - "pipeline %s appears to be misconfigured, it will be removed from scheduler. Please check pipeline:\n%s", - f"{project_id=}", - exc, + _logger.exception( + **create_troubleshooting_log_kwargs( + f"pipeline {project_id} appears to be misconfigured. 
Aborting scheduling", + error=exc, + error_context={ + "user_id": f"{user_id}", + "project_id": f"{project_id}", + "iteration": f"{iteration}", + }, + tip="Check that the project pipeline is valid and all tasks are present in the DB", + ), ) + # NOTE: no need to update task states here as pipeline is already broken await self._set_run_result( - user_id, project_id, iteration, RunningState.ABORTED + user_id, project_id, iteration, RunningState.FAILED ) - except (DaskClientAcquisisitonError, ClustersKeeperNotAvailableError): + except ( + DaskClientAcquisisitonError, + ComputationalBackendNotConnectedError, + ClustersKeeperNotAvailableError, + ) as exc: _logger.exception( - "Unexpected error while connecting with computational backend, aborting pipeline" - ) - tasks: dict[NodeIDStr, CompTaskAtDB] = await self._get_pipeline_tasks( - project_id, dag + **create_troubleshooting_log_kwargs( + "Unexpectedly lost connection to the computational backend. Tasks are set back to WAITING_FOR_CLUSTER state until we eventually reconnect", + error=exc, + error_context={ + "user_id": f"{user_id}", + "project_id": f"{project_id}", + "iteration": f"{iteration}", + }, + tip="Check network connection and the status of the computational backend (clusters-keeper, dask-scheduler, dask-workers)", + ) ) + processing_tasks = { + k: v + for k, v in ( + await self._get_pipeline_tasks(project_id, dag) + ).items() + if v.state in PROCESSING_STATES + } comp_tasks_repo = CompTasksRepository(self.db_engine) await comp_tasks_repo.update_project_tasks_state( project_id, comp_run.run_id, - [t.node_id for t in tasks.values()], - RunningState.FAILED, + [t.node_id for t in processing_tasks.values()], + RunningState.WAITING_FOR_CLUSTER, ) await self._set_run_result( - user_id, project_id, iteration, RunningState.FAILED + user_id, project_id, iteration, RunningState.WAITING_FOR_CLUSTER ) - except ComputationalBackendNotConnectedError: - _logger.exception("Computational backend is not connected!") finally: - await self._set_schedule_done(user_id, project_id, iteration) + await self._set_processing_done(user_id, project_id, iteration) async def _schedule_tasks_to_stop( self, @@ -705,21 +772,30 @@ async def _schedule_tasks_to_stop( project_id: ProjectID, comp_tasks: dict[NodeIDStr, CompTaskAtDB], comp_run: CompRunsAtDB, - ) -> None: - # get any running task and stop them + ) -> dict[NodeIDStr, CompTaskAtDB]: + # NOTE: tasks that were not yet started but can be marked as ABORTED straight away, + # the tasks that are already processing need some time to stop + # and we need to stop them in the backend + tasks_instantly_stopeable = [ + t for t in comp_tasks.values() if t.state in TASK_TO_START_STATES + ] comp_tasks_repo = CompTasksRepository.instance(self.db_engine) await ( comp_tasks_repo.mark_project_published_waiting_for_cluster_tasks_as_aborted( project_id, comp_run.run_id ) ) + for task in tasks_instantly_stopeable: + comp_tasks[f"{task.node_id}"].state = RunningState.ABORTED # stop any remaining running task, these are already submitted if tasks_to_stop := [ t for t in comp_tasks.values() if t.state in PROCESSING_STATES ]: await self._stop_tasks(user_id, tasks_to_stop, comp_run) - async def _schedule_tasks_to_start( # noqa: C901 + return comp_tasks + + async def _schedule_tasks_to_start( self, user_id: UserID, project_id: ProjectID, @@ -753,6 +829,13 @@ async def _schedule_tasks_to_start( # noqa: C901 # nothing to do return comp_tasks + log_error_context = { + "user_id": f"{user_id}", + "project_id": f"{project_id}", + 
"tasks_ready_to_start": f"{list(tasks_ready_to_start.keys())}", + "comp_run_use_on_demand_clusters": f"{comp_run.use_on_demand_clusters}", + "comp_run_run_id": f"{comp_run.run_id}", + } try: await self._start_tasks( user_id=user_id, @@ -761,28 +844,13 @@ async def _schedule_tasks_to_start( # noqa: C901 comp_run=comp_run, wake_up_callback=wake_up_callback, ) - except ( - ComputationalBackendNotConnectedError, - ComputationalSchedulerChangedError, - ): - _logger.exception( - "Issue with computational backend. Tasks are set back " - "to WAITING_FOR_CLUSTER state until scheduler comes back!", - ) - await CompTasksRepository.instance( - self.db_engine - ).update_project_tasks_state( - project_id, - comp_run.run_id, - list(tasks_ready_to_start.keys()), - RunningState.WAITING_FOR_CLUSTER, - ) - for task in tasks_ready_to_start: - comp_tasks[f"{task}"].state = RunningState.WAITING_FOR_CLUSTER - except ComputationalBackendOnDemandNotReadyError as exc: _logger.info( - "The on demand computational backend is not ready yet: %s", exc + **create_troubleshooting_log_kwargs( + "The on demand computational backend is not ready yet. Tasks are set to WAITING_FOR_CLUSTER state until the cluster is ready!", + error=exc, + error_context=log_error_context, + ) ) await publish_project_log( self.rabbitmq_client, @@ -791,7 +859,6 @@ async def _schedule_tasks_to_start( # noqa: C901 log=f"{exc}", log_level=logging.INFO, ) - await CompTasksRepository.instance( self.db_engine ).update_project_tasks_state( @@ -802,54 +869,47 @@ async def _schedule_tasks_to_start( # noqa: C901 ) for task in tasks_ready_to_start: comp_tasks[f"{task}"].state = RunningState.WAITING_FOR_CLUSTER - except ClustersKeeperNotAvailableError: - _logger.exception("Unexpected error while starting tasks:") + except ( + ComputationalBackendNotConnectedError, + ComputationalSchedulerChangedError, + ClustersKeeperNotAvailableError, + ) as exc: + _logger.exception( + **create_troubleshooting_log_kwargs( + "Computational backend is not connected. Tasks are set back " + "to WAITING_FOR_CLUSTER state until scheduler comes back!", + error=exc, + error_context=log_error_context, + ) + ) await publish_project_log( self.rabbitmq_client, user_id, project_id, - log="Unexpected error while scheduling computational tasks! TIP: contact osparc support.", + log=user_message( + "An unexpected error occurred during task scheduling. 
Please contact oSparc support if this issue persists.", + _version=1, + ), log_level=logging.ERROR, ) - await CompTasksRepository.instance( self.db_engine ).update_project_tasks_state( project_id, comp_run.run_id, list(tasks_ready_to_start.keys()), - RunningState.FAILED, - optional_progress=1.0, - optional_stopped=arrow.utcnow().datetime, + RunningState.WAITING_FOR_CLUSTER, ) for task in tasks_ready_to_start: - comp_tasks[f"{task}"].state = RunningState.FAILED - raise - except TaskSchedulingError as exc: - _logger.exception( - "Project '%s''s task '%s' could not be scheduled", - exc.project_id, - exc.node_id, - ) - await CompTasksRepository.instance( - self.db_engine - ).update_project_tasks_state( - project_id, - comp_run.run_id, - [exc.node_id], - RunningState.FAILED, - exc.get_errors(), - optional_progress=1.0, - optional_stopped=arrow.utcnow().datetime, - ) - comp_tasks[f"{exc.node_id}"].state = RunningState.FAILED - except Exception: + comp_tasks[f"{task}"].state = RunningState.WAITING_FOR_CLUSTER + + except Exception as exc: _logger.exception( - "Unexpected error for %s with %s on %s happened when scheduling %s:", - f"{comp_run.user_id=}", - f"{comp_run.project_uuid=}", - f"{comp_run.use_on_demand_clusters=}", - f"{tasks_ready_to_start.keys()=}", + **create_troubleshooting_log_kwargs( + "Unexpected error happened when scheduling tasks, all tasks to start are set to FAILED and the rest of the pipeline will be ABORTED", + error=exc, + error_context=log_error_context, + ) ) await CompTasksRepository.instance( self.db_engine @@ -871,39 +931,50 @@ async def _timeout_if_waiting_for_cluster_too_long( self, user_id: UserID, project_id: ProjectID, - run_id: PositiveInt, + comp_run: CompRunsAtDB, comp_tasks: dict[NodeIDStr, CompTaskAtDB], ) -> dict[NodeIDStr, CompTaskAtDB]: - if all( - c.state is RunningState.WAITING_FOR_CLUSTER for c in comp_tasks.values() - ): - # get latest modified task - latest_modified_of_all_tasks = max( - comp_tasks.values(), key=lambda task: task.modified - ).modified - - if ( - arrow.utcnow().datetime - latest_modified_of_all_tasks - ) > datetime.timedelta(minutes=_MAX_WAITING_FOR_CLUSTER_TIMEOUT_IN_MIN): - await CompTasksRepository.instance( - self.db_engine - ).update_project_tasks_state( - project_id, - run_id, - [NodeID(idstr) for idstr in comp_tasks], - RunningState.FAILED, - optional_progress=1.0, - optional_stopped=arrow.utcnow().datetime, - ) - for task in comp_tasks.values(): - task.state = RunningState.FAILED - msg = "Timed-out waiting for computational cluster! Please try again and/or contact Osparc support." 
- _logger.error(msg) - await publish_project_log( - self.rabbitmq_client, - user_id, - project_id, - log=msg, - log_level=logging.ERROR, - ) + if comp_run.result is not RunningState.WAITING_FOR_CLUSTER: + return comp_tasks + + tasks_waiting_for_cluster = [ + t + for t in comp_tasks.values() + if t.state is RunningState.WAITING_FOR_CLUSTER + ] + if not tasks_waiting_for_cluster: + return comp_tasks + + # get latest modified task + latest_modified_of_all_tasks = max( + tasks_waiting_for_cluster, key=lambda task: task.modified + ).modified + + if ( + arrow.utcnow().datetime - latest_modified_of_all_tasks + ) > self.settings.COMPUTATIONAL_BACKEND_MAX_WAITING_FOR_CLUSTER_TIMEOUT: + await CompTasksRepository.instance( + self.db_engine + ).update_project_tasks_state( + project_id, + comp_run.run_id, + [task.node_id for task in tasks_waiting_for_cluster], + RunningState.FAILED, + optional_progress=1.0, + optional_stopped=arrow.utcnow().datetime, + ) + for task in tasks_waiting_for_cluster: + task.state = RunningState.FAILED + msg = user_message( + "The system has timed out while waiting for computational resources. Please try running your project again or contact oSparc support if this issue persists.", + _version=1, + ) + _logger.error(msg) + await publish_project_log( + self.rabbitmq_client, + user_id, + project_id, + log=msg, + log_level=logging.ERROR, + ) return comp_tasks diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_dask.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_dask.py index 39a19d7cc5f3..56c436a7be36 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_dask.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_dask.py @@ -1,18 +1,17 @@ -import asyncio import contextlib import logging from collections.abc import AsyncIterator, Callable from contextlib import asynccontextmanager from dataclasses import dataclass -from typing import Any +from typing import Any, Final import arrow +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from dask_task_models_library.container_tasks.errors import TaskCancelledError from dask_task_models_library.container_tasks.events import ( TaskProgressEvent, ) from dask_task_models_library.container_tasks.io import TaskOutputData -from dask_task_models_library.container_tasks.utils import parse_dask_job_id from models_library.clusters import BaseCluster from models_library.errors import ErrorDict from models_library.projects import ProjectID @@ -23,12 +22,18 @@ from models_library.users import UserID from pydantic import PositiveInt from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE -from servicelib.logging_utils import log_catch +from servicelib.logging_utils import log_catch, log_context +from servicelib.redis._semaphore_decorator import ( + with_limited_concurrency_cm, +) +from servicelib.utils import limited_as_completed, limited_gather +from ..._meta import APP_NAME from ...core.errors import ( ComputationalBackendNotConnectedError, ComputationalBackendOnDemandNotReadyError, - TaskSchedulingError, + ComputationalBackendTaskResultsNotReadyError, + PortsValidationError, ) from ...models.comp_runs import CompRunsAtDB, Iteration, RunMetadataDict from ...models.comp_tasks import CompTaskAtDB @@ -43,12 +48,13 @@ publish_service_stopped_metrics, ) from ..clusters_keeper import get_or_create_on_demand_cluster -from 
..dask_client import DaskClient, PublishedComputationTask +from ..dask_client import DaskClient from ..dask_clients_pool import DaskClientsPool from ..db.repositories.comp_runs import ( CompRunsRepository, ) from ..db.repositories.comp_tasks import CompTasksRepository +from ._models import TaskStateTracker from ._scheduler_base import BaseCompScheduler from ._utils import ( WAITING_FOR_START_STATES, @@ -56,6 +62,11 @@ _logger = logging.getLogger(__name__) +_DASK_CLIENT_RUN_REF: Final[str] = "{user_id}:{project_id}:{run_id}" +_TASK_RETRIEVAL_ERROR_TYPE: Final[str] = "task-result-retrieval-timeout" +_TASK_RETRIEVAL_ERROR_CONTEXT_TIME_KEY: Final[str] = "check_time" +_PUBLICATION_CONCURRENCY_LIMIT: Final[int] = 10 + @asynccontextmanager async def _cluster_dask_client( @@ -63,6 +74,8 @@ async def _cluster_dask_client( scheduler: "DaskScheduler", *, use_on_demand_clusters: bool, + project_id: ProjectID, + run_id: PositiveInt, run_metadata: RunMetadataDict, ) -> AsyncIterator[DaskClient]: cluster: BaseCluster = scheduler.settings.default_cluster @@ -72,7 +85,27 @@ async def _cluster_dask_client( user_id=user_id, wallet_id=run_metadata.get("wallet_id"), ) - async with scheduler.dask_clients_pool.acquire(cluster) as client: + + @with_limited_concurrency_cm( + scheduler.redis_client, + key=f"{APP_NAME}-cluster-user_id_{user_id}-wallet_id_{run_metadata.get('wallet_id')}", + capacity=scheduler.settings.COMPUTATIONAL_BACKEND_PER_CLUSTER_MAX_DISTRIBUTED_CONCURRENT_CONNECTIONS, + blocking=True, + blocking_timeout=None, + ) + @asynccontextmanager + async def _acquire_client( + user_id: UserID, scheduler: "DaskScheduler" + ) -> AsyncIterator[DaskClient]: + async with scheduler.dask_clients_pool.acquire( + cluster, + ref=_DASK_CLIENT_RUN_REF.format( + user_id=user_id, project_id=project_id, run_id=run_id + ), + ) as client: + yield client + + async with _acquire_client(user_id, scheduler) as client: yield client @@ -101,6 +134,8 @@ async def _start_tasks( user_id, self, use_on_demand_clusters=comp_run.use_on_demand_clusters, + project_id=comp_run.project_uuid, + run_id=comp_run.run_id, run_metadata=comp_run.metadata, ) as client: # Change the tasks state to PENDING @@ -112,33 +147,31 @@ async def _start_tasks( RunningState.PENDING, ) # each task is started independently - results: list[list[PublishedComputationTask]] = await asyncio.gather( - *( - client.send_computation_tasks( - user_id=user_id, - project_id=project_id, - tasks={node_id: task.image}, - hardware_info=task.hardware_info, - callback=wake_up_callback, - metadata=comp_run.metadata, - resource_tracking_run_id=ServiceRunID.get_resource_tracking_run_id_for_computational( - user_id, project_id, node_id, comp_run.iteration - ), - ) - for node_id, task in scheduled_tasks.items() - ), - ) - # update the database so we do have the correct job_ids there - await asyncio.gather( - *( - comp_tasks_repo.update_project_task_job_id( - project_id, task.node_id, comp_run.run_id, task.job_id - ) - for task_sents in results - for task in task_sents + for node_id, task in scheduled_tasks.items(): + published_tasks = await client.send_computation_tasks( + user_id=user_id, + project_id=project_id, + tasks={node_id: task.image}, + hardware_info=task.hardware_info, + callback=wake_up_callback, + metadata=comp_run.metadata, + resource_tracking_run_id=ServiceRunID.get_resource_tracking_run_id_for_computational( + user_id, project_id, node_id, comp_run.iteration + ), + ) + + # update the database so we do have the correct job_ids there + await limited_gather( + *( + 
comp_tasks_repo.update_project_task_job_id( + project_id, task.node_id, comp_run.run_id, task.job_id + ) + for task in published_tasks + ), + log=_logger, + limit=1, ) - ) async def _get_tasks_status( self, @@ -151,6 +184,8 @@ async def _get_tasks_status( user_id, self, use_on_demand_clusters=comp_run.use_on_demand_clusters, + project_id=comp_run.project_uuid, + run_id=comp_run.run_id, run_metadata=comp_run.metadata, ) as client: return await client.get_tasks_status([f"{t.job_id}" for t in tasks]) @@ -165,45 +200,43 @@ async def _process_executing_tasks( tasks: list[CompTaskAtDB], comp_run: CompRunsAtDB, ) -> None: - task_progresses = [] + task_progress_events = [] try: async with _cluster_dask_client( user_id, self, use_on_demand_clusters=comp_run.use_on_demand_clusters, + project_id=comp_run.project_uuid, + run_id=comp_run.run_id, run_metadata=comp_run.metadata, ) as client: - task_progresses = await client.get_tasks_progress( - [f"{t.job_id}" for t in tasks], - ) - for task_progress_event in task_progresses: - if task_progress_event: - await CompTasksRepository( - self.db_engine - ).update_project_task_progress( - task_progress_event.task_owner.project_id, - task_progress_event.task_owner.node_id, - comp_run.run_id, - task_progress_event.progress, + task_progress_events = [ + t + for t in await client.get_tasks_progress( + [f"{t.job_id}" for t in tasks], ) + if t is not None + ] + for progress_event in task_progress_events: + await CompTasksRepository(self.db_engine).update_project_task_progress( + progress_event.task_owner.project_id, + progress_event.task_owner.node_id, + comp_run.run_id, + progress_event.progress, + ) except ComputationalBackendOnDemandNotReadyError: _logger.info("The on demand computational backend is not ready yet...") comp_tasks_repo = CompTasksRepository(self.db_engine) - await asyncio.gather( - *( - comp_tasks_repo.update_project_task_progress( - t.task_owner.project_id, - t.task_owner.node_id, - comp_run.run_id, - t.progress, - ) - for t in task_progresses - if t + for task in task_progress_events: + await comp_tasks_repo.update_project_task_progress( + task.task_owner.project_id, + task.task_owner.node_id, + comp_run.run_id, + task.progress, ) - ) - await asyncio.gather( + await limited_gather( *( publish_service_progress( self.rabbitmq_client, @@ -212,11 +245,30 @@ async def _process_executing_tasks( node_id=t.task_owner.node_id, progress=t.progress, ) - for t in task_progresses - if t - ) + for t in task_progress_events + ), + log=_logger, + limit=_PUBLICATION_CONCURRENCY_LIMIT, ) + async def _release_resources(self, comp_run: CompRunsAtDB) -> None: + """release resources used by the scheduler for a given user and project""" + with ( + log_catch(_logger, reraise=False), + log_context( + _logger, + logging.INFO, + msg=f"releasing resources for {comp_run.user_id=}, {comp_run.project_uuid=}, {comp_run.run_id=}", + ), + ): + await self.dask_clients_pool.release_client_ref( + ref=_DASK_CLIENT_RUN_REF.format( + user_id=comp_run.user_id, + project_id=comp_run.project_uuid, + run_id=comp_run.run_id, + ) + ) + async def _stop_tasks( self, user_id: UserID, tasks: list[CompTaskAtDB], comp_run: CompRunsAtDB ) -> None: @@ -226,159 +278,301 @@ async def _stop_tasks( user_id, self, use_on_demand_clusters=comp_run.use_on_demand_clusters, + project_id=comp_run.project_uuid, + run_id=comp_run.run_id, run_metadata=comp_run.metadata, ) as client: - await asyncio.gather( - *[ - client.abort_computation_task(t.job_id) - for t in tasks - if t.job_id - ] - ) - # tasks that have 
no-worker must be unpublished as these are blocking forever
-                tasks_with_no_worker = [
-                    t for t in tasks if t.state is RunningState.WAITING_FOR_RESOURCES
-                ]
-                await asyncio.gather(
-                    *[
-                        client.release_task_result(t.job_id)
-                        for t in tasks_with_no_worker
-                        if t.job_id
-                    ]
-                )
+                for t in tasks:
+                    if not t.job_id:
+                        _logger.warning("%s has no job_id, cannot be stopped", t)
+                        continue
+                    await client.abort_computation_task(t.job_id)
+                    # tasks that have no worker must be unpublished as they block forever
+                    if t.state is RunningState.WAITING_FOR_RESOURCES:
+                        await client.release_task_result(t.job_id)

     async def _process_completed_tasks(
         self,
         user_id: UserID,
-        tasks: list[CompTaskAtDB],
+        tasks: list[TaskStateTracker],
         iteration: Iteration,
         comp_run: CompRunsAtDB,
     ) -> None:
-        try:
-            async with _cluster_dask_client(
-                user_id,
-                self,
-                use_on_demand_clusters=comp_run.use_on_demand_clusters,
-                run_metadata=comp_run.metadata,
-            ) as client:
-                tasks_results = await asyncio.gather(
-                    *[client.get_task_result(t.job_id or "undefined") for t in tasks],
-                    return_exceptions=True,
-                )
-            await asyncio.gather(
-                *[
+        async with _cluster_dask_client(
+            user_id,
+            self,
+            use_on_demand_clusters=comp_run.use_on_demand_clusters,
+            project_id=comp_run.project_uuid,
+            run_id=comp_run.run_id,
+            run_metadata=comp_run.metadata,
+        ) as client:
+            tasks_results = await limited_gather(
+                *(
+                    client.get_task_result(t.current.job_id or "undefined")
+                    for t in tasks
+                ),
+                reraise=False,
+                log=_logger,
+                limit=1,  # to avoid overloading the dask scheduler
+            )
+            async for future in limited_as_completed(
+                (
                     self._process_task_result(
-                        task, result, comp_run.metadata, iteration, comp_run.run_id
+                        task,
+                        result,
+                        iteration,
+                        comp_run,
                     )
                     for task, result in zip(tasks, tasks_results, strict=True)
-                ]
+                ),
+                limit=10,  # this is not accessing the dask-scheduler (only db)
+            ):
+                with log_catch(_logger, reraise=False):
+                    task_can_be_cleaned, job_id = await future
+                    if task_can_be_cleaned and job_id:
+                        await client.release_task_result(job_id)
+
+    async def _handle_successful_run(
+        self,
+        task: CompTaskAtDB,
+        result: TaskOutputData,
+        log_error_context: dict[str, Any],
+    ) -> tuple[RunningState, SimcorePlatformStatus, list[ErrorDict], bool]:
+        assert task.job_id  # nosec
+        try:
+            await parse_output_data(
+                self.db_engine,
+                task.job_id,
+                result,
             )
-        finally:
-            async with _cluster_dask_client(
-                user_id,
-                self,
-                use_on_demand_clusters=comp_run.use_on_demand_clusters,
-                run_metadata=comp_run.metadata,
-            ) as client:
-                await asyncio.gather(
-                    *[client.release_task_result(t.job_id) for t in tasks if t.job_id]
+            return RunningState.SUCCESS, SimcorePlatformStatus.OK, [], True
+        except PortsValidationError as err:
+            _logger.exception(
+                **create_troubleshooting_log_kwargs(
+                    "Unexpected error while parsing output data, comp_tasks/comp_pipeline is not in sync with what was started",
+                    error=err,
+                    error_context=log_error_context,
                 )
             )
+            # NOTE: simcore platform state is still OK as the task ran fine, the issue is likely due to the service labels
+            return RunningState.FAILED, SimcorePlatformStatus.OK, err.get_errors(), True

-    async def _process_task_result(
+    async def _handle_computational_retrieval_error(
         self,
         task: CompTaskAtDB,
-        result: BaseException | TaskOutputData,
-        run_metadata: RunMetadataDict,
-        iteration: Iteration,
-        run_id: PositiveInt,
-    ) -> None:
-        _logger.debug("received %s result: %s", f"{task=}", f"{result=}")
-        task_final_state = RunningState.FAILED
-        simcore_platform_status = 
SimcorePlatformStatus.OK - errors: list[ErrorDict] = [] - - if task.job_id is not None: - ( - _service_key, - _service_version, - user_id, - project_id, - node_id, - ) = parse_dask_job_id(task.job_id) - - assert task.project_id == project_id # nosec - assert task.node_id == node_id # nosec - - try: - if isinstance(result, TaskOutputData): - # success! - await parse_output_data( - self.db_engine, - task.job_id, - result, - ) - task_final_state = RunningState.SUCCESS - - else: - if isinstance(result, TaskCancelledError): - task_final_state = RunningState.ABORTED - else: - task_final_state = RunningState.FAILED - errors.append( - { - "loc": ( - f"{task.project_id}", - f"{task.node_id}", - ), - "msg": f"{result}", - "type": "runtime", - } - ) - if isinstance(result, ComputationalBackendNotConnectedError): - simcore_platform_status = SimcorePlatformStatus.BAD - # we need to remove any invalid files in the storage - await clean_task_output_and_log_files_if_invalid( - self.db_engine, user_id, project_id, node_id + result: ComputationalBackendTaskResultsNotReadyError, + log_error_context: dict[str, Any], + ) -> tuple[RunningState, SimcorePlatformStatus, list[ErrorDict], bool]: + _logger.warning( + **create_troubleshooting_log_kwargs( + f"Retrieval of task {task.job_id} result timed-out", + error=result, + error_context=log_error_context, + tip="This can happen if the computational backend is overloaded with requests. It will be automatically retried again.", + ) + ) + task_errors: list[ErrorDict] = [] + check_time = arrow.utcnow() + if task.errors: + for error in task.errors: + if error["type"] == _TASK_RETRIEVAL_ERROR_TYPE: + # already had a timeout error, let's keep it + task_errors.append(error) + assert "ctx" in error # nosec + assert ( + _TASK_RETRIEVAL_ERROR_CONTEXT_TIME_KEY in error["ctx"] + ) # nosec + check_time = arrow.get( + error["ctx"][_TASK_RETRIEVAL_ERROR_CONTEXT_TIME_KEY] ) - except TaskSchedulingError as err: - task_final_state = RunningState.FAILED - simcore_platform_status = SimcorePlatformStatus.BAD - errors = err.get_errors() - _logger.debug( - "Unexpected failure while processing results of %s: %s", - f"{task=}", - f"{errors=}", + break + if not task_errors: + # first time we have this error + task_errors.append( + ErrorDict( + loc=(f"{task.project_id}", f"{task.node_id}"), + msg=f"{result}", + type=_TASK_RETRIEVAL_ERROR_TYPE, + ctx={ + _TASK_RETRIEVAL_ERROR_CONTEXT_TIME_KEY: f"{check_time}", + **log_error_context, + }, ) + ) - # resource tracking - await publish_service_resource_tracking_stopped( - self.rabbitmq_client, - ServiceRunID.get_resource_tracking_run_id_for_computational( - user_id, project_id, node_id, iteration - ), - simcore_platform_status=simcore_platform_status, + # if the task has been running for too long, we consider it failed + elapsed_time = arrow.utcnow() - check_time + if ( + elapsed_time + > self.settings.COMPUTATIONAL_BACKEND_MAX_WAITING_FOR_RETRIEVING_RESULTS + ): + _logger.error( + **create_troubleshooting_log_kwargs( + f"Task {task.job_id} failed because results could not be retrieved after {elapsed_time}", + error=result, + error_context=log_error_context, + tip="Please try again later or contact support if the problem persists.", + ) ) - # instrumentation - await publish_service_stopped_metrics( - self.rabbitmq_client, - user_id=user_id, - simcore_user_agent=run_metadata.get( - "simcore_user_agent", UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE - ), - task=task, - task_final_state=task_final_state, + return RunningState.FAILED, 
SimcorePlatformStatus.BAD, task_errors, True + # state is kept as STARTED so it will be retried + return RunningState.STARTED, SimcorePlatformStatus.BAD, task_errors, False + + @staticmethod + async def _handle_computational_backend_not_connected_error( + task: CompTaskAtDB, + result: ComputationalBackendNotConnectedError, + log_error_context: dict[str, Any], + ) -> tuple[RunningState, SimcorePlatformStatus, list[ErrorDict], bool]: + _logger.warning( + **create_troubleshooting_log_kwargs( + f"Computational backend disconnected when retrieving task {task.job_id} result", + error=result, + error_context=log_error_context, + tip="This can happen if the computational backend is temporarily disconnected. It will be automatically retried again.", ) + ) + # NOTE: the task will be set to UNKNOWN on the next processing loop - await CompTasksRepository(self.db_engine).update_project_tasks_state( - task.project_id, - run_id, - [task.node_id], - task_final_state, - errors=errors, - optional_progress=1, - optional_stopped=arrow.utcnow().datetime, + # state is kept as STARTED so it will be retried + return RunningState.STARTED, SimcorePlatformStatus.BAD, [], False + + @staticmethod + async def _handle_task_error( + task: CompTaskAtDB, + result: BaseException, + log_error_context: dict[str, Any], + ) -> tuple[RunningState, SimcorePlatformStatus, list[ErrorDict], bool]: + # the task itself failed, check why + if isinstance(result, TaskCancelledError): + _logger.info( + **create_troubleshooting_log_kwargs( + f"Task {task.job_id} was cancelled", + error=result, + error_context=log_error_context, + ) + ) + return RunningState.ABORTED, SimcorePlatformStatus.OK, [], True + + _logger.info( + **create_troubleshooting_log_kwargs( + f"Task {task.job_id} completed with errors", + error=result, + error_context=log_error_context, + ) + ) + return ( + RunningState.FAILED, + SimcorePlatformStatus.OK, + [ + ErrorDict( + loc=(f"{task.project_id}", f"{task.node_id}"), + msg=f"{result}", + type="runtime", + ) + ], + True, ) + async def _process_task_result( + self, + task: TaskStateTracker, + result: BaseException | TaskOutputData, + iteration: Iteration, + comp_run: CompRunsAtDB, + ) -> tuple[bool, str | None]: + """Returns True and the job ID if the task was successfully processed and can be released from the Dask cluster.""" + with log_context( + _logger, logging.DEBUG, msg=f"{comp_run.run_id=}, {task=}, {result=}" + ): + log_error_context = { + "user_id": comp_run.user_id, + "project_id": f"{comp_run.project_uuid}", + "node_id": f"{task.current.node_id}", + "job_id": task.current.job_id, + } + + if isinstance(result, TaskOutputData): + ( + task_final_state, + simcore_platform_status, + task_errors, + task_completed, + ) = await self._handle_successful_run( + task.current, result, log_error_context + ) + + elif isinstance(result, ComputationalBackendTaskResultsNotReadyError): + ( + task_final_state, + simcore_platform_status, + task_errors, + task_completed, + ) = await self._handle_computational_retrieval_error( + task.current, result, log_error_context + ) + elif isinstance(result, ComputationalBackendNotConnectedError): + ( + task_final_state, + simcore_platform_status, + task_errors, + task_completed, + ) = await self._handle_computational_backend_not_connected_error( + task.current, result, log_error_context + ) + else: + ( + task_final_state, + simcore_platform_status, + task_errors, + task_completed, + ) = await self._handle_task_error( + task.current, result, log_error_context + ) + + # we need to remove any 
invalid files in the storage + await clean_task_output_and_log_files_if_invalid( + self.db_engine, + comp_run.user_id, + comp_run.project_uuid, + task.current.node_id, + ) + + if task_completed: + # resource tracking + await publish_service_resource_tracking_stopped( + self.rabbitmq_client, + ServiceRunID.get_resource_tracking_run_id_for_computational( + comp_run.user_id, + comp_run.project_uuid, + task.current.node_id, + iteration, + ), + simcore_platform_status=simcore_platform_status, + ) + # instrumentation + await publish_service_stopped_metrics( + self.rabbitmq_client, + user_id=comp_run.user_id, + simcore_user_agent=comp_run.metadata.get( + "simcore_user_agent", UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE + ), + task=task.current, + task_final_state=task_final_state, + ) + + await CompTasksRepository(self.db_engine).update_project_tasks_state( + task.current.project_id, + comp_run.run_id, + [task.current.node_id], + task_final_state if task_completed else task.previous.state, + errors=task_errors, + optional_progress=1 if task_completed else None, + optional_stopped=arrow.utcnow().datetime if task_completed else None, + ) + + return task_completed, task.current.job_id + async def _task_progress_change_handler( self, event: tuple[UnixTimestamp, Any] ) -> None: diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py b/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py index 849640eebfe5..169b7e522d48 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py @@ -8,18 +8,17 @@ """ -import asyncio import logging -import traceback from collections.abc import Callable, Iterable -from copy import deepcopy from dataclasses import dataclass from http.client import HTTPException -from typing import Any, Final, cast +from typing import Final, cast import distributed +import distributed.client from aiohttp import ClientResponseError from common_library.json_serialization import json_dumps +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from dask_task_models_library.container_tasks.docker import DockerBasicAuth from dask_task_models_library.container_tasks.errors import TaskCancelledError from dask_task_models_library.container_tasks.events import TaskProgressEvent @@ -49,7 +48,6 @@ create_ec2_resource_constraint_key, ) from fastapi import FastAPI -from models_library.api_schemas_directorv2.clusters import ClusterDetails, Scheduler from models_library.clusters import ClusterAuthentication, ClusterTypeInModel from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID @@ -57,9 +55,10 @@ from models_library.resource_tracker import HardwareInfo from models_library.services import ServiceRunID from models_library.users import UserID -from pydantic import TypeAdapter, ValidationError +from pydantic import ValidationError from pydantic.networks import AnyUrl -from servicelib.logging_utils import log_catch, log_context +from servicelib.logging_utils import log_context +from servicelib.utils import limited_gather from settings_library.s3 import S3Settings from simcore_sdk.node_ports_common.exceptions import NodeportsException from simcore_sdk.node_ports_v2 import FileLinkType @@ -70,6 +69,7 @@ from ..core.errors import ( ComputationalBackendNoS3AccessError, + ComputationalBackendNotConnectedError, ComputationalBackendTaskNotFoundError, 
ComputationalBackendTaskResultsNotReadyError, TaskSchedulingError, @@ -90,10 +90,11 @@ _logger = logging.getLogger(__name__) -_DASK_DEFAULT_TIMEOUT_S: Final[int] = 5 +_DASK_DEFAULT_TIMEOUT_S: Final[int] = 10 _UserCallbackInSepThread = Callable[[], None] +_MAX_CONCURRENT_CLIENT_CONNECTIONS: Final[int] = 1 @dataclass(frozen=True, kw_only=True, slots=True) @@ -120,43 +121,42 @@ async def create( tasks_file_link_type: FileLinkType, cluster_type: ClusterTypeInModel, ) -> "DaskClient": - _logger.info( - "Initiating connection to %s with auth: %s, type: %s", - f"dask-scheduler at {endpoint}", - authentication, - cluster_type, - ) - async for attempt in AsyncRetrying( - reraise=True, - before_sleep=before_sleep_log(_logger, logging.INFO), - wait=wait_fixed(0.3), - stop=stop_after_attempt(3), + with log_context( + _logger, + logging.INFO, + msg=f"create dask client to dask-scheduler at {endpoint=} with {authentication=}, {cluster_type=}", ): - with attempt: - _logger.debug( - "Connecting to %s, attempt %s...", - endpoint, - attempt.retry_state.attempt_number, - ) - backend = await connect_to_dask_scheduler(endpoint, authentication) - dask_utils.check_scheduler_status(backend.client) - instance = cls( - app=app, - backend=backend, - settings=settings, - tasks_file_link_type=tasks_file_link_type, - cluster_type=cluster_type, - ) - _logger.info( - "Connection to %s succeeded [%s]", - f"dask-scheduler at {endpoint}", - json_dumps(attempt.retry_state.retry_object.statistics), - ) - _logger.info( - "Scheduler info:\n%s", - json_dumps(backend.client.scheduler_info(), indent=2), - ) - return instance + async for attempt in AsyncRetrying( + reraise=True, + before_sleep=before_sleep_log(_logger, logging.INFO), + wait=wait_fixed(0.3), + stop=stop_after_attempt(3), + ): + with attempt: + _logger.debug( + "Connecting to %s, attempt %s...", + endpoint, + attempt.retry_state.attempt_number, + ) + backend = await connect_to_dask_scheduler(endpoint, authentication) + dask_utils.check_scheduler_status(backend.client) + instance = cls( + app=app, + backend=backend, + settings=settings, + tasks_file_link_type=tasks_file_link_type, + cluster_type=cluster_type, + ) + _logger.info( + "Connection to %s succeeded [%s]", + f"dask-scheduler at {endpoint}", + json_dumps(attempt.retry_state.retry_object.statistics), + ) + _logger.info( + "Scheduler info:\n%s", + json_dumps(backend.client.scheduler_info(), indent=2), + ) + return instance # this is to satisfy pylance err_msg = "Could not create client" raise ValueError(err_msg) @@ -242,17 +242,14 @@ def _comp_sidecar_fct( ) # NOTE: the callback is running in a secondary thread, and takes a future as arg task_future.add_done_callback(lambda _: callback()) - await distributed.Variable(job_id, client=self.backend.client).set( - task_future - ) await dask_utils.wrap_client_async_routine( self.backend.client.publish_dataset(task_future, name=job_id) ) - _logger.debug( + _logger.info( "Dask task %s started [%s]", - f"{task_future.key=}", + f"{job_id=}", f"{node_image.command=}", ) return PublishedComputationTask(node_id=node_id, job_id=DaskJobID(job_id)) @@ -426,7 +423,11 @@ async def _get_task_progress(job_id: str) -> TaskProgressEvent | None: # we are interested in the last event return TaskProgressEvent.model_validate_json(dask_events[-1][1]) - return await asyncio.gather(*(_get_task_progress(job_id) for job_id in job_ids)) + return await limited_gather( + *(_get_task_progress(job_id) for job_id in job_ids), + log=_logger, + limit=_MAX_CONCURRENT_CLIENT_CONNECTIONS, + ) async def 
get_tasks_status(self, job_ids: Iterable[str]) -> list[RunningState]:
         dask_utils.check_scheduler_is_still_the_same(
@@ -447,36 +448,69 @@ async def _get_task_state(job_id: str) -> RunningState:
             parsed_event = TaskLifeCycleState.model_validate(dask_events[-1][1])

             if parsed_event.state == RunningState.FAILED:
+                log_error_context = {
+                    "job_id": job_id,
+                    "dask-scheduler": self.backend.scheduler_id,
+                }
                 try:
                     # find out if this was a cancellation
-                    var = distributed.Variable(job_id, client=self.backend.client)
-                    future: distributed.Future = await var.get(
+                    task_future: distributed.Future = (
+                        await dask_utils.wrap_client_async_routine(
+                            self.backend.client.get_dataset(name=job_id)
+                        )
+                    )
+                    exception = await task_future.exception(
                         timeout=_DASK_DEFAULT_TIMEOUT_S
                     )
-                    exception = await future.exception(timeout=_DASK_DEFAULT_TIMEOUT_S)
                     assert isinstance(exception, Exception)  # nosec

                     if isinstance(exception, TaskCancelledError):
+                        _logger.info(
+                            **create_troubleshooting_log_kwargs(
+                                f"Task {job_id} was aborted by user",
+                                error=exception,
+                                error_context=log_error_context,
+                            )
+                        )
                         return RunningState.ABORTED

                     assert exception  # nosec
-                    _logger.warning(
-                        "Task %s completed in error:\n%s\nTrace:\n%s",
-                        job_id,
-                        exception,
-                        "".join(traceback.format_exception(exception)),
+                    _logger.info(
+                        **create_troubleshooting_log_kwargs(
+                            f"Task {job_id} completed with an error",
+                            error=exception,
+                            error_context=log_error_context,
+                        )
                     )
                     return RunningState.FAILED
-                except TimeoutError:
+                except TimeoutError as exc:
+                    _logger.exception(
+                        **create_troubleshooting_log_kwargs(
+                            f"Task {job_id} exception could not be retrieved due to timeout",
+                            error=exc,
+                            error_context=log_error_context,
+                            tip="The dask-scheduler is probably under load; this should resolve itself later.",
+                        ),
+                    )
+                    return RunningState.UNKNOWN
+                except KeyError as exc:
+                    # the task does not exist
                     _logger.warning(
-                        "Task %s could not be retrieved from dask-scheduler, it is lost\n"
-                        "TIP:If the task was unpublished this can happen, or if the dask-scheduler was restarted.",
-                        job_id,
+                        **create_troubleshooting_log_kwargs(
+                            f"Task {job_id} not found. State is UNKNOWN.",
+                            error=exc,
+                            error_context=log_error_context,
+                            tip="If the task is supposed to exist, the dask-scheduler has probably restarted. 
Check its status.", + ), ) return RunningState.UNKNOWN return parsed_event.state - return await asyncio.gather(*(_get_task_state(job_id) for job_id in job_ids)) + return await limited_gather( + *(_get_task_state(job_id) for job_id in job_ids), + log=_logger, + limit=_MAX_CONCURRENT_CLIENT_CONNECTIONS, + ) async def abort_computation_task(self, job_id: str) -> None: # Dask future may be cancelled, but only a future that was not already taken by @@ -504,6 +538,8 @@ async def abort_computation_task(self, job_id: str) -> None: async def get_task_result(self, job_id: str) -> TaskOutputData: _logger.debug("getting result of %s", f"{job_id=}") + dask_utils.check_communication_with_scheduler_is_open(self.backend.client) + dask_utils.check_scheduler_status(self.backend.client) try: task_future: distributed.Future = ( await dask_utils.wrap_client_async_routine( @@ -518,16 +554,15 @@ async def get_task_result(self, job_id: str) -> TaskOutputData: raise ComputationalBackendTaskNotFoundError(job_id=job_id) from exc except distributed.TimeoutError as exc: raise ComputationalBackendTaskResultsNotReadyError(job_id=job_id) from exc + except ( + distributed.client.FutureCancelledError, + distributed.client.FuturesCancelledError, + ) as exc: + raise ComputationalBackendNotConnectedError from exc async def release_task_result(self, job_id: str) -> None: _logger.debug("releasing results for %s", f"{job_id=}") try: - # NOTE: The distributed Variable holds the future of the tasks in the dask-scheduler - # Alas, deleting the variable is done asynchronously and there is no way to ensure - # the variable was effectively deleted. - # This is annoying as one can re-create the variable without error. - var = distributed.Variable(job_id, client=self.backend.client) - await asyncio.get_event_loop().run_in_executor(None, var.delete) # first check if the key exists await dask_utils.wrap_client_async_routine( self.backend.client.get_dataset(name=job_id) @@ -539,50 +574,3 @@ async def release_task_result(self, job_id: str) -> None: except KeyError: _logger.warning("Unknown task cannot be unpublished: %s", f"{job_id=}") - - async def get_cluster_details(self) -> ClusterDetails: - dask_utils.check_scheduler_is_still_the_same( - self.backend.scheduler_id, self.backend.client - ) - dask_utils.check_communication_with_scheduler_is_open(self.backend.client) - dask_utils.check_scheduler_status(self.backend.client) - scheduler_info = self.backend.client.scheduler_info() - scheduler_status = self.backend.client.status - dashboard_link = self.backend.client.dashboard_link - - def _get_worker_used_resources( - dask_scheduler: distributed.Scheduler, - ) -> dict[str, dict]: - used_resources = {} - for worker_name, worker_state in dask_scheduler.workers.items(): - used_resources[worker_name] = worker_state.used_resources - return used_resources - - with log_catch(_logger, reraise=False): - # NOTE: this runs directly on the dask-scheduler and may rise exceptions - used_resources_per_worker: dict[str, dict[str, Any]] = ( - await dask_utils.wrap_client_async_routine( - self.backend.client.run_on_scheduler(_get_worker_used_resources) - ) - ) - - # let's update the scheduler info, with default to 0s since sometimes - # workers are destroyed/created without us knowing right away - for worker_name, worker_info in scheduler_info.get("workers", {}).items(): - used_resources: dict[str, float] = deepcopy( - worker_info.get("resources", {}) - ) - # reset default values - for res_name in used_resources: - used_resources[res_name] = 0 - # if the 
scheduler has info, let's override them - used_resources = used_resources_per_worker.get( - worker_name, used_resources - ) - worker_info.update(used_resources=used_resources) - - assert dashboard_link # nosec - return ClusterDetails( - scheduler=Scheduler(status=scheduler_status, **scheduler_info), - dashboard_link=TypeAdapter(AnyUrl).validate_python(dashboard_link), - ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dask_clients_pool.py b/services/director-v2/src/simcore_service_director_v2/modules/dask_clients_pool.py index 31177b5a6162..b4f75c68d725 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dask_clients_pool.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dask_clients_pool.py @@ -1,5 +1,6 @@ import asyncio import logging +from collections import defaultdict from collections.abc import AsyncIterator from contextlib import asynccontextmanager from dataclasses import dataclass, field @@ -8,6 +9,7 @@ from fastapi import FastAPI from models_library.clusters import BaseCluster, ClusterTypeInModel from pydantic import AnyUrl +from servicelib.logging_utils import log_context from ..core.errors import ( ComputationalBackendNotConnectedError, @@ -19,10 +21,11 @@ from ..utils.dask_client_utils import TaskHandlers from .dask_client import DaskClient -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) _ClusterUrl: TypeAlias = AnyUrl +ClientRef: TypeAlias = str @dataclass @@ -32,6 +35,11 @@ class DaskClientsPool: _client_acquisition_lock: asyncio.Lock = field(init=False) _cluster_to_client_map: dict[_ClusterUrl, DaskClient] = field(default_factory=dict) _task_handlers: TaskHandlers | None = None + # Track references to each client by endpoint + _client_to_refs: defaultdict[_ClusterUrl, set[ClientRef]] = field( + default_factory=lambda: defaultdict(set) + ) + _ref_to_clients: dict[ClientRef, _ClusterUrl] = field(default_factory=dict) def __post_init__(self): # NOTE: to ensure the correct loop is used @@ -59,54 +67,102 @@ async def delete(self) -> None: *[client.delete() for client in self._cluster_to_client_map.values()], return_exceptions=True, ) + self._cluster_to_client_map.clear() + self._client_to_refs.clear() + self._ref_to_clients.clear() + + async def release_client_ref(self, ref: ClientRef) -> None: + """Release a dask client reference by its ref. + + If all the references to the client are released, + the client will be deleted from the pool. + This method is thread-safe and can be called concurrently. 
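+        Releasing a reference that was never acquired (or was already released) is a no-op.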
+ """ + async with self._client_acquisition_lock: + # Find which endpoint this ref belongs to + if cluster_endpoint := self._ref_to_clients.pop(ref, None): + # we have a client, remove our reference and check if there are any more references + assert ref in self._client_to_refs[cluster_endpoint] # nosec + self._client_to_refs[cluster_endpoint].discard(ref) + + # If we found an endpoint with no more refs, clean it up + if not self._client_to_refs[cluster_endpoint] and ( + dask_client := self._cluster_to_client_map.pop( + cluster_endpoint, None + ) + ): + _logger.info( + "Last reference to client %s released, deleting client", + cluster_endpoint, + ) + await dask_client.delete() + _logger.debug( + "Remaining clients: %s", + [f"{k}" for k in self._cluster_to_client_map], + ) @asynccontextmanager - async def acquire(self, cluster: BaseCluster) -> AsyncIterator[DaskClient]: + async def acquire( + self, cluster: BaseCluster, *, ref: ClientRef + ) -> AsyncIterator[DaskClient]: + """Returns a dask client for the given cluster. + + This method is thread-safe and can be called concurrently. + If the cluster is not found in the pool, it will create a new dask client for it. + + The passed reference is used to track the client usage, user should call + `release_client_ref` to release the client reference when done. + """ + async def _concurently_safe_acquire_client() -> DaskClient: async with self._client_acquisition_lock: - dask_client = self._cluster_to_client_map.get(cluster.endpoint) - - # we create a new client if that cluster was never used before - logger.debug( - "acquiring connection to cluster %s:%s", - cluster.endpoint, - cluster.name, - ) - if not dask_client: - tasks_file_link_type = ( - self.settings.COMPUTATIONAL_BACKEND_DEFAULT_FILE_LINK_TYPE - ) - if cluster == self.settings.default_cluster: + with log_context( + _logger, + logging.DEBUG, + f"acquire dask client for {cluster.name=}:{cluster.endpoint}", + ): + dask_client = self._cluster_to_client_map.get(cluster.endpoint) + if not dask_client: tasks_file_link_type = ( - self.settings.COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_FILE_LINK_TYPE + self.settings.COMPUTATIONAL_BACKEND_DEFAULT_FILE_LINK_TYPE ) - if cluster.type == ClusterTypeInModel.ON_DEMAND.value: - tasks_file_link_type = ( - self.settings.COMPUTATIONAL_BACKEND_ON_DEMAND_CLUSTERS_FILE_LINK_TYPE + if cluster == self.settings.default_cluster: + tasks_file_link_type = ( + self.settings.COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_FILE_LINK_TYPE + ) + if cluster.type == ClusterTypeInModel.ON_DEMAND.value: + tasks_file_link_type = ( + self.settings.COMPUTATIONAL_BACKEND_ON_DEMAND_CLUSTERS_FILE_LINK_TYPE + ) + self._cluster_to_client_map[cluster.endpoint] = dask_client = ( + await DaskClient.create( + app=self.app, + settings=self.settings, + endpoint=cluster.endpoint, + authentication=cluster.authentication, + tasks_file_link_type=tasks_file_link_type, + cluster_type=cluster.type, + ) ) - self._cluster_to_client_map[ - cluster.endpoint - ] = dask_client = await DaskClient.create( - app=self.app, - settings=self.settings, - endpoint=cluster.endpoint, - authentication=cluster.authentication, - tasks_file_link_type=tasks_file_link_type, - cluster_type=cluster.type, - ) - if self._task_handlers: - dask_client.register_handlers(self._task_handlers) + if self._task_handlers: + dask_client.register_handlers(self._task_handlers) - logger.debug("created new client to cluster %s", f"{cluster=}") - logger.debug( - "list of clients: %s", f"{self._cluster_to_client_map=}" + # Track the reference + 
self._client_to_refs[cluster.endpoint].add(ref) + self._ref_to_clients[ref] = cluster.endpoint + + _logger.debug( + "Client %s now has %d references", + cluster.endpoint, + len(self._client_to_refs[cluster.endpoint]), ) - assert dask_client # nosec - return dask_client + assert dask_client # nosec + return dask_client try: dask_client = await _concurently_safe_acquire_client() + except Exception as exc: raise DaskClientAcquisisitonError(cluster=cluster, error=exc) from exc @@ -129,7 +185,7 @@ async def on_startup() -> None: app=app, settings=settings ) - logger.info( + _logger.info( "Default cluster is set to %s", f"{settings.default_cluster!r}", ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/__init__.py b/services/director-v2/src/simcore_service_director_v2/modules/db/__init__.py index 34a955cdfc25..852042c299d6 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/__init__.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/__init__.py @@ -6,10 +6,12 @@ from servicelib.fastapi.db_asyncpg_engine import get_engine as get_db_engine from settings_library.postgres import PostgresSettings +from ..._meta import APP_NAME + def setup(app: FastAPI, settings: PostgresSettings) -> None: async def on_startup() -> None: - await connect_to_db(app, settings) + await connect_to_db(app, settings, application_name=APP_NAME) async def on_shutdown() -> None: await close_db_connection(app) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_pipelines.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_pipelines.py index b01886564c98..d6faaf3d3624 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_pipelines.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_pipelines.py @@ -42,16 +42,12 @@ async def upsert_pipeline( **pipeline_at_db.model_dump(mode="json", by_alias=True) ) # FIXME: This is not a nice thing. this part of the information should be kept in comp_runs. 
- update_exclusion_policy = set() - if not dag_graph.nodes(): - update_exclusion_policy.add("dag_adjacency_list") on_update_stmt = insert_stmt.on_conflict_do_update( index_elements=[comp_pipeline.c.project_id], set_=pipeline_at_db.model_dump( mode="json", by_alias=True, exclude_unset=True, - exclude=update_exclusion_policy, ), ) async with self.db_engine.begin() as conn: diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py index 19989577215b..0a28464bb347 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py @@ -6,8 +6,12 @@ import asyncpg # type: ignore[import-untyped] import sqlalchemy as sa import sqlalchemy.exc as sql_exc -from models_library.api_schemas_directorv2.comp_runs import ComputationRunRpcGet +from models_library.api_schemas_directorv2.comp_runs import ( + ComputationCollectionRunRpcGet, + ComputationRunRpcGet, +) from models_library.basic_types import IDStr +from models_library.computations import CollectionRunID from models_library.projects import ProjectID from models_library.projects_state import RunningState from models_library.rest_ordering import OrderBy, OrderDirection @@ -89,6 +93,30 @@ def _handle_foreign_key_violation( raise exc_type(**{k: error_keys.get(k) for k in exc_keys}) +def _resolve_grouped_state(states: list[RunningState]) -> RunningState: + # If any state is not a final state, return STARTED + + final_states = { + RunningState.FAILED, + RunningState.ABORTED, + RunningState.SUCCESS, + RunningState.UNKNOWN, # NOTE: this is NOT a final state, but happens when tasks are missing + } + if any(state not in final_states for state in states): + return RunningState.STARTED + # All are final states + if all(state == RunningState.SUCCESS for state in states): + return RunningState.SUCCESS + if any(state == RunningState.FAILED for state in states): + return RunningState.FAILED + if any(state == RunningState.ABORTED for state in states): + return RunningState.ABORTED + if any(state == RunningState.UNKNOWN for state in states): + return RunningState.UNKNOWN + # Fallback (should not happen) + return RunningState.STARTED + + class CompRunsRepository(BaseRepository): async def get( self, @@ -118,6 +146,22 @@ async def get( raise ComputationalRunNotFoundError return CompRunsAtDB.model_validate(row) + async def get_latest_run_by_project( + self, + project_id: ProjectID, + ) -> CompRunsAtDB: + async with pass_or_acquire_connection(self.db_engine) as conn: + result = await conn.execute( + sa.select(comp_runs) + .where(comp_runs.c.project_uuid == f"{project_id}") + .order_by(desc(comp_runs.c.run_id)) + .limit(1) + ) + row = result.one_or_none() + if not row: + raise ComputationalRunNotFoundError + return CompRunsAtDB.model_validate(row) + async def list_( self, *, @@ -279,11 +323,14 @@ async def list_for_user__only_latest_iterations( total_count = await conn.scalar(count_query) items = [ - ComputationRunRpcGet.model_validate( - { - **row, - "state": DB_TO_RUNNING_STATE[row["state"]], - } + ComputationRunRpcGet( + project_uuid=row.project_uuid, + iteration=row.iteration, + state=DB_TO_RUNNING_STATE[row.state], + info=row.info, + submitted_at=row.submitted_at, + started_at=row.started_at, + ended_at=row.ended_at, ) async for row in await conn.stream(list_query) ] @@ -339,17 +386,134 @@ async def 
list_for_user_and_project_all_iterations( total_count = await conn.scalar(count_query) items = [ - ComputationRunRpcGet.model_validate( - { - **row, - "state": DB_TO_RUNNING_STATE[row["state"]], - } + ComputationRunRpcGet( + project_uuid=row.project_uuid, + iteration=row.iteration, + state=DB_TO_RUNNING_STATE[row.state], + info=row.info, + submitted_at=row.submitted_at, + started_at=row.started_at, + ended_at=row.ended_at, ) async for row in await conn.stream(list_query) ] return cast(int, total_count), items + async def list_all_collection_run_ids_for_user_currently_running_computations( + self, + *, + product_name: str, + user_id: UserID, + ) -> list[CollectionRunID]: + list_query = ( + sa.select( + comp_runs.c.collection_run_id, + ) + .where( + (comp_runs.c.user_id == user_id) + & ( + comp_runs.c.metadata["product_name"].astext == product_name + ) # <-- NOTE: We might create a separate column for this for fast retrieval + & ( + comp_runs.c.result.in_( + [ + RUNNING_STATE_TO_DB[item] + for item in RunningState.list_running_states() + ] + ) + ) + ) + .distinct() + ) + + async with pass_or_acquire_connection(self.db_engine) as conn: + return [ + CollectionRunID(row[0]) async for row in await conn.stream(list_query) + ] + + async def list_group_by_collection_run_id( + self, + *, + product_name: str, + user_id: UserID, + project_ids_or_none: list[ProjectID] | None = None, + collection_run_ids_or_none: list[CollectionRunID] | None = None, + # pagination + offset: int, + limit: int, + ) -> tuple[int, list[ComputationCollectionRunRpcGet]]: + base_select_query = sa.select( + comp_runs.c.collection_run_id, + sa.func.array_agg(comp_runs.c.project_uuid).label("project_ids"), + sa.func.array_agg(comp_runs.c.result).label("states"), + # For simplicity, we use any metadata from the collection (first one in aggregation order): + sa.literal_column("(jsonb_agg(comp_runs.metadata))[1]").label("info"), + sa.func.min(comp_runs.c.created).label("submitted_at"), + sa.func.min(comp_runs.c.started).label("started_at"), + sa.func.min(comp_runs.c.run_id).label("min_run_id"), + sa.case( + ( + sa.func.bool_or(comp_runs.c.ended.is_(None)), + None, + ), + else_=sa.func.max(comp_runs.c.ended), + ).label("ended_at"), + ).where( + (comp_runs.c.user_id == user_id) + & (comp_runs.c.metadata["product_name"].astext == product_name) + ) + + if project_ids_or_none is not None: + base_select_query = base_select_query.where( + comp_runs.c.project_uuid.in_( + [f"{project_id}" for project_id in project_ids_or_none] + ) + ) + if collection_run_ids_or_none is not None: + base_select_query = base_select_query.where( + comp_runs.c.collection_run_id.in_( + [ + f"{collection_run_id}" + for collection_run_id in collection_run_ids_or_none + ] + ) + ) + + base_select_query_with_group_by = base_select_query.group_by( + comp_runs.c.collection_run_id + ) + + count_query = sa.select(sa.func.count()).select_from( + base_select_query_with_group_by.subquery() + ) + + # Default ordering by min_run_id descending (biggest first) + list_query = base_select_query_with_group_by.order_by( + desc(literal_column("min_run_id")) + ) + + list_query = list_query.offset(offset).limit(limit) + + async with pass_or_acquire_connection(self.db_engine) as conn: + total_count = await conn.scalar(count_query) + items = [] + async for row in await conn.stream(list_query): + db_states = [DB_TO_RUNNING_STATE[s] for s in row.states] + resolved_state = _resolve_grouped_state(db_states) + items.append( + ComputationCollectionRunRpcGet( + 
collection_run_id=row.collection_run_id, + project_ids=row.project_ids, + state=resolved_state, + info={} if row.info is None else row.info, + submitted_at=row.submitted_at, + started_at=row.started_at, + ended_at=row.ended_at, + ) + ) + return cast(int, total_count), items + async def create( self, *, @@ -359,6 +523,7 @@ async def create( metadata: RunMetadataDict, use_on_demand_clusters: bool, dag_adjacency_list: dict[str, list[str]], + collection_run_id: CollectionRunID, ) -> CompRunsAtDB: try: async with transaction_context(self.db_engine) as conn: @@ -366,7 +531,7 @@ async def create( iteration = await _get_next_iteration(conn, user_id, project_id) result = await conn.execute( - comp_runs.insert() # pylint: disable=no-value-for-parameter + comp_runs.insert() .values( user_id=user_id, project_uuid=f"{project_id}", @@ -375,6 +540,7 @@ async def create( metadata=jsonable_encoder(metadata), use_on_demand_clusters=use_on_demand_clusters, dag_adjacency_list=dag_adjacency_list, + collection_run_id=f"{collection_run_id}", ) .returning(literal_column("*")) ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs_snapshot_tasks.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs_snapshot_tasks.py index fc3c096e09bd..53bfb47f0d4f 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs_snapshot_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs_snapshot_tasks.py @@ -1,5 +1,11 @@ import logging +from typing import cast +import sqlalchemy as sa +from models_library.basic_types import IDStr +from models_library.computations import CollectionRunID +from models_library.products import ProductName +from models_library.rest_ordering import OrderBy, OrderDirection from simcore_postgres_database.utils_comp_run_snapshot_tasks import ( COMP_RUN_SNAPSHOT_TASKS_DB_COLS, ) @@ -7,14 +13,14 @@ transaction_context, ) -from ..tables import comp_run_snapshot_tasks +from ....models.comp_run_snapshot_tasks import CompRunSnapshotTaskDBGet +from ..tables import comp_run_snapshot_tasks, comp_runs from ._base import BaseRepository logger = logging.getLogger(__name__) class CompRunsSnapshotTasksRepository(BaseRepository): - async def batch_create( self, *, data: list[dict] ) -> None: # list[CompRunSnapshotTaskAtDBGet]: @@ -25,7 +31,6 @@ async def batch_create( return async with transaction_context(self.db_engine) as conn: - try: await conn.execute( comp_run_snapshot_tasks.insert().returning( @@ -36,3 +41,74 @@ async def batch_create( except Exception: logger.exception("Failed to batch create comp run snapshot tasks") raise + + async def list_computation_collection_run_tasks( + self, + *, + product_name: ProductName, + user_id: int, + collection_run_id: CollectionRunID, + # pagination + offset: int = 0, + limit: int = 20, + # ordering + order_by: OrderBy | None = None, + ) -> tuple[int, list[CompRunSnapshotTaskDBGet]]: + if order_by is None: + order_by = OrderBy(field=IDStr("snapshot_task_id")) # default ordering + + prefiltered_comp_runs = ( + sa.select( + comp_runs.c.run_id, + comp_runs.c.iteration, + ).where( + (comp_runs.c.user_id == user_id) + & (comp_runs.c.metadata["product_name"].astext == product_name) + & (comp_runs.c.collection_run_id == f"{collection_run_id}") + ) + ).subquery("prefiltered_comp_runs") + + base_select_query = sa.select( + comp_run_snapshot_tasks.c.snapshot_task_id, + comp_run_snapshot_tasks.c.run_id, + 
comp_run_snapshot_tasks.c.project_id.label("project_uuid"), + comp_run_snapshot_tasks.c.node_id, + comp_run_snapshot_tasks.c.state, + comp_run_snapshot_tasks.c.progress, + comp_run_snapshot_tasks.c.image, + comp_run_snapshot_tasks.c.start.label("started_at"), + comp_run_snapshot_tasks.c.end.label("ended_at"), + prefiltered_comp_runs.c.iteration, + ).select_from( + comp_run_snapshot_tasks.join( + prefiltered_comp_runs, + comp_run_snapshot_tasks.c.run_id == prefiltered_comp_runs.c.run_id, + ) + ) + + # Select total count from base_query + count_query = sa.select(sa.func.count()).select_from( + base_select_query.subquery() + ) + + # Ordering and pagination + if order_by.direction == OrderDirection.ASC: + list_query = base_select_query.order_by( + sa.asc(getattr(comp_run_snapshot_tasks.c, order_by.field)), + comp_run_snapshot_tasks.c.snapshot_task_id, + ) + else: + list_query = base_select_query.order_by( + sa.desc(getattr(comp_run_snapshot_tasks.c, order_by.field)), + comp_run_snapshot_tasks.c.snapshot_task_id, + ) + list_query = list_query.offset(offset).limit(limit) + + async with self.db_engine.connect() as conn: + total_count = await conn.scalar(count_query) + + items = [ + CompRunSnapshotTaskDBGet.model_validate(row, from_attributes=True) + async for row in await conn.stream(list_query) + ] + return cast(int, total_count), items diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_core.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_core.py index 31fe149ffdbb..1195930d8bd7 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_core.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_core.py @@ -23,7 +23,7 @@ from .....models.comp_tasks import CompTaskAtDB, ComputationTaskForRpcDBGet from .....modules.resource_usage_tracker_client import ResourceUsageTrackerClient from .....utils.computations import to_node_class -from .....utils.db import DB_TO_RUNNING_STATE, RUNNING_STATE_TO_DB +from .....utils.db import RUNNING_STATE_TO_DB from ....catalog import CatalogClient from ...tables import NodeClass, StateType, comp_run_snapshot_tasks, comp_tasks from .._base import BaseRepository @@ -130,12 +130,7 @@ async def list_computational_tasks_rpc_domain( total_count = await conn.scalar(count_query) items = [ - ComputationTaskForRpcDBGet.model_validate( - { - **row, - "state": DB_TO_RUNNING_STATE[row["state"]], # Convert the state - } - ) + ComputationTaskForRpcDBGet.model_validate(row, from_attributes=True) async for row in await conn.stream(list_query) ] return cast(int, total_count), items @@ -165,6 +160,8 @@ async def upsert_tasks_from_project( # NOTE: really do an upsert here because of issue https://github.com/ITISFoundation/osparc-simcore/issues/2125 async with self.db_engine.begin() as conn: list_of_comp_tasks_in_project: list[CompTaskAtDB] = ( + # WARNING: this is NOT a real repository method, it is a utility function + # that calls backend services to generate the tasks list!! Refactoring needed!! 
await _utils.generate_tasks_list_from_project( project=project, catalog_client=catalog_client, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py index 7b23eb3451a5..10103909a631 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks/_utils.py @@ -1,7 +1,7 @@ import asyncio import logging from decimal import Decimal -from typing import Any, Final, cast +from typing import Any, Final import arrow from dask_task_models_library.container_tasks.protocol import ContainerEnvsDict @@ -19,7 +19,6 @@ from models_library.resource_tracker import HardwareInfo from models_library.service_settings_labels import ( SimcoreServiceLabels, - SimcoreServiceSettingsLabel, ) from models_library.services import ( ServiceKey, @@ -118,7 +117,7 @@ def _compute_node_boot_mode(node_resources: ServiceResourcesDict) -> BootMode: def _compute_node_envs(node_labels: SimcoreServiceLabels) -> ContainerEnvsDict: node_envs = {} - for service_setting in cast(SimcoreServiceSettingsLabel, node_labels.settings): + for service_setting in node_labels.settings: if service_setting.name == "env": for complete_env in service_setting.value: parts = complete_env.split("=") @@ -401,6 +400,9 @@ async def generate_tasks_list_from_project( raise WalletNotEnoughCreditsError( wallet_name=wallet_info.wallet_name, wallet_credit_amount=wallet_info.wallet_credit_amount, + user_id=user_id, + product_name=product_name, + project_id=project.uuid, ) assert rabbitmq_rpc_client # nosec diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/users.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/users.py index 80118e2f1b6a..647f8bd6ccc6 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/users.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/users.py @@ -1,3 +1,5 @@ +from typing import cast + from models_library.users import UserID from pydantic import EmailStr, TypeAdapter from simcore_postgres_database.models.users import UserRole @@ -7,11 +9,12 @@ class UsersRepository(BaseRepository): + def _repo(self): + return UsersRepo(self.db_engine) + async def get_user_email(self, user_id: UserID) -> EmailStr: - async with self.db_engine.connect() as conn: - email = await UsersRepo.get_email(conn, user_id) - return TypeAdapter(EmailStr).validate_python(email) + email = await self._repo().get_email(user_id=user_id) + return TypeAdapter(EmailStr).validate_python(email) async def get_user_role(self, user_id: UserID) -> UserRole: - async with self.db_engine.connect() as conn: - return await UsersRepo().get_role(conn, user_id=user_id) + return cast(UserRole, await self._repo().get_role(user_id=user_id)) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/tables.py b/services/director-v2/src/simcore_service_director_v2/modules/db/tables.py index f47250b651e9..6e11cf8b40c2 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/tables.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/tables.py @@ -10,17 +10,21 @@ ) from simcore_postgres_database.models.projects import ProjectType, projects from simcore_postgres_database.models.projects_networks import 
projects_networks +from simcore_postgres_database.models.projects_nodes import projects_nodes -__all__ = [ +__all__: tuple[str, ...] = ( + "NodeClass", + "ProjectType", + "StateType", "comp_pipeline", + "comp_run_snapshot_tasks", "comp_runs", "comp_tasks", "groups_extra_properties", - "NodeClass", - "projects_networks", "projects", - "ProjectType", - "StateType", + "projects_networks", + "projects_nodes", "user_to_groups", - "comp_run_snapshot_tasks", -] +) + +# nopycln: file diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py index 5945e07b8e3d..3e43febefc4d 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py @@ -23,14 +23,16 @@ UnexpectedStatusError, ) from servicelib.fastapi.long_running_tasks.client import ( - Client, + HttpClient, + periodic_task_result, +) +from servicelib.logging_utils import log_context, log_decorator +from servicelib.long_running_tasks.models import ( ProgressCallback, ProgressMessage, ProgressPercent, TaskId, - periodic_task_result, ) -from servicelib.logging_utils import log_context, log_decorator from servicelib.utils import logged_gather from ....core.dynamic_services_settings.scheduler import ( @@ -289,8 +291,8 @@ async def submit_docker_compose_spec( dynamic_sidecar_endpoint, compose_spec=compose_spec ) - def _get_client(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> Client: - return Client( + def _get_client(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> HttpClient: + return HttpClient( app=self._app, async_client=self._async_client, base_url=f"{dynamic_sidecar_endpoint}", diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api/_core.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api/_core.py index 8a7e5d152d4d..60eeab3f9555 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api/_core.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api/_core.py @@ -11,12 +11,14 @@ from models_library.api_schemas_directorv2.services import ( DYNAMIC_SIDECAR_SERVICE_PREFIX, ) -from models_library.docker import DockerNodeID, to_simcore_runtime_docker_label_key +from models_library.docker import DockerNodeID from models_library.projects import ProjectID from models_library.projects_networks import DockerNetworkName from models_library.projects_nodes_io import NodeID from models_library.services_enums import ServiceState +from models_library.services_metadata_runtime import to_simcore_runtime_docker_label_key from servicelib.utils import logged_gather +from settings_library.docker_registry import RegistrySettings from starlette import status from tenacity import TryAgain, retry from tenacity.asyncio import AsyncRetrying @@ -98,6 +100,7 @@ def _to_snake_case(string: str) -> str: async def create_service_and_get_id( create_service_data: AioDockerServiceSpec | dict[str, Any], + registry_settings: RegistrySettings | None, ) -> ServiceId: # NOTE: ideally the argument should always be AioDockerServiceSpec # but for that we need get_dynamic_proxy_spec to return that type @@ -106,6 +109,13 @@ async def create_service_and_get_id( create_service_data, by_alias=True, exclude_unset=True ) kwargs 
= {_to_snake_case(k): v for k, v in kwargs.items()} + if registry_settings: + kwargs["auth"] = { + "username": registry_settings.REGISTRY_USER, + "password": registry_settings.REGISTRY_PW.get_secret_value(), + "serveraddress": registry_settings.resolved_registry_url, + } + kwargs["registry"] = registry_settings.resolved_registry_url logging.debug("Creating service with\n%s", json_dumps(kwargs, indent=1)) service_start_result = await client.services.create(**kwargs) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_compose_specs.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_compose_specs.py index 7ed0736d3cdb..5366190f13b0 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_compose_specs.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_compose_specs.py @@ -4,7 +4,7 @@ from common_library.json_serialization import json_dumps from fastapi.applications import FastAPI -from models_library.docker import DockerGenericTag, StandardSimcoreDockerLabels +from models_library.docker import DockerGenericTag from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID @@ -14,6 +14,7 @@ SimcoreServiceLabels, ) from models_library.services import ServiceKey, ServiceVersion +from models_library.services_metadata_runtime import SimcoreContainerLabels from models_library.services_resources import ( DEFAULT_SINGLE_SERVICE_NAME, ResourcesDict, @@ -129,7 +130,7 @@ def _update_paths_mappings( env_vars["DY_SIDECAR_PATH_INPUTS"] = f"{path_mappings.inputs_path}" env_vars["DY_SIDECAR_PATH_OUTPUTS"] = f"{path_mappings.outputs_path}" env_vars["DY_SIDECAR_STATE_PATHS"] = ( - f"{json_dumps( { f'{p}' for p in path_mappings.state_paths } )}" + f"{json_dumps({f'{p}' for p in path_mappings.state_paths})}" ) service_content["environment"] = _EnvironmentSection.export_as_list(env_vars) @@ -202,7 +203,7 @@ def _update_resource_limits_and_reservations( ] resource_limits = [ - f"{CPU_RESOURCE_LIMIT_KEY}={int(nano_cpu_limits*_NANO)}", + f"{CPU_RESOURCE_LIMIT_KEY}={int(nano_cpu_limits * _NANO)}", f"{MEM_RESOURCE_LIMIT_KEY}={mem_limits}", ] @@ -241,7 +242,7 @@ def _update_container_labels( spec_service_key, default_limits ) - label_keys = StandardSimcoreDockerLabels.model_validate( + label_keys = SimcoreContainerLabels.model_validate( { "user_id": user_id, "project_id": project_id, @@ -295,9 +296,7 @@ async def assemble_spec( # pylint: disable=too-many-arguments # noqa: PLR0913 app.state.settings.DIRECTOR_V2_DOCKER_REGISTRY ) - docker_compose_version = ( - app.state.settings.DYNAMIC_SERVICES.DYNAMIC_SCHEDULER.DYNAMIC_SIDECAR_DOCKER_COMPOSE_VERSION - ) + docker_compose_version = app.state.settings.DYNAMIC_SERVICES.DYNAMIC_SCHEDULER.DYNAMIC_SIDECAR_DOCKER_COMPOSE_VERSION egress_proxy_settings: EgressProxySettings = ( app.state.settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR_EGRESS_PROXY_SETTINGS @@ -347,19 +346,21 @@ async def assemble_spec( # pylint: disable=too-many-arguments # noqa: PLR0913 service_version=service_version, product_name=product_name, ) - simcore_service_labels = await resolve_and_substitute_session_variables_in_model( - app=app, - model=simcore_service_labels, - # NOTE: at this point all OsparcIdentifiers have to be replaced - # an error will be raised otherwise - safe=False, - user_id=user_id, - product_name=product_name, - 
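# --- Editor's note: illustrative sketch, not part of the diff above ----------
# The create_service_and_get_id() change above (docker_api/_core.py) starts
# forwarding registry credentials to the Docker service-create call. The helper
# below reproduces the shape of that auth payload in isolation;
# _FakeRegistrySettings is a hypothetical stand-in for RegistrySettings, kept
# only so the snippet runs on its own.
from dataclasses import dataclass
from typing import Any

from pydantic import SecretStr


@dataclass
class _FakeRegistrySettings:  # stand-in only; not the real settings class
    REGISTRY_USER: str
    REGISTRY_PW: SecretStr
    resolved_registry_url: str


def build_registry_auth_kwargs(
    settings: _FakeRegistrySettings | None,
) -> dict[str, Any]:
    # returns the extra kwargs merged into client.services.create(**kwargs)
    if settings is None:
        return {}
    return {
        "auth": {
            "username": settings.REGISTRY_USER,
            "password": settings.REGISTRY_PW.get_secret_value(),
            "serveraddress": settings.resolved_registry_url,
        },
        "registry": settings.resolved_registry_url,
    }


assert build_registry_auth_kwargs(None) == {}
_kwargs = build_registry_auth_kwargs(
    _FakeRegistrySettings("scu", SecretStr("adminadmin"), "registry.example.com")
)
assert _kwargs["auth"]["serveraddress"] == "registry.example.com"
# ------------------------------------------------------------------------------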
product_api_base_url=product_api_base_url, - project_id=project_id, - node_id=node_id, - service_run_id=service_run_id, - wallet_id=wallet_id, + simcore_service_labels = ( + await resolve_and_substitute_session_variables_in_model( + app=app, + model=simcore_service_labels, + # NOTE: at this point all OsparcIdentifiers have to be replaced + # an error will be raised otherwise + safe=False, + user_id=user_id, + product_name=product_name, + product_api_base_url=product_api_base_url, + project_id=project_id, + node_id=node_id, + service_run_id=service_run_id, + wallet_id=wallet_id, + ) ) add_egress_configuration( diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/proxy.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/proxy.py index eb06fa02b79a..50d0ad43072b 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/proxy.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/proxy.py @@ -1,6 +1,6 @@ from typing import Any -from models_library.docker import StandardSimcoreDockerLabels +from models_library.services_metadata_runtime import SimcoreContainerLabels from models_library.services_resources import ( CPU_10_PERCENT, CPU_100_PERCENT, @@ -35,9 +35,9 @@ def get_dynamic_proxy_spec( The proxy is used to create network isolation from the rest of the platform. """ - assert ( - scheduler_data.product_name is not None - ), "ONLY for legacy. This function should not be called with product_name==None" # nosec + assert scheduler_data.product_name is not None, ( + "ONLY for legacy. This function should not be called with product_name==None" + ) # nosec proxy_settings: DynamicSidecarProxySettings = ( dynamic_services_settings.DYNAMIC_SIDECAR_PROXY_SETTINGS @@ -48,8 +48,8 @@ def get_dynamic_proxy_spec( dynamic_services_scheduler_settings: DynamicServicesSchedulerSettings = ( dynamic_services_settings.DYNAMIC_SCHEDULER ) - webserver_settings: webserver.WebServerSettings = ( - dynamic_services_settings.WEBSERVER_SETTINGS + wb_auth_settings: webserver.WebServerSettings = ( + dynamic_services_settings.WEBSERVER_AUTH_SETTINGS ) mounts = [ @@ -99,7 +99,7 @@ def get_dynamic_proxy_spec( f"traefik.http.middlewares.{scheduler_data.proxy_service_name}-security-headers.headers.accesscontrolmaxage": "100", f"traefik.http.middlewares.{scheduler_data.proxy_service_name}-security-headers.headers.addvaryheader": "true", # auth - f"traefik.http.middlewares.{scheduler_data.proxy_service_name}-auth.forwardauth.address": f"{webserver_settings.api_base_url}/auth:check", + f"traefik.http.middlewares.{scheduler_data.proxy_service_name}-auth.forwardauth.address": f"{wb_auth_settings.api_base_url}/auth:check", f"traefik.http.middlewares.{scheduler_data.proxy_service_name}-auth.forwardauth.trustForwardHeader": "true", f"traefik.http.middlewares.{scheduler_data.proxy_service_name}-auth.forwardauth.authResponseHeaders": f"Set-Cookie,{DEFAULT_SESSION_COOKIE_NAME}", # routing @@ -116,7 +116,7 @@ def get_dynamic_proxy_spec( ), "dynamic_type": "dynamic-sidecar", # tagged as dynamic service } - | StandardSimcoreDockerLabels( + | SimcoreContainerLabels( user_id=scheduler_data.user_id, project_id=scheduler_data.project_id, node_id=scheduler_data.node_uuid, @@ -134,7 +134,7 @@ def get_dynamic_proxy_spec( "Hosts": [], "Image": f"caddy:{proxy_settings.DYNAMIC_SIDECAR_CADDY_VERSION}", "Init": True, - "Labels": 
StandardSimcoreDockerLabels( + "Labels": SimcoreContainerLabels( user_id=scheduler_data.user_id, project_id=scheduler_data.project_id, node_id=scheduler_data.node_uuid, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py index 49b9e0c56704..5dae32951c97 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/settings.py @@ -1,19 +1,19 @@ import logging from collections import deque -from typing import Any, cast +from typing import Any from common_library.json_serialization import json_dumps, json_loads from models_library.basic_types import EnvVarKey, PortInt from models_library.boot_options import BootOption from models_library.docker import ( DockerPlacementConstraint, - to_simcore_runtime_docker_label_key, ) from models_library.service_settings_labels import ( SimcoreServiceLabels, SimcoreServiceSettingLabelEntry, SimcoreServiceSettingsLabel, ) +from models_library.services_metadata_runtime import to_simcore_runtime_docker_label_key from models_library.services_resources import ( CPU_100_PERCENT, DEFAULT_SINGLE_SERVICE_NAME, @@ -495,9 +495,7 @@ async def merge_settings_before_use( # merge the settings from the all the involved services for compose_spec_key, service_labels in labels_for_involved_services.items(): - service_settings: SimcoreServiceSettingsLabel = cast( - SimcoreServiceSettingsLabel, service_labels.settings - ) + service_settings: SimcoreServiceSettingsLabel = service_labels.settings settings.extend( # inject compose spec key, used to target container specific services _add_compose_destination_containers_to_settings_entries( diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py index d7d013208cb2..3bbe927f20b9 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py @@ -8,14 +8,16 @@ from models_library.basic_types import BootModeEnum, PortInt from models_library.callbacks_mapping import CallbacksMapping from models_library.docker import ( - DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY, DockerLabelKey, DockerPlacementConstraint, - StandardSimcoreDockerLabels, - to_simcore_runtime_docker_label_key, ) from models_library.resource_tracker import HardwareInfo from models_library.service_settings_labels import SimcoreServiceSettingsLabel +from models_library.services_metadata_runtime import ( + DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY, + SimcoreContainerLabels, + to_simcore_runtime_docker_label_key, +) from pydantic import ByteSize, TypeAdapter from servicelib.rabbitmq import RabbitMQRPCClient from servicelib.rabbitmq.rpc_interfaces.efs_guardian import efs_manager @@ -91,6 +93,7 @@ def _get_environment_variables( telemetry_enabled: bool, ) -> dict[str, str]: rabbit_settings = app_settings.DIRECTOR_V2_RABBITMQ + redis_settings = app_settings.REDIS r_clone_settings = ( app_settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR.DYNAMIC_SIDECAR_R_CLONE_SETTINGS ) @@ 
-149,7 +152,6 @@ def _get_environment_variables( "DYNAMIC_SIDECAR_LOG_LEVEL": app_settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR.DYNAMIC_SIDECAR_LOG_LEVEL, "DY_SIDECAR_LOG_FORMAT_LOCAL_DEV_ENABLED": f"{app_settings.DIRECTOR_V2_LOG_FORMAT_LOCAL_DEV_ENABLED}", "POSTGRES_DB": f"{app_settings.POSTGRES.POSTGRES_DB}", - "POSTGRES_ENDPOINT": f"{app_settings.POSTGRES.POSTGRES_HOST}:{app_settings.POSTGRES.POSTGRES_PORT}", "POSTGRES_HOST": f"{app_settings.POSTGRES.POSTGRES_HOST}", "POSTGRES_PASSWORD": f"{app_settings.POSTGRES.POSTGRES_PASSWORD.get_secret_value()}", "POSTGRES_PORT": f"{app_settings.POSTGRES.POSTGRES_PORT}", @@ -163,6 +165,9 @@ def _get_environment_variables( "RABBIT_PORT": f"{rabbit_settings.RABBIT_PORT}", "RABBIT_USER": f"{rabbit_settings.RABBIT_USER}", "RABBIT_SECURE": f"{rabbit_settings.RABBIT_SECURE}", + "REDIS_SETTINGS": json_dumps( + model_dump_with_secrets(redis_settings, show_secrets=True) + ), "DY_DEPLOYMENT_REGISTRY_SETTINGS": ( json_dumps( model_dump_with_secrets( @@ -456,18 +461,16 @@ async def get_dynamic_sidecar_spec( # pylint:disable=too-many-arguments# noqa: scheduler_data.product_name is not None ), "ONLY for legacy. This function should not be called with product_name==None" # nosec - standard_simcore_docker_labels: dict[DockerLabelKey, str] = ( - StandardSimcoreDockerLabels( - user_id=scheduler_data.user_id, - project_id=scheduler_data.project_id, - node_id=scheduler_data.node_uuid, - product_name=scheduler_data.product_name, - simcore_user_agent=scheduler_data.request_simcore_user_agent, - swarm_stack_name=dynamic_services_scheduler_settings.SWARM_STACK_NAME, - memory_limit=ByteSize(0), # this should get overwritten - cpu_limit=0, # this should get overwritten - ).to_simcore_runtime_docker_labels() - ) + standard_simcore_docker_labels: dict[DockerLabelKey, str] = SimcoreContainerLabels( + user_id=scheduler_data.user_id, + project_id=scheduler_data.project_id, + node_id=scheduler_data.node_uuid, + product_name=scheduler_data.product_name, + simcore_user_agent=scheduler_data.request_simcore_user_agent, + swarm_stack_name=dynamic_services_scheduler_settings.SWARM_STACK_NAME, + memory_limit=ByteSize(0), # this should get overwritten + cpu_limit=0, # this should get overwritten + ).to_simcore_runtime_docker_labels() service_labels: dict[str, str] = ( { diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_states.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_states.py index afd44dc0f598..7978d6d57a22 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_states.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_states.py @@ -1,6 +1,7 @@ """ States from Docker Tasks and docker Containers are mapped to ServiceState. 
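# --- Editor's note: illustrative sketch, not part of the diff above ----------
# The dynamic-sidecar environment in the hunk above now ships REDIS_SETTINGS as
# a JSON blob produced with model_dump_with_secrets(..., show_secrets=True). A
# minimal stand-alone equivalent using plain pydantic; _RedisLike is a
# hypothetical stand-in for the real RedisSettings:
import json

from pydantic import BaseModel, SecretStr


class _RedisLike(BaseModel):  # stand-in only
    REDIS_HOST: str = "redis"
    REDIS_PASSWORD: SecretStr = SecretStr("adminadmin")


def dump_with_secrets(model: BaseModel) -> str:
    data = model.model_dump()
    # reveal SecretStr values so the consuming container receives usable credentials
    revealed = {
        key: value.get_secret_value() if isinstance(value, SecretStr) else value
        for key, value in data.items()
    }
    return json.dumps(revealed)


print(dump_with_secrets(_RedisLike()))  # {"REDIS_HOST": "redis", "REDIS_PASSWORD": "adminadmin"}
# ------------------------------------------------------------------------------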
""" + import logging from models_library.generated_models.docker_rest_api import ContainerState @@ -8,7 +9,7 @@ from ...models.dynamic_services_scheduler import DockerContainerInspect -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) # For all available task states SEE # https://docs.docker.com/engine/swarm/how-swarm-mode-works/swarm-task-states/ @@ -62,7 +63,7 @@ def extract_task_state(task_status: dict[str, str]) -> tuple[ServiceState, str]: - last_task_error_msg = task_status["Err"] if "Err" in task_status else "" + last_task_error_msg = task_status.get("Err", "") task_state = _TASK_STATE_TO_SERVICE_STATE[task_status["State"]] return (task_state, last_task_error_msg) @@ -89,7 +90,7 @@ def extract_containers_minimum_statuses( the lowest (considered worst) state will be forwarded to the frontend. `ServiceState` defines the order of the states. """ - logger.info("containers_inspect=%s", containers_inspect) + _logger.debug("containers_inspect=%s", containers_inspect) remapped_service_statuses = { index: _extract_container_status(value.container_state) for index, value in enumerate(containers_inspect) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/module_setup.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/module_setup.py index d1fd90644afd..5381566045c4 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/module_setup.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/module_setup.py @@ -1,12 +1,21 @@ from fastapi import FastAPI from servicelib.fastapi import long_running_tasks +from ..._meta import APP_NAME +from ...core.settings import AppSettings from . import api_client, scheduler def setup(app: FastAPI) -> None: + settings: AppSettings = app.state.settings + long_running_tasks.client.setup(app) - long_running_tasks.server.setup(app) + long_running_tasks.server.setup( + app, + redis_settings=settings.REDIS, + rabbit_settings=settings.DIRECTOR_V2_RABBITMQ, + lrt_namespace=APP_NAME, + ) async def on_startup() -> None: await api_client.setup(app) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_abc.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_abc.py index fc550e6a74db..c5e47cc3c6b6 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_abc.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_abc.py @@ -15,8 +15,7 @@ from models_library.services_types import ServicePortKey from models_library.users import UserID from models_library.wallets import WalletID -from servicelib.fastapi.long_running_tasks.client import ProgressCallback -from servicelib.fastapi.long_running_tasks.server import TaskProgress +from servicelib.long_running_tasks.models import ProgressCallback, TaskProgress class SchedulerInternalsInterface(ABC): diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py index 6ea9efc4e370..594e0e754007 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py +++ 
b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_event_create_sidecars.py @@ -30,6 +30,7 @@ DynamicSidecarSettings, PlacementSettings, ) +from .....core.settings import AppSettings from .....models.dynamic_services_scheduler import NetworkId, SchedulerData from .....utils.db import get_repository from .....utils.dict_utils import nested_update @@ -93,32 +94,35 @@ async def _create_proxy_service( swarm_network_id: NetworkId, swarm_network_name: str, ): + app_settings: AppSettings = app.state.settings proxy_settings: DynamicSidecarProxySettings = ( app.state.settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR_PROXY_SETTINGS ) scheduler_data.proxy_admin_api_port = ( proxy_settings.DYNAMIC_SIDECAR_CADDY_ADMIN_API_PORT ) - dynamic_services_settings: DynamicServicesSettings = ( app.state.settings.DYNAMIC_SERVICES ) - dynamic_sidecar_proxy_create_service_params: dict[ - str, Any - ] = get_dynamic_proxy_spec( - scheduler_data=scheduler_data, - dynamic_services_settings=dynamic_services_settings, - dynamic_sidecar_network_id=dynamic_sidecar_network_id, - swarm_network_id=swarm_network_id, - swarm_network_name=swarm_network_name, + dynamic_sidecar_proxy_create_service_params: dict[str, Any] = ( + get_dynamic_proxy_spec( + scheduler_data=scheduler_data, + dynamic_services_settings=dynamic_services_settings, + dynamic_sidecar_network_id=dynamic_sidecar_network_id, + swarm_network_id=swarm_network_id, + swarm_network_name=swarm_network_name, + ) ) _logger.debug( "dynamic-sidecar-proxy create_service_params %s", json_dumps(dynamic_sidecar_proxy_create_service_params), ) - await create_service_and_get_id(dynamic_sidecar_proxy_create_service_params) + await create_service_and_get_id( + dynamic_sidecar_proxy_create_service_params, + app_settings.DIRECTOR_V2_DOCKER_HUB_REGISTRY, + ) class CreateSidecars(DynamicSchedulerEvent): @@ -156,6 +160,7 @@ async def action(cls, app: FastAPI, scheduler_data: SchedulerData) -> None: rabbitmq_client: RabbitMQClient = app.state.rabbitmq_client await rabbitmq_client.publish(message.channel_name, message) + app_settings: AppSettings = app.state.settings dynamic_sidecar_settings: DynamicSidecarSettings = ( app.state.settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR ) @@ -244,18 +249,20 @@ async def action(cls, app: FastAPI, scheduler_data: SchedulerData) -> None: # WARNING: do NOT log, this structure has secrets in the open # If you want to log, please use an obfuscator - dynamic_sidecar_service_spec_base: AioDockerServiceSpec = await get_dynamic_sidecar_spec( - scheduler_data=scheduler_data, - dynamic_sidecar_settings=dynamic_sidecar_settings, - dynamic_services_scheduler_settings=dynamic_services_scheduler_settings, - swarm_network_id=swarm_network_id, - settings=settings, - app_settings=app.state.settings, - hardware_info=scheduler_data.hardware_info, - has_quota_support=dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_ENABLE_VOLUME_LIMITS, - metrics_collection_allowed=metrics_collection_allowed, - user_extra_properties=user_extra_properties, - rpc_client=rpc_client, + dynamic_sidecar_service_spec_base: AioDockerServiceSpec = ( + await get_dynamic_sidecar_spec( + scheduler_data=scheduler_data, + dynamic_sidecar_settings=dynamic_sidecar_settings, + dynamic_services_scheduler_settings=dynamic_services_scheduler_settings, + swarm_network_id=swarm_network_id, + settings=settings, + app_settings=app.state.settings, + hardware_info=scheduler_data.hardware_info, + 
has_quota_support=dynamic_services_scheduler_settings.DYNAMIC_SIDECAR_ENABLE_VOLUME_LIMITS, + metrics_collection_allowed=metrics_collection_allowed, + user_extra_properties=user_extra_properties, + rpc_client=rpc_client, + ) ) user_specific_service_spec = ( @@ -278,7 +285,8 @@ async def action(cls, app: FastAPI, scheduler_data: SchedulerData) -> None: ) await rabbitmq_client.publish(rabbit_message.channel_name, rabbit_message) dynamic_sidecar_id = await create_service_and_get_id( - dynamic_sidecar_service_final_spec + dynamic_sidecar_service_final_spec, + app_settings.DIRECTOR_V2_DOCKER_HUB_REGISTRY, ) # constrain service to the same node scheduler_data.dynamic_sidecar.docker_node_id = ( diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_user_services.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_user_services.py index d7dd034134bf..a0f543a83a33 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_user_services.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_user_services.py @@ -8,7 +8,7 @@ from models_library.services import ServiceVersion from models_library.services_creation import CreateServiceMetricsAdditionalParams from pydantic import TypeAdapter -from servicelib.fastapi.long_running_tasks.client import TaskId +from servicelib.long_running_tasks.models import TaskId from tenacity import RetryError from tenacity.asyncio import AsyncRetrying from tenacity.before_sleep import before_sleep_log diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py index af3c094a57ee..8f2900570aad 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_events_utils.py @@ -20,9 +20,9 @@ from models_library.user_preferences import FrontendUserPreference from models_library.users import UserID from servicelib.fastapi.http_client_thin import BaseHttpClientError -from servicelib.fastapi.long_running_tasks.client import ProgressCallback -from servicelib.fastapi.long_running_tasks.server import TaskProgress from servicelib.logging_utils import log_context +from servicelib.long_running_tasks.errors import TaskExceptionError +from servicelib.long_running_tasks.models import ProgressCallback, TaskProgress from servicelib.rabbitmq import RabbitMQClient from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient from servicelib.rabbitmq._errors import RemoteMethodNotRegisteredError @@ -65,6 +65,7 @@ UserPreferencesFrontendRepository, ) from ....director_v0 import DirectorV0Client +from ....long_running_tasks import get_long_running_client_helper from ....osparc_variables._api_auth_rpc import delete_api_key_by_key from ...api_client import ( SidecarsClient, @@ -135,7 +136,7 @@ async def service_remove_containers( await sidecars_client.stop_service( scheduler_data.endpoint, progress_callback=progress_callback ) - except BaseHttpClientError as e: + except (BaseHttpClientError, TaskExceptionError) as e: _logger.info( ( "Could not remove service containers for %s. 
" @@ -152,7 +153,7 @@ async def service_free_reserved_disk_space( scheduler_data: SchedulerData = _get_scheduler_data(app, node_id) try: await sidecars_client.free_reserved_disk_space(scheduler_data.endpoint) - except BaseHttpClientError as e: + except (BaseHttpClientError, TaskExceptionError) as e: _logger.info( ( "Could not remove service containers for %s. " @@ -217,9 +218,10 @@ async def service_remove_sidecar_proxy_docker_networks_and_volumes( if set_were_state_and_outputs_saved is not None: scheduler_data.dynamic_sidecar.were_state_and_outputs_saved = True - task_progress.update( + await task_progress.update( message="removing dynamic sidecar stack", percent=ProgressPercent(0.1) ) + await remove_dynamic_sidecar_stack( node_uuid=scheduler_data.node_uuid, swarm_stack_name=swarm_stack_name, @@ -232,7 +234,7 @@ async def service_remove_sidecar_proxy_docker_networks_and_volumes( node_id=scheduler_data.node_uuid, ) - task_progress.update(message="removing network", percent=ProgressPercent(0.2)) + await task_progress.update(message="removing network", percent=ProgressPercent(0.2)) await remove_dynamic_sidecar_network(scheduler_data.dynamic_sidecar_network_name) if scheduler_data.dynamic_sidecar.were_state_and_outputs_saved: @@ -243,7 +245,7 @@ async def service_remove_sidecar_proxy_docker_networks_and_volumes( ) else: # Remove all dy-sidecar associated volumes from node - task_progress.update( + await task_progress.update( message="removing volumes", percent=ProgressPercent(0.3) ) with log_context(_logger, logging.DEBUG, f"removing volumes '{node_uuid}'"): @@ -265,7 +267,7 @@ async def service_remove_sidecar_proxy_docker_networks_and_volumes( scheduler_data.service_name, ) - task_progress.update( + await task_progress.update( message="removing project networks", percent=ProgressPercent(0.8) ) used_projects_networks = await get_projects_networks_containers( @@ -284,10 +286,19 @@ async def service_remove_sidecar_proxy_docker_networks_and_volumes( await app.state.dynamic_sidecar_scheduler.scheduler.remove_service_from_observation( scheduler_data.node_uuid ) - task_progress.update( + await task_progress.update( message="finished removing resources", percent=ProgressPercent(1) ) + await _cleanup_long_running_tasks(app, scheduler_data.node_uuid) + + +async def _cleanup_long_running_tasks(app: FastAPI, node_id: NodeID) -> None: + long_running_client_helper = get_long_running_client_helper(app) + + sidecar_namespace = f"SIMCORE-SERVICE-DYNAMIC-SIDECAR-{node_id}" + await long_running_client_helper.cleanup(sidecar_namespace) + async def attempt_pod_removal_and_data_saving( app: FastAPI, scheduler_data: SchedulerData @@ -370,7 +381,7 @@ async def attempt_pod_removal_and_data_saving( scheduler_data.dynamic_sidecar.were_state_and_outputs_saved = True _logger.info("dynamic-sidecar saved: state and output ports") - except BaseHttpClientError as e: + except (BaseHttpClientError, TaskExceptionError) as e: _logger.error( # noqa: TRY400 ( "Could not contact dynamic-sidecar to save service " @@ -389,7 +400,10 @@ async def attempt_pod_removal_and_data_saving( raise await service_remove_sidecar_proxy_docker_networks_and_volumes( - TaskProgress.create(), app, scheduler_data.node_uuid, settings.SWARM_STACK_NAME + TaskProgress.create(), + app, + scheduler_data.node_uuid, + settings.SWARM_STACK_NAME, ) # remove sidecar's api client diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_observer.py 
b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_observer.py index 949ba98f4fe4..3ebe33ced687 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_observer.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_observer.py @@ -1,13 +1,12 @@ # pylint: disable=relative-beyond-top-level -import asyncio import logging from copy import deepcopy from math import floor from common_library.error_codes import create_error_code +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from fastapi import FastAPI -from servicelib.logging_errors import create_troubleshotting_log_kwargs from .....core.dynamic_services_settings.scheduler import ( DynamicServicesSchedulerSettings, @@ -138,8 +137,6 @@ async def observing_single_service( try: await _apply_observation_cycle(scheduler, scheduler_data) logger.debug("completed observation cycle of %s", f"{service_name=}") - except asyncio.CancelledError: # pylint: disable=try-except-raise - raise # pragma: no cover except Exception as exc: # pylint: disable=broad-except service_name = scheduler_data.service_name @@ -153,7 +150,7 @@ async def observing_single_service( error_code = create_error_code(exc) logger.exception( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( user_error_msg, error=exc, error_context={ diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler.py index 6860717238df..dfd99bced264 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler.py @@ -14,7 +14,6 @@ """ import asyncio -import contextlib import functools import logging import time @@ -23,6 +22,7 @@ from typing import Final import arrow +from common_library.async_tools import cancel_wait_task from fastapi import FastAPI from models_library.api_schemas_directorv2.dynamic_services import ( DynamicServiceCreate, @@ -41,10 +41,8 @@ from models_library.users import UserID from models_library.wallets import WalletID from pydantic import NonNegativeFloat -from servicelib.async_utils import cancel_wait_task from servicelib.background_task import create_periodic_task -from servicelib.fastapi.long_running_tasks.client import ProgressCallback -from servicelib.fastapi.long_running_tasks.server import TaskProgress +from servicelib.long_running_tasks.models import ProgressCallback, TaskProgress from servicelib.redis import RedisClientsManager, exclusive from settings_library.redis import RedisDatabase @@ -132,9 +130,7 @@ async def shutdown(self) -> None: if self._trigger_observation_queue_task is not None: await self._trigger_observation_queue.put(None) - self._trigger_observation_queue_task.cancel() - with contextlib.suppress(asyncio.CancelledError): - await self._trigger_observation_queue_task + await cancel_wait_task(self._trigger_observation_queue_task, max_delay=None) self._trigger_observation_queue_task = None self._trigger_observation_queue = Queue() @@ -272,9 +268,9 @@ async def add_service_from_scheduler_data( ) raise DynamicSidecarError(msg=msg) - self._inverse_search_mapping[ - scheduler_data.node_uuid - ] = scheduler_data.service_name + 
self._inverse_search_mapping[scheduler_data.node_uuid] = ( + scheduler_data.service_name + ) self._to_observe[scheduler_data.service_name] = scheduler_data self._enqueue_observation_from_service_name(scheduler_data.service_name) logger.debug("Added service '%s' to observe", scheduler_data.service_name) @@ -374,9 +370,9 @@ async def mark_service_for_removal( dynamic_scheduler: DynamicServicesSchedulerSettings = ( self.app.state.settings.DYNAMIC_SERVICES.DYNAMIC_SCHEDULER ) - self._service_observation_task[ - service_name - ] = self.__create_observation_task(dynamic_scheduler, service_name) + self._service_observation_task[service_name] = ( + self.__create_observation_task(dynamic_scheduler, service_name) + ) logger.debug("Service '%s' marked for removal from scheduler", service_name) @@ -575,9 +571,9 @@ async def _run_trigger_observation_queue_task(self) -> None: if self._service_observation_task.get(service_name) is None: logger.info("Create observation task for service %s", service_name) - self._service_observation_task[ - service_name - ] = self.__create_observation_task(dynamic_scheduler, service_name) + self._service_observation_task[service_name] = ( + self.__create_observation_task(dynamic_scheduler, service_name) + ) logger.info("Scheduler 'trigger observation queue task' was shut down") diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler_utils.py index 5a4a011a8747..2c524a4216d9 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_core/_scheduler_utils.py @@ -7,7 +7,7 @@ ) from models_library.projects_nodes_io import NodeID from models_library.services_enums import ServiceBootType, ServiceState -from servicelib.fastapi.long_running_tasks.client import ProgressCallback +from servicelib.long_running_tasks.models import ProgressCallback from .....core.dynamic_services_settings.scheduler import ( DynamicServicesSchedulerSettings, @@ -88,6 +88,7 @@ def create_model_from_scheduler_data( "service_port": scheduler_data.service_port, "service_state": service_state.value, "service_message": service_message, + "is_collaborative": scheduler_data.is_collaborative, } ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_task.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_task.py index e712958a32a8..555ea16a9587 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_task.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/_task.py @@ -17,8 +17,7 @@ from models_library.services_types import ServicePortKey from models_library.users import UserID from models_library.wallets import WalletID -from servicelib.fastapi.long_running_tasks.client import ProgressCallback -from servicelib.fastapi.long_running_tasks.server import TaskProgress +from servicelib.long_running_tasks.models import ProgressCallback, TaskProgress from ....core.dynamic_services_settings.scheduler import ( DynamicServicesSchedulerSettings, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/long_running_tasks.py 
b/services/director-v2/src/simcore_service_director_v2/modules/long_running_tasks.py new file mode 100644 index 000000000000..c2583262d696 --- /dev/null +++ b/services/director-v2/src/simcore_service_director_v2/modules/long_running_tasks.py @@ -0,0 +1,28 @@ +from fastapi import FastAPI +from servicelib.long_running_tasks.long_running_client_helper import ( + LongRunningClientHelper, +) + + +def setup(app: FastAPI): + async def _on_startup() -> None: + long_running_client_helper = app.state.long_running_client_helper = ( + LongRunningClientHelper(redis_settings=app.state.settings.REDIS) + ) + await long_running_client_helper.setup() + + async def _on_shutdown() -> None: + long_running_client_helper: LongRunningClientHelper = ( + app.state.long_running_client_helper + ) + await long_running_client_helper.shutdown() + + app.add_event_handler("startup", _on_startup) + app.add_event_handler("shutdown", _on_shutdown) + + +def get_long_running_client_helper(app: FastAPI) -> LongRunningClientHelper: + assert isinstance( + app.state.long_running_client_helper, LongRunningClientHelper + ) # nosec + return app.state.long_running_client_helper diff --git a/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/_api_auth.py b/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/_api_auth.py index 18fb5f4ff176..7f6e2ad19257 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/_api_auth.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/_api_auth.py @@ -4,6 +4,7 @@ from uuid import uuid5 from fastapi import FastAPI +from models_library.auth import API_KEY_AUTOGENERATED_DISPLAY_NAME_PREFIX from models_library.products import ProductName from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID @@ -27,7 +28,7 @@ def create_unique_api_name_for( ) -> str: # NOTE: The namespace chosen doesn't significantly impact the resulting UUID # as long as it's consistently used across the same context - return f"__auto_{uuid5(uuid.NAMESPACE_DNS, f'{product_name}/{user_id}/{project_id}/{node_id}')}" + return f"{API_KEY_AUTOGENERATED_DISPLAY_NAME_PREFIX}{uuid5(uuid.NAMESPACE_DNS, f'{product_name}/{user_id}/{project_id}/{node_id}')}" async def create_user_api_key( diff --git a/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/_api_auth_rpc.py b/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/_api_auth_rpc.py index d623305229d6..dafa77013fa0 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/_api_auth_rpc.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/osparc_variables/_api_auth_rpc.py @@ -1,19 +1,16 @@ from datetime import timedelta from fastapi import FastAPI -from models_library.api_schemas_webserver import WEBSERVER_RPC_NAMESPACE from models_library.products import ProductName -from models_library.rabbitmq_basic_types import RPCMethodName -from models_library.rpc.webserver.auth.api_keys import ApiKeyGet +from models_library.rpc.webserver.auth.api_keys import ApiKeyCreate, ApiKeyGet from models_library.users import UserID -from pydantic import TypeAdapter +from servicelib.rabbitmq.rpc_interfaces.webserver.auth import ( + api_keys as webserver_auth_api_keys_rpc, +) +from ...core.settings import get_application_settings from ..rabbitmq import get_rabbitmq_rpc_client -# -# RPC interface -# - async def create_api_key( app: FastAPI, @@ -24,15 
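# --- Editor's note: illustrative sketch, not part of the diff above ----------
# The new modules/long_running_tasks.py above follows the usual app.state
# lifecycle pattern: build the helper on startup, tear it down on shutdown, and
# expose a typed accessor instead of reaching into app.state directly. Generic
# shape, with a hypothetical _Helper standing in for LongRunningClientHelper:
from fastapi import FastAPI


class _Helper:  # stand-in only
    async def setup(self) -> None: ...
    async def shutdown(self) -> None: ...


def setup_helper(app: FastAPI) -> None:
    async def _on_startup() -> None:
        app.state.helper = _Helper()
        await app.state.helper.setup()

    async def _on_shutdown() -> None:
        await app.state.helper.shutdown()

    app.add_event_handler("startup", _on_startup)
    app.add_event_handler("shutdown", _on_shutdown)


def get_helper(app: FastAPI) -> _Helper:
    assert isinstance(app.state.helper, _Helper)  # fails fast if setup() was skipped
    return app.state.helper


app = FastAPI()
setup_helper(app)  # helper is created once the application starts up
# ------------------------------------------------------------------------------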
+21,15 @@ async def create_api_key( expiration: timedelta | None = None, ) -> ApiKeyGet: rpc_client = get_rabbitmq_rpc_client(app) - result = await rpc_client.request( - WEBSERVER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python("create_api_key"), - product_name=product_name, + rpc_namespace = get_application_settings(app).DIRECTOR_V2_WEBSERVER_RPC_NAMESPACE + + return await webserver_auth_api_keys_rpc.create_api_key( + rpc_client, + rpc_namespace, user_id=user_id, - display_name=display_name, - expiration=expiration, + product_name=product_name, + api_key=ApiKeyCreate(display_name=display_name, expiration=expiration), ) - return ApiKeyGet.model_validate(result) async def delete_api_key_by_key( @@ -43,10 +40,14 @@ async def delete_api_key_by_key( api_key: str, ) -> None: rpc_client = get_rabbitmq_rpc_client(app) - await rpc_client.request( - WEBSERVER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python("delete_api_key_by_key"), + rpc_namespace = get_application_settings(app).DIRECTOR_V2_WEBSERVER_RPC_NAMESPACE + + result = await webserver_auth_api_keys_rpc.delete_api_key_by_key( + rpc_client, + rpc_namespace, product_name=product_name, user_id=user_id, api_key=api_key, ) + + assert result is None diff --git a/services/director-v2/src/simcore_service_director_v2/modules/redis.py b/services/director-v2/src/simcore_service_director_v2/modules/redis.py index 5928cc78e97c..9e02e403ab66 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/redis.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/redis.py @@ -14,11 +14,7 @@ async def on_startup() -> None: app.state.redis_clients_manager = redis_clients_manager = RedisClientsManager( databases_configs={ - RedisManagerDBConfig(database=db) - for db in ( - RedisDatabase.LOCKS, - RedisDatabase.DISTRIBUTED_IDENTIFIERS, - ) + RedisManagerDBConfig(database=db) for db in (RedisDatabase.LOCKS,) }, settings=settings.REDIS, client_name=APP_NAME, diff --git a/services/director-v2/src/simcore_service_director_v2/utils/base_distributed_identifier.py b/services/director-v2/src/simcore_service_director_v2/utils/base_distributed_identifier.py deleted file mode 100644 index ea685777a0dc..000000000000 --- a/services/director-v2/src/simcore_service_director_v2/utils/base_distributed_identifier.py +++ /dev/null @@ -1,286 +0,0 @@ -import logging -from abc import ABC, abstractmethod -from asyncio import Task -from datetime import timedelta -from typing import Final, Generic, TypeVar - -from pydantic import NonNegativeInt -from servicelib.async_utils import cancel_wait_task -from servicelib.background_task import create_periodic_task -from servicelib.logging_utils import log_catch, log_context -from servicelib.redis import RedisClientSDK -from servicelib.utils import logged_gather -from settings_library.redis import RedisDatabase - -_logger = logging.getLogger(__name__) - -_REDIS_MAX_CONCURRENCY: Final[NonNegativeInt] = 10 -_DEFAULT_CLEANUP_INTERVAL: Final[timedelta] = timedelta(minutes=1) - -Identifier = TypeVar("Identifier") -ResourceObject = TypeVar("ResourceObject") -CleanupContext = TypeVar("CleanupContext") - - -class BaseDistributedIdentifierManager( - ABC, Generic[Identifier, ResourceObject, CleanupContext] -): - """Used to implement managers for resources that require book keeping - in a distributed system. - - NOTE: that ``Identifier`` and ``CleanupContext`` are serialized and deserialized - to and from Redis. 
- - Generics: - Identifier -- a user defined object: used to uniquely identify the resource - ResourceObject -- a user defined object: referring to an existing resource - CleanupContext -- a user defined object: contains all necessary - arguments used for removal and cleanup. - """ - - def __init__( - self, - redis_client_sdk: RedisClientSDK, - *, - cleanup_interval: timedelta = _DEFAULT_CLEANUP_INTERVAL, - ) -> None: - """ - Arguments: - redis_client_sdk -- client connecting to Redis - - Keyword Arguments: - cleanup_interval -- interval at which cleanup for unused - resources runs (default: {_DEFAULT_CLEANUP_INTERVAL}) - """ - - if not redis_client_sdk.redis_dsn.endswith( - f"{RedisDatabase.DISTRIBUTED_IDENTIFIERS}" - ): - msg = ( - f"Redis endpoint {redis_client_sdk.redis_dsn} contains the wrong database." - f"Expected {RedisDatabase.DISTRIBUTED_IDENTIFIERS}" - ) - raise TypeError(msg) - - self._redis_client_sdk = redis_client_sdk - self.cleanup_interval = cleanup_interval - - self._cleanup_task: Task | None = None - - async def setup(self) -> None: - self._cleanup_task = create_periodic_task( - self._cleanup_unused_identifiers, - interval=self.cleanup_interval, - task_name="cleanup_unused_identifiers_task", - ) - - async def shutdown(self) -> None: - if self._cleanup_task: - await cancel_wait_task(self._cleanup_task, max_delay=5) - - @classmethod - def class_path(cls) -> str: - return f"{cls.__module__}.{cls.__name__}" - - @classmethod - def _redis_key_prefix(cls) -> str: - return f"{cls.class_path()}:" - - @classmethod - def _to_redis_key(cls, identifier: Identifier) -> str: - return f"{cls._redis_key_prefix()}{cls._serialize_identifier(identifier)}" - - @classmethod - def _from_redis_key(cls, redis_key: str) -> Identifier: - sad = redis_key.removeprefix(cls._redis_key_prefix()) - return cls._deserialize_identifier(sad) - - async def _get_identifier_context( - self, identifier: Identifier - ) -> CleanupContext | None: - raw: str | None = await self._redis_client_sdk.redis.get( - self._to_redis_key(identifier) - ) - return self._deserialize_cleanup_context(raw) if raw else None - - async def _get_tracked(self) -> dict[Identifier, CleanupContext]: - identifiers: list[Identifier] = [ - self._from_redis_key(redis_key) - for redis_key in await self._redis_client_sdk.redis.keys( - f"{self._redis_key_prefix()}*" - ) - ] - - cleanup_contexts: list[CleanupContext | None] = await logged_gather( - *(self._get_identifier_context(identifier) for identifier in identifiers), - max_concurrency=_REDIS_MAX_CONCURRENCY, - ) - - return { - identifier: cleanup_context - for identifier, cleanup_context in zip( - identifiers, cleanup_contexts, strict=True - ) - # NOTE: cleanup_context will be None if the key was removed before - # recovering all the cleanup_contexts - if cleanup_context is not None - } - - async def _cleanup_unused_identifiers(self) -> None: - # removes no longer used identifiers - tracked_data: dict[Identifier, CleanupContext] = await self._get_tracked() - _logger.info("Will remove unused %s", list(tracked_data.keys())) - - for identifier, cleanup_context in tracked_data.items(): - if await self.is_used(identifier, cleanup_context): - continue - - await self.remove(identifier) - - async def create( - self, *, cleanup_context: CleanupContext, **extra_kwargs - ) -> tuple[Identifier, ResourceObject]: - """Used for creating the resources - - Arguments: - cleanup_context -- user defined CleanupContext object - **extra_kwargs -- can be overloaded by the user - - Returns: - tuple[identifier for 
the resource, resource object] - """ - identifier, result = await self._create(**extra_kwargs) - await self._redis_client_sdk.redis.set( - self._to_redis_key(identifier), - self._serialize_cleanup_context(cleanup_context), - ) - return identifier, result - - async def remove(self, identifier: Identifier, *, reraise: bool = False) -> None: - """Attempts to remove the resource, if an error occurs it is logged. - - Arguments: - identifier -- user chosen identifier for the resource - reraise -- when True raises any exception raised by ``destroy`` (default: {False}) - """ - - cleanup_context = await self._get_identifier_context(identifier) - if cleanup_context is None: - _logger.warning( - "Something went wrong, did not find any context for %s", identifier - ) - return - - with ( - log_context( - _logger, logging.DEBUG, f"{self.__class__}: removing {identifier}" - ), - log_catch(_logger, reraise=reraise), - ): - await self._destroy(identifier, cleanup_context) - - await self._redis_client_sdk.redis.delete(self._to_redis_key(identifier)) - - @classmethod - @abstractmethod - def _deserialize_identifier(cls, raw: str) -> Identifier: - """User provided deserialization for the identifier - - Arguments: - raw -- stream to be deserialized - - Returns: - an identifier object - """ - - @classmethod - @abstractmethod - def _serialize_identifier(cls, identifier: Identifier) -> str: - """User provided serialization for the identifier - - Arguments: - cleanup_context -- user defined identifier object - - Returns: - object encoded as string - """ - - @classmethod - @abstractmethod - def _deserialize_cleanup_context(cls, raw: str) -> CleanupContext: - """User provided deserialization for the context - - Arguments: - raw -- stream to be deserialized - - Returns: - an object of the type chosen by the user - """ - - @classmethod - @abstractmethod - def _serialize_cleanup_context(cls, cleanup_context: CleanupContext) -> str: - """User provided serialization for the context - - Arguments: - cleanup_context -- user defined cleanup context object - - Returns: - object encoded as string - """ - - @abstractmethod - async def is_used( - self, identifier: Identifier, cleanup_context: CleanupContext - ) -> bool: - """Check if the resource associated to the ``identifier`` is - still being used. - # NOTE: a resource can be created but not in use. - - Arguments: - identifier -- user chosen identifier for the resource - cleanup_context -- user defined CleanupContext object - - Returns: - True if ``identifier`` is still being used - """ - - @abstractmethod - async def _create(self, **extra_kwargs) -> tuple[Identifier, ResourceObject]: - """Used INTERNALLY for creating the resources. - # NOTE: should not be used directly, use the public - version ``create`` instead. - - Arguments: - **extra_kwargs -- can be overloaded by the user - - Returns: - tuple[identifier for the resource, resource object] - """ - - @abstractmethod - async def get( - self, identifier: Identifier, **extra_kwargs - ) -> ResourceObject | None: - """If exists, returns the resource. - - Arguments: - identifier -- user chosen identifier for the resource - **extra_kwargs -- can be overloaded by the user - - Returns: - None if the resource does not exit - """ - - @abstractmethod - async def _destroy( - self, identifier: Identifier, cleanup_context: CleanupContext - ) -> None: - """Used to destroy an existing resource - # NOTE: should not be used directly, use the public - version ``remove`` instead. 
- - Arguments: - identifier -- user chosen identifier for the resource - cleanup_context -- user defined CleanupContext object - """ diff --git a/services/director-v2/src/simcore_service_director_v2/utils/computations.py b/services/director-v2/src/simcore_service_director_v2/utils/computations.py index bd04303dc02a..54557ac59065 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/computations.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/computations.py @@ -3,7 +3,7 @@ from typing import Any import arrow -from models_library.projects_state import RunningState +from models_library.projects_state import RUNNING_STATE_COMPLETED_STATES, RunningState from models_library.services import ServiceKeyVersion from models_library.services_regex import SERVICE_KEY_RE from models_library.users import UserID @@ -15,7 +15,7 @@ _logger = logging.getLogger(__name__) -_COMPLETED_STATES = (RunningState.ABORTED, RunningState.FAILED, RunningState.SUCCESS) + _RUNNING_STATES = (RunningState.STARTED,) _TASK_TO_PIPELINE_CONVERSIONS = { # tasks are initially in NOT_STARTED state, then they transition to published @@ -50,21 +50,22 @@ RunningState.NOT_STARTED, ): RunningState.NOT_STARTED, # if there are only completed states with FAILED --> FAILED - (*_COMPLETED_STATES,): RunningState.FAILED, + (*RUNNING_STATE_COMPLETED_STATES,): RunningState.FAILED, # if there are only completed states with FAILED and not started ones --> NOT_STARTED ( - *_COMPLETED_STATES, + *RUNNING_STATE_COMPLETED_STATES, RunningState.NOT_STARTED, ): RunningState.NOT_STARTED, # the generic case where we have a combination of completed states, running states, # or published/pending tasks, not_started is a started pipeline ( - *_COMPLETED_STATES, + *RUNNING_STATE_COMPLETED_STATES, *_RUNNING_STATES, RunningState.PUBLISHED, RunningState.PENDING, RunningState.NOT_STARTED, RunningState.WAITING_FOR_CLUSTER, + RunningState.WAITING_FOR_RESOURCES, ): RunningState.STARTED, } diff --git a/services/director-v2/src/simcore_service_director_v2/utils/computations_tasks.py b/services/director-v2/src/simcore_service_director_v2/utils/computations_tasks.py new file mode 100644 index 000000000000..f37ec66936cf --- /dev/null +++ b/services/director-v2/src/simcore_service_director_v2/utils/computations_tasks.py @@ -0,0 +1,66 @@ +from typing import NamedTuple + +import networkx as nx +from models_library.projects import ProjectID +from simcore_service_director_v2.core.errors import PipelineTaskMissingError + +from ..models.comp_pipelines import CompPipelineAtDB +from ..models.comp_tasks import CompTaskAtDB +from ..modules.db.repositories.comp_pipelines import CompPipelinesRepository +from ..modules.db.repositories.comp_tasks import CompTasksRepository + + +class PipelineInfo(NamedTuple): + pipeline_dag: nx.DiGraph + all_tasks: list[CompTaskAtDB] + filtered_tasks: list[CompTaskAtDB] + + +async def _get_pipeline_info( + *, + project_id: ProjectID, + comp_pipelines_repo: CompPipelinesRepository, + comp_tasks_repo: CompTasksRepository, +) -> PipelineInfo: + + # NOTE: Here it is assumed the project exists in comp_tasks/comp_pipeline + # get the project pipeline + pipeline_at_db: CompPipelineAtDB = await comp_pipelines_repo.get_pipeline( + project_id + ) + pipeline_dag: nx.DiGraph = pipeline_at_db.get_graph() + + # get the project task states + all_tasks: list[CompTaskAtDB] = await comp_tasks_repo.list_tasks(project_id) + + # filter the tasks by the effective pipeline + filtered_tasks = [ + t for t in all_tasks if f"{t.node_id}" in 
set(pipeline_dag.nodes()) + ] + + return PipelineInfo(pipeline_dag, all_tasks, filtered_tasks) + + +async def validate_pipeline( + project_id: ProjectID, + comp_pipelines_repo: CompPipelinesRepository, + comp_tasks_repo: CompTasksRepository, +) -> PipelineInfo: + """ + Loads and validates data from pipelines and tasks tables and + reports it back as PipelineInfo + + raises PipelineTaskMissingError + """ + + pipeline_info = await _get_pipeline_info( + project_id=project_id, + comp_pipelines_repo=comp_pipelines_repo, + comp_tasks_repo=comp_tasks_repo, + ) + + # check that we have the expected tasks + if len(pipeline_info.filtered_tasks) != len(pipeline_info.pipeline_dag): + raise PipelineTaskMissingError(project_id=project_id) + + return pipeline_info diff --git a/services/director-v2/src/simcore_service_director_v2/utils/dags.py b/services/director-v2/src/simcore_service_director_v2/utils/dags.py index a1ae47622786..f0a55669c836 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/dags.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/dags.py @@ -29,6 +29,7 @@ def create_complete_dag(workbench: NodesDict) -> nx.DiGraph: dag_graph: nx.DiGraph = nx.DiGraph() for node_id, node in workbench.items(): assert node.state # nosec + dag_graph.add_node( node_id, name=node.label, @@ -42,7 +43,10 @@ def create_complete_dag(workbench: NodesDict) -> nx.DiGraph: ) if node.input_nodes: for input_node_id in node.input_nodes: - predecessor_node = workbench.get(NodeIDStr(input_node_id)) + predecessor_node = workbench.get(f"{input_node_id}") + assert ( # nosec + predecessor_node + ), f"Node {input_node_id} not found in workbench" if predecessor_node: dag_graph.add_edge(str(input_node_id), node_id) @@ -95,9 +99,7 @@ async def get_node_io_payload_cb(node_id: NodeID) -> dict[str, Any]: return result computed_hash = await compute_node_hash(node_id, get_node_io_payload_cb) - if computed_hash != node["run_hash"]: - return True - return False + return bool(computed_hash != node["run_hash"]) async def _compute_node_dependencies_state(graph_data, node_id) -> set[NodeID]: @@ -105,9 +107,10 @@ async def _compute_node_dependencies_state(graph_data, node_id) -> set[NodeID]: # check if the previous node is outdated or waits for dependencies... in which case this one has to wait non_computed_dependencies: set[NodeID] = set() for input_port in node.get("inputs", {}).values(): - if isinstance(input_port, PortLink): - if _node_needs_computation(graph_data, input_port.node_uuid): - non_computed_dependencies.add(input_port.node_uuid) + if isinstance(input_port, PortLink) and _node_needs_computation( + graph_data, input_port.node_uuid + ): + non_computed_dependencies.add(input_port.node_uuid) # all good. 
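# --- Editor's note: illustrative sketch, not part of the diff above ----------
# validate_pipeline() above loads the pipeline DAG plus its comp_tasks rows and
# fails when a DAG node has no matching task. Stripped of networkx and the
# repositories, the core check is a set comparison; names here are illustrative.
def check_pipeline_has_all_tasks(
    dag_node_ids: set[str], task_node_ids: set[str]
) -> None:
    missing = dag_node_ids - task_node_ids
    if missing:
        msg = f"pipeline is missing task rows for nodes: {sorted(missing)}"
        raise ValueError(msg)  # the real code raises PipelineTaskMissingError


check_pipeline_has_all_tasks({"n1", "n2"}, {"n1", "n2", "extra"})  # passes
# ------------------------------------------------------------------------------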
ready return non_computed_dependencies @@ -188,14 +191,14 @@ def compute_pipeline_started_timestamp( if not pipeline_dag.nodes: return None node_id_to_comp_task: dict[NodeIDStr, CompTaskAtDB] = { - NodeIDStr(f"{task.node_id}"): task for task in comp_tasks + f"{task.node_id}": task for task in comp_tasks } - TOMORROW = arrow.utcnow().shift(days=1).datetime + tomorrow = arrow.utcnow().shift(days=1).datetime pipeline_started_at: datetime.datetime | None = min( - node_id_to_comp_task[node_id].start or TOMORROW + node_id_to_comp_task[node_id].start or tomorrow for node_id in pipeline_dag.nodes ) - if pipeline_started_at == TOMORROW: + if pipeline_started_at == tomorrow: pipeline_started_at = None return pipeline_started_at @@ -206,13 +209,13 @@ def compute_pipeline_stopped_timestamp( if not pipeline_dag.nodes: return None node_id_to_comp_task: dict[NodeIDStr, CompTaskAtDB] = { - NodeIDStr(f"{task.node_id}"): task for task in comp_tasks + f"{task.node_id}": task for task in comp_tasks } - TOMORROW = arrow.utcnow().shift(days=1).datetime + tomorrow = arrow.utcnow().shift(days=1).datetime pipeline_stopped_at: datetime.datetime | None = max( - node_id_to_comp_task[node_id].end or TOMORROW for node_id in pipeline_dag.nodes + node_id_to_comp_task[node_id].end or tomorrow for node_id in pipeline_dag.nodes ) - if pipeline_stopped_at == TOMORROW: + if pipeline_stopped_at == tomorrow: pipeline_stopped_at = None return pipeline_stopped_at @@ -227,15 +230,15 @@ async def compute_pipeline_details( # NOTE: the latest progress is available in comp_tasks only node_id_to_comp_task: dict[NodeIDStr, CompTaskAtDB] = { - NodeIDStr(f"{task.node_id}"): task for task in comp_tasks + f"{task.node_id}": task for task in comp_tasks } pipeline_progress = None if len(pipeline_dag.nodes) > 0: - pipeline_progress = sum( (node_id_to_comp_task[node_id].progress or 0) / len(pipeline_dag.nodes) for node_id in pipeline_dag.nodes - if node_id_to_comp_task[node_id].progress is not None + if node_id in node_id_to_comp_task + and node_id_to_comp_task[node_id].progress is not None ) pipeline_progress = max(0.0, min(pipeline_progress, 1.0)) @@ -246,10 +249,15 @@ async def compute_pipeline_details( node_id: NodeState( modified=node_data.get(kNODE_MODIFIED_STATE, False), dependencies=node_data.get(kNODE_DEPENDENCIES_TO_COMPUTE, set()), - current_status=node_id_to_comp_task[node_id].state, + current_status=( + node_id_to_comp_task[node_id].state + if node_id in node_id_to_comp_task + else RunningState.UNKNOWN + ), progress=( node_id_to_comp_task[node_id].progress - if node_id_to_comp_task[node_id].progress is not None + if node_id in node_id_to_comp_task + and node_id_to_comp_task[node_id].progress is not None else None ), ) @@ -261,12 +269,13 @@ async def compute_pipeline_details( def find_computational_node_cycles(dag: nx.DiGraph) -> list[list[str]]: """returns a list of nodes part of a cycle and computational, which is currently forbidden.""" - computational_node_cycles = [] + list_potential_cycles = nx.algorithms.cycles.simple_cycles(dag) - for cycle in list_potential_cycles: + return [ + deepcopy(cycle) + for cycle in list_potential_cycles if any( dag.nodes[node_id]["node_class"] is NodeClass.COMPUTATIONAL for node_id in cycle - ): - computational_node_cycles.append(deepcopy(cycle)) - return computational_node_cycles + ) + ] diff --git a/services/director-v2/src/simcore_service_director_v2/utils/dask.py b/services/director-v2/src/simcore_service_director_v2/utils/dask.py index 7ffbcd6e9f80..7a0d1fff7b9e 100644 --- 
a/services/director-v2/src/simcore_service_director_v2/utils/dask.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/dask.py @@ -20,11 +20,12 @@ from fastapi import FastAPI from models_library.api_schemas_directorv2.computations import TaskLogFileGet from models_library.api_schemas_directorv2.services import NodeRequirements -from models_library.docker import DockerLabelKey, StandardSimcoreDockerLabels +from models_library.docker import DockerLabelKey from models_library.errors import ErrorDict -from models_library.projects import ProjectID, ProjectIDStr +from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, NodeIDStr from models_library.services import ServiceKey, ServiceVersion +from models_library.services_metadata_runtime import SimcoreContainerLabels from models_library.services_types import ServiceRunID from models_library.users import UserID from models_library.wallets import WalletID @@ -40,7 +41,8 @@ from simcore_sdk.node_ports_v2.links import ItemValue as _NPItemValue from sqlalchemy.ext.asyncio import AsyncEngine -from ..constants import UNDEFINED_API_BASE_URL, UNDEFINED_DOCKER_LABEL +from .._meta import APP_NAME +from ..constants import LOGS_FILE_NAME, UNDEFINED_API_BASE_URL, UNDEFINED_DOCKER_LABEL from ..core.errors import ( ComputationalBackendNotConnectedError, ComputationalSchedulerChangedError, @@ -88,10 +90,10 @@ async def create_node_ports( :raises PortsValidationError: if any of the ports assigned values are invalid """ try: - db_manager = node_ports_v2.DBManager(db_engine) + db_manager = node_ports_v2.DBManager(db_engine, application_name=APP_NAME) return await node_ports_v2.ports( user_id=user_id, - project_id=ProjectIDStr(f"{project_id}"), + project_id=f"{project_id}", node_uuid=TypeAdapter(NodeIDStr).validate_python(f"{node_id}"), db_manager=db_manager, ) @@ -261,9 +263,6 @@ async def compute_output_data_schema( return TaskOutputDataSchema.model_validate(output_data_schema) -_LOGS_FILE_NAME = "logs.zip" - - async def compute_service_log_file_upload_link( user_id: UserID, project_id: ProjectID, @@ -274,7 +273,7 @@ async def compute_service_log_file_upload_link( user_id=user_id, project_id=f"{project_id}", node_id=f"{node_id}", - file_name=_LOGS_FILE_NAME, + file_name=LOGS_FILE_NAME, link_type=file_link_type, file_size=ByteSize(0), # will create a single presigned link sha256_checksum=None, @@ -296,7 +295,7 @@ def compute_task_labels( ValidationError """ product_name = run_metadata.get("product_name", UNDEFINED_DOCKER_LABEL) - standard_simcore_labels = StandardSimcoreDockerLabels.model_validate( + standard_simcore_labels = SimcoreContainerLabels.model_validate( { "user_id": user_id, "project_id": project_id, @@ -375,7 +374,7 @@ async def _get_service_log_file_download_link( user_id=user_id, project_id=f"{project_id}", node_id=f"{node_id}", - file_name=_LOGS_FILE_NAME, + file_name=LOGS_FILE_NAME, link_type=file_link_type, ) return value_link @@ -444,10 +443,10 @@ async def clean_task_output_and_log_files_if_invalid( user_id=user_id, project_id=f"{project_id}", node_id=f"{node_id}", - file_name=_LOGS_FILE_NAME, + file_name=LOGS_FILE_NAME, ): await port_utils.delete_target_link( - user_id, f"{project_id}", f"{node_id}", _LOGS_FILE_NAME + user_id, f"{project_id}", f"{node_id}", LOGS_FILE_NAME ) @@ -482,7 +481,7 @@ def check_scheduler_is_still_the_same( ) -def check_communication_with_scheduler_is_open(client: distributed.Client): +def check_communication_with_scheduler_is_open(client: distributed.Client) -> 
None: if ( client.scheduler_comm and client.scheduler_comm.comm is not None @@ -491,12 +490,9 @@ def check_communication_with_scheduler_is_open(client: distributed.Client): raise ComputationalBackendNotConnectedError -def check_scheduler_status(client: distributed.Client): +def check_scheduler_status(client: distributed.Client) -> None: client_status = client.status if client_status not in "running": - _logger.error( - "The computational backend is not connected!", - ) raise ComputationalBackendNotConnectedError diff --git a/services/director-v2/src/simcore_service_director_v2/utils/db.py b/services/director-v2/src/simcore_service_director_v2/utils/db.py index 43e3a3710893..09544dcc96ea 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/db.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/db.py @@ -16,11 +16,10 @@ StateType.ABORTED: RunningState.ABORTED, StateType.WAITING_FOR_RESOURCES: RunningState.WAITING_FOR_RESOURCES, StateType.WAITING_FOR_CLUSTER: RunningState.WAITING_FOR_CLUSTER, + StateType.UNKNOWN: RunningState.UNKNOWN, } -RUNNING_STATE_TO_DB = {v: k for k, v in DB_TO_RUNNING_STATE.items()} | { - RunningState.UNKNOWN: StateType.FAILED -} +RUNNING_STATE_TO_DB = {v: k for k, v in DB_TO_RUNNING_STATE.items()} _logger = logging.getLogger(__name__) diff --git a/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py b/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py index 6f6e16931938..fb20fc0ee2e2 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/rabbitmq.py @@ -5,6 +5,7 @@ from models_library.projects_nodes_io import NodeID from models_library.projects_state import RunningState from models_library.rabbitmq_messages import ( + ComputationalPipelineStatusMessage, InstrumentationRabbitMessage, LoggerRabbitMessage, ProgressRabbitMessageNode, @@ -197,3 +198,17 @@ async def publish_project_log( log_level=log_level, ) await rabbitmq_client.publish(message.channel_name, message) + + +async def publish_pipeline_scheduling_state( + rabbitmq_client: RabbitMQClient, + user_id: UserID, + project_id: ProjectID, + state: RunningState, +) -> None: + message = ComputationalPipelineStatusMessage.model_construct( + user_id=user_id, + project_id=project_id, + run_result=state, + ) + await rabbitmq_client.publish(message.channel_name, message) diff --git a/services/director-v2/tests/conftest.py b/services/director-v2/tests/conftest.py index 8335706ad7b3..831b34e286c6 100644 --- a/services/director-v2/tests/conftest.py +++ b/services/director-v2/tests/conftest.py @@ -32,19 +32,22 @@ from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.rabbitmq import RabbitMQRPCClient from settings_library.rabbit import RabbitSettings -from simcore_service_director_v2.core.application import init_app +from simcore_service_director_v2.core.application import create_app from simcore_service_director_v2.core.settings import AppSettings from starlette.testclient import ASGI3App, TestClient pytest_plugins = [ + "pytest_simcore.asyncio_event_loops", "pytest_simcore.dask_scheduler", "pytest_simcore.db_entries_mocks", "pytest_simcore.docker_compose", "pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", + "pytest_simcore.faker_products_data", "pytest_simcore.faker_projects_data", "pytest_simcore.faker_users_data", + "pytest_simcore.logging", "pytest_simcore.minio_service", 
"pytest_simcore.postgres_service", "pytest_simcore.pydantic_models", @@ -199,7 +202,7 @@ def mock_env( @pytest.fixture() async def initialized_app(mock_env: EnvVarsDict) -> AsyncIterable[FastAPI]: settings = AppSettings.create_from_envs() - app = init_app(settings) + app = create_app(settings) print("Application settings\n", settings.model_dump_json(indent=2)) async with LifespanManager(app): yield app @@ -210,7 +213,7 @@ async def client(mock_env: EnvVarsDict) -> AsyncIterator[TestClient]: # NOTE: this way we ensure the events are run in the application # since it starts the app on a test server settings = AppSettings.create_from_envs() - app = init_app(settings) + app = create_app(settings) # NOTE: we cannot use the initialized_app fixture here as the TestClient also creates it print("Application settings\n", settings.model_dump_json(indent=2)) with TestClient(app, raise_server_exceptions=True) as test_client: @@ -355,7 +358,6 @@ async def wrapper(*args, **kwargs): @pytest.fixture def mock_osparc_variables_api_auth_rpc(mocker: MockerFixture) -> None: - fake_data = ApiKeyGet.model_validate(ApiKeyGet.model_json_schema()["examples"][0]) async def _create( diff --git a/services/director-v2/tests/integration/01/test_computation_api.py b/services/director-v2/tests/integration/01/test_computation_api.py index f16977bc1cf5..f5dce1567de6 100644 --- a/services/director-v2/tests/integration/01/test_computation_api.py +++ b/services/director-v2/tests/integration/01/test_computation_api.py @@ -7,6 +7,7 @@ import asyncio import json +import uuid from collections.abc import Awaitable, Callable from copy import deepcopy from dataclasses import dataclass @@ -192,13 +193,14 @@ def test_invalid_computation( async def test_start_empty_computation_is_refused( async_client: httpx.AsyncClient, create_registered_user: Callable, - project: Callable[..., Awaitable[ProjectAtDB]], + with_product: dict[str, Any], + create_project: Callable[..., Awaitable[ProjectAtDB]], osparc_product_name: str, osparc_product_api_base_url: str, create_pipeline: Callable[..., Awaitable[ComputationGet]], ): user = create_registered_user() - empty_project = await project(user) + empty_project = await create_project(user) with pytest.raises( httpx.HTTPStatusError, match=f"{status.HTTP_422_UNPROCESSABLE_ENTITY}" ): @@ -397,7 +399,8 @@ async def test_run_partial_computation( wait_for_catalog_service: Callable[[UserID, str], Awaitable[None]], async_client: httpx.AsyncClient, create_registered_user: Callable, - project: Callable[..., Awaitable[ProjectAtDB]], + with_product: dict[str, Any], + create_project: Callable[..., Awaitable[ProjectAtDB]], update_project_workbench_with_comp_tasks: Callable, fake_workbench_without_outputs: dict[str, Any], params: PartialComputationParams, @@ -407,7 +410,7 @@ async def test_run_partial_computation( ): user = create_registered_user() await wait_for_catalog_service(user["id"], osparc_product_name) - sleepers_project: ProjectAtDB = await project( + sleepers_project: ProjectAtDB = await create_project( user, workbench=fake_workbench_without_outputs ) @@ -549,7 +552,8 @@ async def test_run_computation( wait_for_catalog_service: Callable[[UserID, str], Awaitable[None]], async_client: httpx.AsyncClient, create_registered_user: Callable, - project: Callable[..., Awaitable[ProjectAtDB]], + with_product: dict[str, Any], + create_project: Callable[..., Awaitable[ProjectAtDB]], fake_workbench_without_outputs: dict[str, Any], update_project_workbench_with_comp_tasks: Callable, 
fake_workbench_computational_pipeline_details: PipelineDetails, @@ -560,7 +564,9 @@ async def test_run_computation( ): user = create_registered_user() await wait_for_catalog_service(user["id"], osparc_product_name) - sleepers_project = await project(user, workbench=fake_workbench_without_outputs) + sleepers_project = await create_project( + user, workbench=fake_workbench_without_outputs + ) # send a valid project with sleepers task_out = await create_pipeline( async_client, @@ -667,7 +673,8 @@ async def test_run_computation( async def test_abort_computation( async_client: httpx.AsyncClient, create_registered_user: Callable, - project: Callable[..., Awaitable[ProjectAtDB]], + with_product: dict[str, Any], + create_project: Callable[..., Awaitable[ProjectAtDB]], fake_workbench_without_outputs: dict[str, Any], fake_workbench_computational_pipeline_details: PipelineDetails, osparc_product_name: str, @@ -681,7 +688,9 @@ async def test_abort_computation( node["inputs"].setdefault("in_2", 120) if not isinstance(node["inputs"]["in_2"], dict): node["inputs"]["in_2"] = 120 - sleepers_project = await project(user, workbench=fake_workbench_without_outputs) + sleepers_project = await create_project( + user, workbench=fake_workbench_without_outputs + ) # send a valid project with sleepers task_out = await create_pipeline( async_client, @@ -746,7 +755,8 @@ async def test_abort_computation( async def test_update_and_delete_computation( async_client: httpx.AsyncClient, create_registered_user: Callable, - project: Callable[..., Awaitable[ProjectAtDB]], + with_product: dict[str, Any], + create_project: Callable[..., Awaitable[ProjectAtDB]], fake_workbench_without_outputs: dict[str, Any], fake_workbench_computational_pipeline_details_not_started: PipelineDetails, fake_workbench_computational_pipeline_details: PipelineDetails, @@ -755,7 +765,9 @@ async def test_update_and_delete_computation( create_pipeline: Callable[..., Awaitable[ComputationGet]], ): user = create_registered_user() - sleepers_project = await project(user, workbench=fake_workbench_without_outputs) + sleepers_project = await create_project( + user, workbench=fake_workbench_without_outputs + ) # send a valid project with sleepers task_out = await create_pipeline( async_client, @@ -874,7 +886,8 @@ async def test_update_and_delete_computation( async def test_pipeline_with_no_computational_services_still_create_correct_comp_tasks_in_db( async_client: httpx.AsyncClient, create_registered_user: Callable, - project: Callable[..., Awaitable[ProjectAtDB]], + with_product: dict[str, Any], + create_project: Callable[..., Awaitable[ProjectAtDB]], jupyter_service: dict[str, Any], osparc_product_name: str, osparc_product_api_base_url: str, @@ -882,7 +895,7 @@ async def test_pipeline_with_no_computational_services_still_create_correct_comp ): user = create_registered_user() # create a workbench with just a dynamic service - project_with_dynamic_node = await project( + project_with_dynamic_node = await create_project( user, workbench={ "39e92f80-9286-5612-85d1-639fa47ec57d": { @@ -920,14 +933,15 @@ async def test_pipeline_with_no_computational_services_still_create_correct_comp async def test_pipeline_with_control_loop_made_of_dynamic_services_is_allowed( client: TestClient, create_registered_user: Callable, - project: Callable[..., Awaitable[ProjectAtDB]], + with_product: dict[str, Any], + create_project: Callable[..., Awaitable[ProjectAtDB]], jupyter_service: dict[str, Any], osparc_product_name: str, osparc_product_api_base_url: str, ): user = 
create_registered_user() # create a workbench with just 2 dynamic service in a cycle - project_with_dynamic_node = await project( + project_with_dynamic_node = await create_project( user, workbench={ "39e92f80-9286-5612-85d1-639fa47ec57d": { @@ -966,6 +980,7 @@ async def test_pipeline_with_control_loop_made_of_dynamic_services_is_allowed( "start_pipeline": True, "product_name": osparc_product_name, "product_api_base_url": osparc_product_api_base_url, + "collection_run_id": str(uuid.uuid4()), }, ) assert ( @@ -991,7 +1006,8 @@ async def test_pipeline_with_control_loop_made_of_dynamic_services_is_allowed( async def test_pipeline_with_cycle_containing_a_computational_service_is_forbidden( client: TestClient, create_registered_user: Callable, - project: Callable[..., Awaitable[ProjectAtDB]], + with_product: dict[str, Any], + create_project: Callable[..., Awaitable[ProjectAtDB]], sleeper_service: dict[str, Any], jupyter_service: dict[str, Any], osparc_product_name: str, @@ -999,7 +1015,7 @@ async def test_pipeline_with_cycle_containing_a_computational_service_is_forbidd ): user = create_registered_user() # create a workbench with just 2 dynamic service in a cycle - project_with_cycly_and_comp_service = await project( + project_with_cycly_and_comp_service = await create_project( user, workbench={ "39e92f80-9286-5612-85d1-639fa47ec57d": { @@ -1050,6 +1066,7 @@ async def test_pipeline_with_cycle_containing_a_computational_service_is_forbidd "start_pipeline": True, "product_name": osparc_product_name, "product_api_base_url": osparc_product_api_base_url, + "collection_run_id": str(uuid.uuid4()), }, ) assert ( @@ -1075,7 +1092,8 @@ async def test_pipeline_with_cycle_containing_a_computational_service_is_forbidd async def test_burst_create_computations( async_client: httpx.AsyncClient, create_registered_user: Callable, - project: Callable[..., Awaitable[ProjectAtDB]], + with_product: dict[str, Any], + create_project: Callable[..., Awaitable[ProjectAtDB]], fake_workbench_without_outputs: dict[str, Any], update_project_workbench_with_comp_tasks: Callable, fake_workbench_computational_pipeline_details: PipelineDetails, @@ -1085,8 +1103,12 @@ async def test_burst_create_computations( create_pipeline: Callable[..., Awaitable[ComputationGet]], ): user = create_registered_user() - sleepers_project = await project(user, workbench=fake_workbench_without_outputs) - sleepers_project2 = await project(user, workbench=fake_workbench_without_outputs) + sleepers_project = await create_project( + user, workbench=fake_workbench_without_outputs + ) + sleepers_project2 = await create_project( + user, workbench=fake_workbench_without_outputs + ) NUMBER_OF_CALLS = 4 diff --git a/services/director-v2/tests/integration/02/test_dynamic_services_routes.py b/services/director-v2/tests/integration/02/test_dynamic_services_routes.py index e4a5cc390476..dd1ebb2e2f73 100644 --- a/services/director-v2/tests/integration/02/test_dynamic_services_routes.py +++ b/services/director-v2/tests/integration/02/test_dynamic_services_routes.py @@ -36,7 +36,7 @@ ) from settings_library.rabbit import RabbitSettings from settings_library.redis import RedisSettings -from simcore_service_director_v2.core.application import init_app +from simcore_service_director_v2.core.application import create_app from simcore_service_director_v2.core.settings import AppSettings from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type @@ -97,9 +97,11 @@ def user_id(user_db: dict[str, Any]) -> UserID: @pytest.fixture async def 
project_id( - user_db: dict[str, Any], project: Callable[..., Awaitable[ProjectAtDB]] + user_db: dict[str, Any], + create_project: Callable[..., Awaitable[ProjectAtDB]], + with_product: dict[str, Any], ) -> str: - prj = await project(user=user_db) + prj = await create_project(user=user_db) return f"{prj.uuid}" @@ -195,7 +197,7 @@ async def director_v2_client( settings = AppSettings.create_from_envs() - app = init_app(settings) + app = create_app(settings) async with TestClient(app) as client: yield client @@ -274,11 +276,13 @@ async def _mocked_context_manger(*args, **kwargs) -> AsyncIterator[None]: async def key_version_expected( dy_static_file_server_dynamic_sidecar_service: dict, dy_static_file_server_service: dict, - docker_registry_image_injector: Callable, + docker_registry_image_injector: Callable[ + [str, str, str | None], Awaitable[dict[str, Any]] + ], ) -> list[tuple[ServiceKeyVersion, bool]]: results: list[tuple[ServiceKeyVersion, bool]] = [] - sleeper_service = docker_registry_image_injector( + sleeper_service = await docker_registry_image_injector( "itisfoundation/sleeper", "2.1.1", "user@e.mail" ) diff --git a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py index 30563157d6d4..aa70662048c9 100644 --- a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py +++ b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py @@ -28,12 +28,12 @@ ) from models_library.api_schemas_directorv2.computations import ComputationGet from models_library.clusters import ClusterAuthentication +from models_library.products import ProductName from models_library.projects import ( Node, NodesDict, ProjectAtDB, ProjectID, - ProjectIDStr, ) from models_library.projects_networks import ( PROJECT_NETWORK_PREFIX, @@ -51,11 +51,13 @@ from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.fastapi.long_running_tasks.client import ( - Client, + HttpClient, + periodic_task_result, +) +from servicelib.long_running_tasks.models import ( ProgressMessage, ProgressPercent, TaskId, - periodic_task_result, ) from servicelib.progress_bar import ProgressBarData from servicelib.sequences_utils import pairwise @@ -70,6 +72,7 @@ from simcore_sdk.node_data import data_manager from simcore_sdk.node_ports_common.file_io_utils import LogRedirectCB from simcore_sdk.node_ports_v2 import DBManager, Nodeports, Port +from simcore_service_director_v2._meta import APP_NAME from simcore_service_director_v2.constants import DYNAMIC_SIDECAR_SERVICE_PREFIX from simcore_service_director_v2.core.dynamic_services_settings.sidecar import ( RCloneSettings, @@ -165,6 +168,7 @@ async def minimal_configuration( ensure_swarm_and_networks: None, minio_s3_settings_envs: EnvVarsDict, current_user: dict[str, Any], + with_product: dict[str, Any], osparc_product_name: str, ) -> AsyncIterator[None]: await wait_for_catalog_service(current_user["id"], osparc_product_name) @@ -261,14 +265,45 @@ def current_user(create_registered_user: Callable) -> dict[str, Any]: @pytest.fixture async def current_study( current_user: dict[str, Any], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], fake_dy_workbench: dict[str, Any], + sleeper_service: dict, + dy_static_file_server_dynamic_sidecar_service: dict, + 
dy_static_file_server_dynamic_sidecar_compose_spec_service: dict, async_client: httpx.AsyncClient, osparc_product_name: str, osparc_product_api_base_url: str, create_pipeline: Callable[..., Awaitable[ComputationGet]], + grant_service_access_rights: Callable[..., dict[str, Any]], ) -> ProjectAtDB: - project_at_db = await project(current_user, workbench=fake_dy_workbench) + # 1. grant current_user execution access to services in this study + grant_service_access_rights( + group_id=current_user["primary_gid"], + service_key=sleeper_service["schema"]["key"], + service_version=sleeper_service["schema"]["version"], + product_name=osparc_product_name, + ) + grant_service_access_rights( + group_id=current_user["primary_gid"], + service_key=dy_static_file_server_dynamic_sidecar_service["schema"]["key"], + service_version=dy_static_file_server_dynamic_sidecar_service["schema"][ + "version" + ], + product_name=osparc_product_name, + ) + grant_service_access_rights( + group_id=current_user["primary_gid"], + service_key=dy_static_file_server_dynamic_sidecar_compose_spec_service[ + "schema" + ]["key"], + service_version=dy_static_file_server_dynamic_sidecar_compose_spec_service[ + "schema" + ]["version"], + product_name=osparc_product_name, + ) + + # create project for this user + project_at_db = await create_project(current_user, workbench=fake_dy_workbench) # create entries in comp_task table in order to pull output ports await create_pipeline( @@ -295,7 +330,7 @@ def workbench_dynamic_services( @pytest.fixture async def db_manager(sqlalchemy_async_engine: AsyncEngine) -> DBManager: - return DBManager(sqlalchemy_async_engine) + return DBManager(sqlalchemy_async_engine, application_name=APP_NAME) def _is_docker_r_clone_plugin_installed() -> bool: @@ -478,7 +513,7 @@ async def _get_mapped_nodeports_values( for node_uuid in workbench: PORTS: Nodeports = await node_ports_v2.ports( user_id=user_id, - project_id=ProjectIDStr(project_id), + project_id=project_id, node_uuid=TypeAdapter(NodeIDStr).validate_python(node_uuid), db_manager=db_manager, ) @@ -831,7 +866,7 @@ async def _debug_progress_callback( logger.debug("%s: %.2f %s", task_id, percent, message) async with periodic_task_result( - Client( + HttpClient( app=initialized_app, async_client=director_v2_client, base_url=TypeAdapter(AnyHttpUrl).validate_python( @@ -887,6 +922,13 @@ async def _assert_retrieve_completed( ), "TIP: Message missing suggests that the data was never uploaded: look in services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py" +def product_name(osparc_product_name: ProductName) -> ProductName: + """ + override the product name to be used in these tests + """ + return osparc_product_name + + @pytest.mark.flaky(max_runs=3) async def test_nodeports_integration( cleanup_services_and_networks: None, diff --git a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py index 61af3dd5823d..2d0abc8e74f9 100644 --- a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py +++ b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py @@ -139,7 +139,8 @@ def user_dict(create_registered_user: Callable) -> dict[str, Any]: async def dy_static_file_server_project( minimal_configuration: None, user_dict: dict[str, Any], - project: Callable[..., Awaitable[ProjectAtDB]], + with_product: dict[str, Any], + create_project: 
Callable[..., Awaitable[ProjectAtDB]], dy_static_file_server_service: dict, dy_static_file_server_dynamic_sidecar_service: dict, dy_static_file_server_dynamic_sidecar_compose_spec_service: dict, @@ -154,7 +155,7 @@ def _assemble_node_data(spec: dict, label: str) -> dict[str, str]: "label": label, } - return await project( + return await create_project( user=user_dict, workbench={ uuid_legacy: _assemble_node_data( diff --git a/services/director-v2/tests/integration/conftest.py b/services/director-v2/tests/integration/conftest.py index cc4c32899aef..13a56f99e987 100644 --- a/services/director-v2/tests/integration/conftest.py +++ b/services/director-v2/tests/integration/conftest.py @@ -3,6 +3,7 @@ # pylint: disable=unused-import import asyncio +import uuid from collections.abc import AsyncIterator, Awaitable, Callable from unittest.mock import AsyncMock @@ -99,6 +100,9 @@ async def _creator( "start_pipeline": start_pipeline, "product_name": product_name, "product_api_base_url": product_api_base_url, + "collection_run_id": ( + str(uuid.uuid4()) if start_pipeline is True else None + ), **kwargs, }, ) diff --git a/services/director-v2/tests/unit/test_api_route_dynamic_scheduler.py b/services/director-v2/tests/unit/test_api_route_dynamic_scheduler.py index 41abda858bbb..b573f4292fa8 100644 --- a/services/director-v2/tests/unit/test_api_route_dynamic_scheduler.py +++ b/services/director-v2/tests/unit/test_api_route_dynamic_scheduler.py @@ -7,6 +7,8 @@ import pytest import respx +from common_library.json_serialization import json_dumps +from common_library.serialization import model_dump_with_secrets from faker import Faker from fastapi import status from httpx import Response @@ -15,6 +17,8 @@ from models_library.service_settings_labels import SimcoreServiceLabels from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings from simcore_service_director_v2.models.dynamic_services_scheduler import SchedulerData from simcore_service_director_v2.modules.dynamic_sidecar.errors import ( DynamicSidecarNotFoundError, @@ -24,11 +28,16 @@ ) from starlette.testclient import TestClient +pytest_simcore_core_services_selection = [ + "rabbit", +] + @pytest.fixture def mock_env( + use_in_memory_redis: RedisSettings, mock_exclusive: None, - disable_rabbitmq: None, + rabbit_service: RabbitSettings, disable_postgres: None, mock_env: EnvVarsDict, monkeypatch: pytest.MonkeyPatch, @@ -48,6 +57,10 @@ def mock_env( monkeypatch.setenv("S3_REGION", faker.pystr()) monkeypatch.setenv("S3_SECRET_KEY", faker.pystr()) monkeypatch.setenv("S3_BUCKET_NAME", faker.pystr()) + monkeypatch.setenv( + "DIRECTOR_V2_RABBITMQ", + json_dumps(model_dump_with_secrets(rabbit_service, show_secrets=True)), + ) @pytest.fixture @@ -201,9 +214,7 @@ async def test_409_response( ) assert response.status_code == status.HTTP_202_ACCEPTED task_id = response.json() - assert task_id.startswith( - f"simcore_service_director_v2.api.routes.dynamic_scheduler.{task_name}." 
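The integration conftest above attaches a collection_run_id to the computation-creation request only when start_pipeline is true. A small illustrative helper with the same behaviour (any field name not visible in the hunk is an assumption):

```python
import uuid
from typing import Any


def build_computation_body(
    *,
    product_name: str,
    product_api_base_url: str,
    start_pipeline: bool,
    **kwargs: Any,
) -> dict[str, Any]:
    # a collection_run_id is generated only when the pipeline is actually started,
    # mirroring the integration-test conftest change above
    return {
        "start_pipeline": start_pipeline,
        "product_name": product_name,
        "product_api_base_url": product_api_base_url,
        "collection_run_id": str(uuid.uuid4()) if start_pipeline else None,
        **kwargs,
    }


if __name__ == "__main__":
    print(
        build_computation_body(
            product_name="osparc",
            product_api_base_url="https://api.osparc.test",
            start_pipeline=True,
        )
    )
```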
- ) + assert f"director-v2.functools.{task_name}" in task_id response = client.request( method, diff --git a/services/director-v2/tests/unit/test_core_settings.py b/services/director-v2/tests/unit/test_core_settings.py index 2151d64cfa51..6da0d9772a5a 100644 --- a/services/director-v2/tests/unit/test_core_settings.py +++ b/services/director-v2/tests/unit/test_core_settings.py @@ -27,7 +27,7 @@ def _get_backend_type_options() -> set[str]: def test_supported_backends_did_not_change() -> None: _EXPECTED = {"AWS", "CEPH", "MINIO"} - assert _EXPECTED == _get_backend_type_options(), ( + assert _get_backend_type_options() == _EXPECTED, ( "Backend configuration change, please code support for " "it in volumes_resolver -> _get_s3_volume_driver_config. " "When done, adjust above list." diff --git a/services/director-v2/tests/unit/test_models_comp_runs.py b/services/director-v2/tests/unit/test_models_comp_runs.py index 5505982f2d1c..af71c92c9e61 100644 --- a/services/director-v2/tests/unit/test_models_comp_runs.py +++ b/services/director-v2/tests/unit/test_models_comp_runs.py @@ -8,35 +8,33 @@ import pytest from models_library.projects_state import RunningState from pydantic.main import BaseModel +from pytest_simcore.pydantic_models import ( + assert_validation_model, + iter_model_examples_in_class, +) from simcore_service_director_v2.models.comp_runs import CompRunsAtDB @pytest.mark.parametrize( - "model_cls", - [ - CompRunsAtDB, - ], + "model_cls, example_name, example_data", + iter_model_examples_in_class(CompRunsAtDB), ) def test_computation_run_model_examples( - model_cls: type[BaseModel], model_cls_examples: dict[str, dict[str, Any]] + model_cls: type[BaseModel], example_name: str, example_data: dict[str, Any] ): - for name, example in model_cls_examples.items(): - print(name, ":", pformat(example)) - model_instance = model_cls(**example) - assert model_instance, f"Failed with {name}" + assert_validation_model( + model_cls, example_name=example_name, example_data=example_data + ) @pytest.mark.parametrize( - "model_cls", - [ - CompRunsAtDB, - ], + "model_cls, example_name, example_data", + iter_model_examples_in_class(CompRunsAtDB), ) def test_computation_run_model_with_run_result_value_field( - model_cls: type[BaseModel], model_cls_examples: dict[str, dict[str, Any]] + model_cls: type[BaseModel], example_name: str, example_data: dict[str, Any] ): - for name, example in model_cls_examples.items(): - example["result"] = RunningState.WAITING_FOR_RESOURCES.value - print(name, ":", pformat(example)) - model_instance = model_cls(**example) - assert model_instance, f"Failed with {name}" + example_data["result"] = RunningState.WAITING_FOR_RESOURCES.value + print(example_name, ":", pformat(example_data)) + model_instance = model_cls(**example_data) + assert model_instance, f"Failed with {example_name}" diff --git a/services/director-v2/tests/unit/test_models_comp_tasks.py b/services/director-v2/tests/unit/test_models_comp_tasks.py index 6898acface49..f03e103e728b 100644 --- a/services/director-v2/tests/unit/test_models_comp_tasks.py +++ b/services/director-v2/tests/unit/test_models_comp_tasks.py @@ -2,73 +2,58 @@ # pylint:disable=unused-argument # pylint:disable=redefined-outer-name -from pprint import pformat from typing import Any import pytest from models_library.projects_state import RunningState from pydantic.main import BaseModel +from pytest_simcore.pydantic_models import ( + assert_validation_model, + iter_model_examples_in_class, +) from simcore_postgres_database.models.comp_pipeline import 
StateType from simcore_service_director_v2.models.comp_tasks import CompTaskAtDB @pytest.mark.parametrize( - "model_cls", - (CompTaskAtDB,), + "model_cls, example_name, example_data", + iter_model_examples_in_class(CompTaskAtDB), ) def test_computation_task_model_examples( - model_cls: type[BaseModel], model_cls_examples: dict[str, dict[str, Any]] -): - for name, example in model_cls_examples.items(): - print(name, ":", pformat(example)) - model_instance = model_cls(**example) - assert model_instance, f"Failed with {name}" - - -@pytest.mark.parametrize( - "model_cls", - [CompTaskAtDB], -) -def test_computation_task_model_export_to_db_model( - model_cls: type[BaseModel], model_cls_examples: dict[str, dict[str, Any]] + model_cls: type[BaseModel], example_name: str, example_data: dict[str, Any] ): - for name, example in model_cls_examples.items(): - print(name, ":", pformat(example)) - model_instance = model_cls(**example) - assert model_instance, f"Failed with {name}" + model_instance = assert_validation_model( + model_cls, example_name=example_name, example_data=example_data + ) - assert isinstance(model_instance, CompTaskAtDB) - db_model = model_instance.to_db_model() + assert isinstance(model_instance, CompTaskAtDB) + db_model = model_instance.to_db_model() - assert isinstance(db_model, dict) - assert StateType(db_model["state"]) + assert isinstance(db_model, dict) + assert StateType(db_model["state"]) @pytest.mark.parametrize( - "model_cls", - [CompTaskAtDB], + "model_cls, example_name, example_data", + iter_model_examples_in_class(CompTaskAtDB), ) def test_computation_task_model_with_running_state_value_field( - model_cls: type[BaseModel], model_cls_examples: dict[str, dict[str, Any]] + model_cls: type[BaseModel], example_name: str, example_data: dict[str, Any] ): - for name, example in model_cls_examples.items(): - example["state"] = RunningState.WAITING_FOR_RESOURCES.value - print(name, ":", pformat(example)) - model_instance = model_cls(**example) - assert model_instance, f"Failed with {name}" + example_data["state"] = RunningState.WAITING_FOR_RESOURCES.value + model_instance = model_cls(**example_data) + assert model_instance, f"Failed with {example_name}" @pytest.mark.parametrize( - "model_cls", - [CompTaskAtDB], + "model_cls, example_name, example_data", + iter_model_examples_in_class(CompTaskAtDB), ) def test_computation_task_model_with_wrong_default_value_field( - model_cls: type[BaseModel], model_cls_examples: dict[str, dict[str, Any]] + model_cls: type[BaseModel], example_name: str, example_data: dict[str, Any] ): - for name, example in model_cls_examples.items(): - for output_schema in example.get("schema", {}).get("outputs", {}).values(): - output_schema["defaultValue"] = None + for output_schema in example_data.get("schema", {}).get("outputs", {}).values(): + output_schema["defaultValue"] = None - print(name, ":", pformat(example)) - model_instance = model_cls(**example) - assert model_instance, f"Failed with {name}" + model_instance = model_cls(**example_data) + assert model_instance, f"Failed with {example_name}" diff --git a/services/director-v2/tests/unit/test_models_dynamic_services.py b/services/director-v2/tests/unit/test_models_dynamic_services.py index 99a22ece3bb7..6981b03c5bb5 100644 --- a/services/director-v2/tests/unit/test_models_dynamic_services.py +++ b/services/director-v2/tests/unit/test_models_dynamic_services.py @@ -153,6 +153,7 @@ def test_running_service_details_make_status( "service_host": scheduler_data.service_name, "user_id": 
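The model tests above switch from looping over model_cls_examples inside each test to parametrizing with iter_model_examples_in_class, so every schema example becomes its own test case. A self-contained sketch of what such an iterator conceptually provides, using a toy pydantic model (this re-implementation is illustrative only, not the pytest_simcore helper itself):

```python
from collections.abc import Iterator
from typing import Any

import pytest
from pydantic import BaseModel


class Point(BaseModel):
    x: int
    y: int

    model_config = {
        "json_schema_extra": {"examples": [{"x": 1, "y": 2}, {"x": 3, "y": 4}]}
    }


def iter_examples(
    model_cls: type[BaseModel],
) -> Iterator[tuple[type[BaseModel], str, dict[str, Any]]]:
    # one (model_cls, example_name, example_data) tuple per declared schema example
    for i, example in enumerate(model_cls.model_json_schema().get("examples", [])):
        yield model_cls, f"{model_cls.__name__}-example-{i}", example


@pytest.mark.parametrize("model_cls, example_name, example_data", iter_examples(Point))
def test_examples_validate(model_cls, example_name, example_data):
    assert model_cls(**example_data), f"Failed with {example_name}"
```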
scheduler_data.user_id, "service_port": scheduler_data.service_port, + "is_collaborative": scheduler_data.is_collaborative, } assert running_service_details_dict == expected_running_service_details diff --git a/services/director-v2/tests/unit/test_modules_dask_client.py b/services/director-v2/tests/unit/test_modules_dask_client.py index 909c3c238de7..d9b205b5cba9 100644 --- a/services/director-v2/tests/unit/test_modules_dask_client.py +++ b/services/director-v2/tests/unit/test_modules_dask_client.py @@ -41,11 +41,11 @@ from fastapi.applications import FastAPI from models_library.api_schemas_directorv2.services import NodeRequirements from models_library.clusters import ClusterTypeInModel, NoAuthentication -from models_library.docker import to_simcore_runtime_docker_label_key from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.projects_state import RunningState from models_library.resource_tracker import HardwareInfo +from models_library.services_metadata_runtime import to_simcore_runtime_docker_label_key from models_library.services_types import ServiceRunID from models_library.users import UserID from pydantic import AnyUrl, ByteSize, TypeAdapter @@ -1162,108 +1162,3 @@ def fake_remote_fct( (mock.ANY, "my name is progress") ) await _assert_wait_for_cb_call(mocked_user_completed_cb) - - -async def test_get_cluster_details( - dask_client: DaskClient, - user_id: UserID, - project_id: ProjectID, - image_params: ImageParams, - _mocked_node_ports: None, - mocked_user_completed_cb: mock.AsyncMock, - mocked_storage_service_api: respx.MockRouter, - comp_run_metadata: RunMetadataDict, - empty_hardware_info: HardwareInfo, - faker: Faker, - resource_tracking_run_id: ServiceRunID, -): - cluster_details = await dask_client.get_cluster_details() - assert cluster_details - - _DASK_EVENT_NAME = faker.pystr() - - # send a fct that uses resources - def fake_sidecar_fct( - task_parameters: ContainerTaskParameters, - docker_auth: DockerBasicAuth, - log_file_url: LogFileUploadURL, - s3_settings: S3Settings | None, - expected_annotations, - ) -> TaskOutputData: - # get the task data - worker = get_worker() - task = worker.state.tasks.get(worker.get_current_task()) - assert task is not None - assert task.annotations == expected_annotations - assert task_parameters.command == ["run"] - event = distributed.Event(_DASK_EVENT_NAME) - event.wait(timeout=25) - - return TaskOutputData.model_validate({"some_output_key": 123}) - - # NOTE: We pass another fct so it can run in our localy created dask cluster - published_computation_task = await dask_client.send_computation_tasks( - user_id=user_id, - project_id=project_id, - tasks=image_params.fake_tasks, - callback=mocked_user_completed_cb, - remote_fct=functools.partial( - fake_sidecar_fct, expected_annotations=image_params.expected_annotations - ), - metadata=comp_run_metadata, - hardware_info=empty_hardware_info, - resource_tracking_run_id=resource_tracking_run_id, - ) - assert published_computation_task - assert len(published_computation_task) == 1 - - assert published_computation_task[0].node_id in image_params.fake_tasks - - # check status goes to PENDING/STARTED - await _assert_wait_for_task_status( - published_computation_task[0].job_id, - dask_client, - expected_status=RunningState.STARTED, - ) - - # check we have one worker using the resources - # one of the workers should now get the job and use the resources - worker_with_the_task: AnyUrl | None = None - async for attempt in 
AsyncRetrying(reraise=True, stop=stop_after_delay(10)): - with attempt: - cluster_details = await dask_client.get_cluster_details() - assert cluster_details - assert ( - cluster_details.scheduler.workers - ), f"there are no workers in {cluster_details.scheduler=!r}" - for worker_url, worker_data in cluster_details.scheduler.workers.items(): - if all( - worker_data.used_resources.get(res_name) == res_value - for res_name, res_value in image_params.expected_used_resources.items() - ): - worker_with_the_task = worker_url - assert ( - worker_with_the_task is not None - ), f"there is no worker in {cluster_details.scheduler.workers.keys()=} consuming {image_params.expected_annotations=!r}" - - # using the event we let the remote fct continue - event = distributed.Event(_DASK_EVENT_NAME, client=dask_client.backend.client) - await event.set() # type: ignore - - # wait for the task to complete - await _assert_wait_for_task_status( - published_computation_task[0].job_id, - dask_client, - expected_status=RunningState.SUCCESS, - ) - - # check the resources are released - cluster_details = await dask_client.get_cluster_details() - assert cluster_details - assert cluster_details.scheduler.workers - assert worker_with_the_task - currently_used_resources = cluster_details.scheduler.workers[ - worker_with_the_task - ].used_resources - - assert all(res == 0.0 for res in currently_used_resources.values()) diff --git a/services/director-v2/tests/unit/test_modules_dask_clients_pool.py b/services/director-v2/tests/unit/test_modules_dask_clients_pool.py index d3c6274fa7c4..3bd19025251b 100644 --- a/services/director-v2/tests/unit/test_modules_dask_clients_pool.py +++ b/services/director-v2/tests/unit/test_modules_dask_clients_pool.py @@ -22,7 +22,7 @@ ) from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict -from simcore_service_director_v2.core.application import init_app +from simcore_service_director_v2.core.application import create_app from simcore_service_director_v2.core.errors import ( ConfigurationError, DaskClientAcquisisitonError, @@ -54,7 +54,7 @@ def test_dask_clients_pool_missing_raises_configuration_error( ): monkeypatch.setenv("COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED", "0") settings = AppSettings.create_from_envs() - app = init_app(settings) + app = create_app(settings) with TestClient(app, raise_server_exceptions=True): # noqa: SIM117 with pytest.raises(ConfigurationError): @@ -70,7 +70,7 @@ def test_dask_clients_pool_properly_setup_and_deleted( ) mocked_dask_clients_pool.create.return_value = mocked_dask_clients_pool settings = AppSettings.create_from_envs() - app = init_app(settings) + app = create_app(settings) with TestClient(app, raise_server_exceptions=True): mocked_dask_clients_pool.create.assert_called_once() @@ -164,11 +164,11 @@ async def test_dask_clients_pool_acquisition_creates_client_on_demand( cluster_type=ClusterTypeInModel.ON_PREMISE, ) ) - async with clients_pool.acquire(cluster): + async with clients_pool.acquire(cluster, ref=f"test-ref-{cluster.name}"): # on start it is created mocked_dask_client.create.assert_has_calls(mocked_creation_calls) - async with clients_pool.acquire(cluster): + async with clients_pool.acquire(cluster, ref=f"test-ref-{cluster.name}-2"): # the connection already exists, so there is no new call to create mocked_dask_client.create.assert_has_calls(mocked_creation_calls) @@ -196,7 +196,9 @@ async def test_acquiring_wrong_cluster_raises_exception( non_existing_cluster = fake_clusters(1)[0] with 
pytest.raises(DaskClientAcquisisitonError): - async with clients_pool.acquire(non_existing_cluster): + async with clients_pool.acquire( + non_existing_cluster, ref="test-non-existing-ref" + ): ... @@ -239,7 +241,9 @@ async def test_acquire_default_cluster( dask_scheduler_settings = the_app.state.settings.DIRECTOR_V2_COMPUTATIONAL_BACKEND default_cluster = dask_scheduler_settings.default_cluster assert default_cluster - async with dask_clients_pool.acquire(default_cluster) as dask_client: + async with dask_clients_pool.acquire( + default_cluster, ref="test-default-cluster-ref" + ) as dask_client: def just_a_quick_fct(x, y): return x + y @@ -252,3 +256,63 @@ def just_a_quick_fct(x, y): assert future result = await future.result(timeout=10) assert result == 35 + + +async def test_dask_clients_pool_reference_counting( + minimal_dask_config: None, + mocker: MockerFixture, + client: TestClient, + fake_clusters: Callable[[int], list[BaseCluster]], +): + """Test that the reference counting mechanism works correctly.""" + assert client.app + the_app = cast(FastAPI, client.app) + mocked_dask_client = mocker.patch( + "simcore_service_director_v2.modules.dask_clients_pool.DaskClient", + autospec=True, + ) + mocked_dask_client.create.return_value = mocked_dask_client + clients_pool = DaskClientsPool.instance(the_app) + + # Create a cluster + cluster = fake_clusters(1)[0] + + # Acquire the client with first reference + ref1 = "test-ref-1" + async with clients_pool.acquire(cluster, ref=ref1): + # Client should be created + mocked_dask_client.create.assert_called_once() + # Reset the mock to check the next call + mocked_dask_client.create.reset_mock() + mocked_dask_client.delete.assert_not_called() + + # calling again with the same reference should not create a new client + async with clients_pool.acquire(cluster, ref=ref1): + # Client should NOT be re-created + mocked_dask_client.create.assert_not_called() + + mocked_dask_client.delete.assert_not_called() + + # Acquire the same client with second reference + ref2 = "test-ref-2" + async with clients_pool.acquire(cluster, ref=ref2): + # No new client should be created + mocked_dask_client.create.assert_not_called() + mocked_dask_client.delete.assert_not_called() + + # Release first reference, client should still exist + await clients_pool.release_client_ref(ref1) + mocked_dask_client.delete.assert_not_called() + + # Release second reference, which should delete the client + await clients_pool.release_client_ref(ref2) + mocked_dask_client.delete.assert_called_once() + + # calling again should not raise and not delete more + await clients_pool.release_client_ref(ref2) + mocked_dask_client.delete.assert_called_once() + + # Acquire again should create a new client + mocked_dask_client.create.reset_mock() + async with clients_pool.acquire(cluster, ref="test-ref-3"): + mocked_dask_client.create.assert_called_once() diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_compose_specs.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_compose_specs.py index 947261733950..c10b69c3ca1d 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_compose_specs.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_compose_specs.py @@ -8,13 +8,13 @@ import pytest import yaml -from models_library.docker import to_simcore_runtime_docker_label_key from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from 
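test_dask_clients_pool_reference_counting above exercises acquire/release semantics: the client is created on the first acquire, shared by subsequent references, and deleted only once the last reference is released. A toy, self-contained model of that behaviour (this class is an assumption for illustration, not the DaskClientsPool API):

```python
import asyncio
from contextlib import asynccontextmanager


class RefCountedPool:
    """Toy pool: creates a client on first acquire, deletes it on last release."""

    def __init__(self) -> None:
        self._client: object | None = None
        self._refs: set[str] = set()

    @property
    def has_client(self) -> bool:
        return self._client is not None

    @asynccontextmanager
    async def acquire(self, ref: str):
        if self._client is None:
            self._client = object()  # stand-in for an expensive client creation
        self._refs.add(ref)
        yield self._client

    async def release_client_ref(self, ref: str) -> None:
        self._refs.discard(ref)
        if not self._refs and self._client is not None:
            self._client = None  # stand-in for deleting/closing the client


async def _demo() -> None:
    pool = RefCountedPool()
    async with pool.acquire("ref-1"), pool.acquire("ref-2"):
        pass
    await pool.release_client_ref("ref-1")
    assert pool.has_client  # still referenced by "ref-2"
    await pool.release_client_ref("ref-2")
    assert not pool.has_client  # last reference released


if __name__ == "__main__":
    asyncio.run(_demo())
```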
models_library.service_settings_labels import ( ComposeSpecLabelDict, SimcoreServiceLabels, ) +from models_library.services_metadata_runtime import to_simcore_runtime_docker_label_key from models_library.services_resources import ( DEFAULT_SINGLE_SERVICE_NAME, ResourcesDict, @@ -131,7 +131,7 @@ async def test_inject_resource_limits_and_reservations( assert spec["deploy"]["resources"]["limits"]["memory"] == f"{memory.limit}" assert ( - f"{CPU_RESOURCE_LIMIT_KEY}={int(float(cpu.limit)*10**9)}" + f"{CPU_RESOURCE_LIMIT_KEY}={int(float(cpu.limit) * 10**9)}" in spec["environment"] ) assert f"{MEM_RESOURCE_LIMIT_KEY}={memory.limit}" in spec["environment"] @@ -143,7 +143,7 @@ async def test_inject_resource_limits_and_reservations( assert spec["cpus"] == max(cpu.limit, cpu.reservation) assert ( - f"{CPU_RESOURCE_LIMIT_KEY}={int(max(cpu.limit, cpu.reservation)*10**9)}" + f"{CPU_RESOURCE_LIMIT_KEY}={int(max(cpu.limit, cpu.reservation) * 10**9)}" in spec["environment"] ) assert f"{MEM_RESOURCE_LIMIT_KEY}={memory.limit}" in spec["environment"] diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_service_specs_sidecar.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_service_specs_sidecar.py index 27cdb8319144..d03cf3a929a7 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_service_specs_sidecar.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_service_specs_sidecar.py @@ -41,7 +41,6 @@ "DYNAMIC_SIDECAR_TRACING", "NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS", "POSTGRES_DB", - "POSTGRES_ENDPOINT", "POSTGRES_HOST", "POSTGRES_PASSWORD", "POSTGRES_PORT", @@ -55,6 +54,7 @@ "RABBIT_PORT", "RABBIT_SECURE", "RABBIT_USER", + "REDIS_SETTINGS", "S3_ACCESS_KEY", "S3_BUCKET_NAME", "S3_ENDPOINT", diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py index 77c1e033ef6c..e2c79a8287d1 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py @@ -12,6 +12,8 @@ import pytest import respx +from common_library.json_serialization import json_dumps +from common_library.serialization import model_dump_with_secrets from faker import Faker from fastapi import FastAPI from models_library.api_schemas_directorv2.dynamic_services_service import ( @@ -23,6 +25,8 @@ from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict from respx.router import MockRouter +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings from simcore_service_director_v2.models.dynamic_services_scheduler import ( DockerContainerInspect, DynamicSidecarStatus, @@ -53,10 +57,12 @@ # and ensure faster tests _TEST_SCHEDULER_INTERVAL_SECONDS: Final[NonNegativeFloat] = 0.1 -log = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) -pytest_simcore_core_services_selection = ["postgres"] +pytest_simcore_core_services_selection = [ + "rabbit", +] pytest_simcore_ops_services_selection = ["adminer"] @@ -124,9 +130,10 @@ async def _assert_get_dynamic_services_mocked( @pytest.fixture def mock_env( + use_in_memory_redis: RedisSettings, mock_exclusive: None, disable_postgres: None, - disable_rabbitmq: None, + rabbit_service: RabbitSettings, mock_env: EnvVarsDict, monkeypatch: pytest.MonkeyPatch, simcore_services_network_name: str, @@ -144,6 +151,10 @@ def 
mock_env( monkeypatch.setenv("S3_REGION", faker.pystr()) monkeypatch.setenv("S3_SECRET_KEY", faker.pystr()) monkeypatch.setenv("S3_BUCKET_NAME", faker.pystr()) + monkeypatch.setenv( + "DIRECTOR_V2_RABBITMQ", + json_dumps(model_dump_with_secrets(rabbit_service, show_secrets=True)), + ) @pytest.fixture @@ -164,7 +175,7 @@ async def action( scheduler_data: SchedulerData, # noqa: ARG003 ) -> None: message = f"{cls.__name__} action triggered" - log.warning(message) + _logger.warning(message) # replace REGISTERED EVENTS mocker.patch( diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler_task.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler_task.py index fd328bd66aae..4ce2d40ed545 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler_task.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler_task.py @@ -12,6 +12,8 @@ import httpx import pytest import respx +from common_library.json_serialization import json_dumps +from common_library.serialization import model_dump_with_secrets from faker import Faker from fastapi import FastAPI from models_library.docker import DockerNodeID @@ -20,6 +22,8 @@ from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from respx.router import MockRouter +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings from simcore_service_director_v2.models.dynamic_services_scheduler import SchedulerData from simcore_service_director_v2.modules.dynamic_sidecar.api_client._public import ( SidecarsClient, @@ -38,13 +42,18 @@ DynamicSidecarsScheduler, ) +pytest_simcore_core_services_selection = [ + "rabbit", +] + SCHEDULER_INTERVAL_SECONDS: Final[float] = 0.1 @pytest.fixture def mock_env( + use_in_memory_redis: RedisSettings, disable_postgres: None, - disable_rabbitmq: None, + rabbit_service: RabbitSettings, mock_env: EnvVarsDict, monkeypatch: pytest.MonkeyPatch, simcore_services_network_name: str, @@ -62,7 +71,11 @@ def mock_env( "POSTGRES_USER": "", "POSTGRES_PASSWORD": "", "POSTGRES_DB": "", + "DIRECTOR_V2_RABBITMQ": json_dumps( + model_dump_with_secrets(rabbit_service, show_secrets=True) + ), } + setenvs_from_dict(monkeypatch, disabled_services_envs) monkeypatch.setenv("DIRECTOR_V2_DYNAMIC_SCHEDULER_ENABLED", "true") diff --git a/services/director-v2/tests/unit/test_modules_notifier.py b/services/director-v2/tests/unit/test_modules_notifier.py index 357edc68af8b..f30091676c55 100644 --- a/services/director-v2/tests/unit/test_modules_notifier.py +++ b/services/director-v2/tests/unit/test_modules_notifier.py @@ -23,6 +23,7 @@ from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.utils import logged_gather from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings from simcore_service_director_v2.core.settings import AppSettings from simcore_service_director_v2.modules.notifier import ( publish_shutdown_no_more_credits, @@ -34,6 +35,7 @@ pytest_simcore_core_services_selection = [ "rabbit", + "redis", ] @@ -127,6 +129,7 @@ async def _assert_call_count(mock: AsyncMock, *, call_count: int) -> None: async def test_notifier_publish_message( + redis_service: RedisSettings, socketio_server_events: dict[str, AsyncMock], initialized_app: FastAPI, user_id: UserID, diff --git a/services/director-v2/tests/unit/test_utils_computation.py 
b/services/director-v2/tests/unit/test_utils_computation.py index ef276d5bc695..bd10cb74a1f4 100644 --- a/services/director-v2/tests/unit/test_utils_computation.py +++ b/services/director-v2/tests/unit/test_utils_computation.py @@ -266,6 +266,24 @@ def fake_task(fake_task_file: Path) -> CompTaskAtDB: RunningState.WAITING_FOR_RESOURCES, id="published and waiting for resources = waiting for resources", ), + pytest.param( + [ + (RunningState.SUCCESS), + (RunningState.WAITING_FOR_RESOURCES), + (RunningState.PUBLISHED), + ], + RunningState.STARTED, + id="success, published and waiting for resources = waiting for resources", + ), + pytest.param( + [ + (RunningState.SUCCESS), + (RunningState.WAITING_FOR_CLUSTER), + (RunningState.PUBLISHED), + ], + RunningState.STARTED, + id="success, published and waiting for cluster = waiting for resources", + ), ], ) def test_get_pipeline_state_from_task_states( @@ -278,9 +296,9 @@ def test_get_pipeline_state_from_task_states( ] pipeline_state: RunningState = get_pipeline_state_from_task_states(tasks) - assert ( - pipeline_state == exp_pipeline_state - ), f"task states are: {task_states}, got {pipeline_state} instead of {exp_pipeline_state}" + assert pipeline_state == exp_pipeline_state, ( + f"task states are: {task_states}, got {pipeline_state} instead of {exp_pipeline_state}" + ) @pytest.mark.parametrize( @@ -297,7 +315,7 @@ def test_get_pipeline_state_from_task_states( ], ) def test_is_pipeline_running(state, exp: bool): - assert ( - is_pipeline_running(state) is exp - ), f"pipeline in {state}, i.e. running state should be {exp}" + assert is_pipeline_running(state) is exp, ( + f"pipeline in {state}, i.e. running state should be {exp}" + ) assert is_pipeline_stopped is not exp diff --git a/services/director-v2/tests/unit/test_utils_dags.py b/services/director-v2/tests/unit/test_utils_dags.py index 0fc17030a2d4..3229c98c2ff2 100644 --- a/services/director-v2/tests/unit/test_utils_dags.py +++ b/services/director-v2/tests/unit/test_utils_dags.py @@ -390,13 +390,6 @@ def pipeline_test_params( list_comp_tasks: list[CompTaskAtDB], expected_pipeline_details_output: PipelineDetails, ) -> PipelineDetailsTestParams: - # check the inputs make sense - assert len(set(dag_adjacency)) == len(node_keys) == len(list_comp_tasks) - assert dag_adjacency.keys() == node_keys.keys() - assert len( - {t.node_id for t in list_comp_tasks}.intersection(node_keys.keys()) - ) == len(set(dag_adjacency)) - # resolve the naming node_name_to_uuid_map = {} resolved_dag_adjacency: dict[str, list[str]] = {} @@ -596,3 +589,95 @@ async def test_compute_pipeline_details( received_details.model_dump() == pipeline_test_params.expected_pipeline_details.model_dump() ) + + +@pytest.mark.parametrize( + "dag_adjacency, node_keys, list_comp_tasks, expected_pipeline_details_output", + [ + pytest.param( + {"node_1": ["node_2", "node_3"], "node_2": ["node_3"], "node_3": []}, + { + "node_1": { + "key": "simcore/services/comp/fake", + "node_class": NodeClass.COMPUTATIONAL, + "state": RunningState.NOT_STARTED, + "outputs": None, + }, + "node_2": { + "key": "simcore/services/comp/fake", + "node_class": NodeClass.COMPUTATIONAL, + "state": RunningState.NOT_STARTED, + "outputs": None, + }, + "node_3": { + "key": "simcore/services/comp/fake", + "node_class": NodeClass.COMPUTATIONAL, + "state": RunningState.NOT_STARTED, + "outputs": None, + }, + }, + [ + # NOTE: we use construct here to be able to use non uuid names to simplify test setup + CompTaskAtDB.model_construct( + project_id=uuid4(), + node_id="node_1", + 
schema=NodeSchema(inputs={}, outputs={}), + inputs=None, + image=Image(name="simcore/services/comp/fake", tag="1.3.4"), + state=RunningState.NOT_STARTED, + internal_id=2, + node_class=NodeClass.COMPUTATIONAL, + created=datetime.datetime.now(tz=datetime.UTC), + modified=datetime.datetime.now(tz=datetime.UTC), + last_heartbeat=None, + ), + CompTaskAtDB.model_construct( + project_id=uuid4(), + node_id="node_2", + schema=NodeSchema(inputs={}, outputs={}), + inputs=None, + image=Image(name="simcore/services/comp/fake", tag="1.3.4"), + state=RunningState.NOT_STARTED, + internal_id=3, + node_class=NodeClass.COMPUTATIONAL, + created=datetime.datetime.now(tz=datetime.UTC), + modified=datetime.datetime.now(tz=datetime.UTC), + last_heartbeat=None, + ), + ], + PipelineDetails.model_construct( + adjacency_list={ + "node_1": ["node_2", "node_3"], + "node_2": ["node_3"], + "node_3": [], + }, + progress=0.0, + node_states={ + "node_1": NodeState(modified=True, progress=None), + "node_2": NodeState(modified=True, progress=None), + "node_3": NodeState( + modified=True, + progress=None, + current_status=RunningState.UNKNOWN, + ), + }, + ), + id="dag with missing tasks (node 3 is missing, so it is not skipped in the pipeline details)", + ) + ], +) +@pytest.mark.acceptance_test( + "For https://github.com/ITISFoundation/osparc-simcore/issues/8172" +) +async def test_compute_pipeline_details_with_missing_tasks( + pipeline_test_params: PipelineDetailsTestParams, +): + received_details = await compute_pipeline_details( + pipeline_test_params.complete_dag, + pipeline_test_params.pipeline_dag, + pipeline_test_params.comp_tasks, + ) + assert ( + received_details.model_dump() + == pipeline_test_params.expected_pipeline_details.model_dump() + ) diff --git a/services/director-v2/tests/unit/test_utils_distributed_identifier.py b/services/director-v2/tests/unit/test_utils_distributed_identifier.py deleted file mode 100644 index c7ad46b74a9f..000000000000 --- a/services/director-v2/tests/unit/test_utils_distributed_identifier.py +++ /dev/null @@ -1,359 +0,0 @@ -# pylint:disable=protected-access -# pylint:disable=redefined-outer-name - -import asyncio -import string -from collections.abc import AsyncIterable, AsyncIterator -from dataclasses import dataclass -from secrets import choice -from typing import Final -from uuid import UUID, uuid4 - -import pytest -from pydantic import BaseModel, NonNegativeInt -from pytest_mock import MockerFixture -from servicelib.redis import RedisClientSDK -from servicelib.utils import logged_gather -from settings_library.redis import RedisDatabase, RedisSettings -from simcore_service_director_v2.utils.base_distributed_identifier import ( - BaseDistributedIdentifierManager, -) - -pytest_simcore_core_services_selection = [ - "redis", -] - -pytest_simcore_ops_services_selection = [ - # "redis-commander", -] - -# if this goes too high, max open file limit is reached -_MAX_REDIS_CONCURRENCY: Final[NonNegativeInt] = 1000 - - -class UserDefinedID: - # define a custom type of ID for the API - # by choice it is hard to serialize/deserialize - - def __init__(self, uuid: UUID | None = None) -> None: - self._id = uuid if uuid else uuid4() - - def __eq__(self, other: "UserDefinedID") -> bool: - return self._id == other._id - - # only necessary for nice looking IDs in the logs - def __repr__(self) -> str: - return f"" - - # only necessary for RandomTextAPI - def __hash__(self): - return hash(str(self._id)) - - -class RandomTextEntry(BaseModel): - text: str - - @classmethod - def create(cls, length: int) -> 
"RandomTextEntry": - letters_and_digits = string.ascii_letters + string.digits - text = "".join(choice(letters_and_digits) for _ in range(length)) - return cls(text=text) - - -class RandomTextAPI: - # Emulates an external API - # used to create resources - - def __init__(self) -> None: - self._created: dict[UserDefinedID, RandomTextEntry] = {} - - def create(self, length: int) -> tuple[UserDefinedID, RandomTextEntry]: - identifier = UserDefinedID(uuid4()) - self._created[identifier] = RandomTextEntry.create(length) - return identifier, self._created[identifier] - - def delete(self, identifier: UserDefinedID) -> None: - del self._created[identifier] - - def get(self, identifier: UserDefinedID) -> RandomTextEntry | None: - return self._created.get(identifier, None) - - -@dataclass -class ComponentUsingRandomText: - # Emulates another component in the system - # using the created resources - - _in_use: bool = True - - def is_used(self, an_id: UserDefinedID) -> bool: - _ = an_id - return self._in_use - - def toggle_usage(self, in_use: bool) -> None: - self._in_use = in_use - - -class AnEmptyTextCleanupContext(BaseModel): - # nothing is required during cleanup, so the context - # is an empty object. - # A ``pydantic.BaseModel`` is used for convenience - # this could have inherited from ``object`` - ... - - -class RandomTextResourcesManager( - BaseDistributedIdentifierManager[ - UserDefinedID, RandomTextEntry, AnEmptyTextCleanupContext - ] -): - # Implements a resource manager for handling the lifecycle of - # resources created by a service. - # It also comes in with automatic cleanup in case the service owing - # the resources failed to removed them in the past. - - def __init__( - self, - redis_client_sdk: RedisClientSDK, - component_using_random_text: ComponentUsingRandomText, - ) -> None: - # THESE two systems would normally come stored in the `app` context - self.api = RandomTextAPI() - self.component_using_random_text = component_using_random_text - - super().__init__(redis_client_sdk) - - @classmethod - def _deserialize_identifier(cls, raw: str) -> UserDefinedID: - return UserDefinedID(UUID(raw)) - - @classmethod - def _serialize_identifier(cls, identifier: UserDefinedID) -> str: - return f"{identifier._id}" # noqa: SLF001 - - @classmethod - def _deserialize_cleanup_context( - cls, raw: str | bytes - ) -> AnEmptyTextCleanupContext: - return AnEmptyTextCleanupContext.model_validate_json(raw) - - @classmethod - def _serialize_cleanup_context( - cls, cleanup_context: AnEmptyTextCleanupContext - ) -> str: - return cleanup_context.model_dump_json() - - async def is_used( - self, identifier: UserDefinedID, cleanup_context: AnEmptyTextCleanupContext - ) -> bool: - _ = cleanup_context - return self.component_using_random_text.is_used(identifier) - - # NOTE: it is intended for the user to overwrite the **kwargs with custom names - # to provide a cleaner interface, tooling will complain slightly - async def _create( # pylint:disable=arguments-differ # type:ignore [override] - self, length: int - ) -> tuple[UserDefinedID, RandomTextEntry]: - return self.api.create(length) - - async def get(self, identifier: UserDefinedID, **_) -> RandomTextEntry | None: - return self.api.get(identifier) - - async def _destroy( - self, identifier: UserDefinedID, _: AnEmptyTextCleanupContext - ) -> None: - self.api.delete(identifier) - - -@pytest.fixture -async def redis_client_sdk( - redis_service: RedisSettings, -) -> AsyncIterator[RedisClientSDK]: - redis_resources_dns = redis_service.build_redis_dsn( - 
RedisDatabase.DISTRIBUTED_IDENTIFIERS - ) - - client = RedisClientSDK(redis_resources_dns, client_name="pytest") - assert client - assert client.redis_dsn == redis_resources_dns - # cleanup, previous run's leftovers - await client.redis.flushall() - - yield client - # cleanup, properly close the clients - await client.redis.flushall() - await client.shutdown() - - -@pytest.fixture -def component_using_random_text() -> ComponentUsingRandomText: - return ComponentUsingRandomText() - - -@pytest.fixture -async def manager_with_no_cleanup_task( - redis_client_sdk: RedisClientSDK, - component_using_random_text: ComponentUsingRandomText, -) -> RandomTextResourcesManager: - return RandomTextResourcesManager(redis_client_sdk, component_using_random_text) - - -@pytest.fixture -async def manager( - manager_with_no_cleanup_task: RandomTextResourcesManager, -) -> AsyncIterable[RandomTextResourcesManager]: - await manager_with_no_cleanup_task.setup() - yield manager_with_no_cleanup_task - await manager_with_no_cleanup_task.shutdown() - - -async def test_resource_is_missing(manager: RandomTextResourcesManager): - missing_identifier = UserDefinedID() - assert await manager.get(missing_identifier) is None - - -@pytest.mark.parametrize("delete_before_removal", [True, False]) -async def test_full_workflow( - manager: RandomTextResourcesManager, delete_before_removal: bool -): - # creation - identifier, _ = await manager.create( - cleanup_context=AnEmptyTextCleanupContext(), length=1 - ) - assert await manager.get(identifier) is not None - - # optional removal - if delete_before_removal: - await manager.remove(identifier) - - is_still_present = not delete_before_removal - assert (await manager.get(identifier) is not None) is is_still_present - - # safe remove the resource - await manager.remove(identifier) - - # resource no longer exists - assert await manager.get(identifier) is None - - -@pytest.mark.parametrize("reraise", [True, False]) -async def test_remove_raises_error( - mocker: MockerFixture, - manager: RandomTextResourcesManager, - caplog: pytest.LogCaptureFixture, - reraise: bool, -): - caplog.clear() - - error_message = "mock error during resource destroy" - mocker.patch.object(manager, "_destroy", side_effect=RuntimeError(error_message)) - - # after creation object is present - identifier, _ = await manager.create( - cleanup_context=AnEmptyTextCleanupContext(), length=1 - ) - assert await manager.get(identifier) is not None - - if reraise: - with pytest.raises(RuntimeError): - await manager.remove(identifier, reraise=reraise) - else: - await manager.remove(identifier, reraise=reraise) - # check logs in case of error - assert "Unhandled exception:" in caplog.text - assert error_message in caplog.text - - -async def _create_resources( - manager: RandomTextResourcesManager, count: int -) -> list[UserDefinedID]: - creation_results: list[tuple[UserDefinedID, RandomTextEntry]] = await logged_gather( - *[ - manager.create(cleanup_context=AnEmptyTextCleanupContext(), length=1) - for _ in range(count) - ], - max_concurrency=_MAX_REDIS_CONCURRENCY, - ) - return [x[0] for x in creation_results] - - -async def _assert_all_resources( - manager: RandomTextResourcesManager, - identifiers: list[UserDefinedID], - *, - exist: bool, -) -> None: - get_results: list[RandomTextEntry | None] = await logged_gather( - *[manager.get(identifier) for identifier in identifiers], - max_concurrency=_MAX_REDIS_CONCURRENCY, - ) - if exist: - assert all(x is not None for x in get_results) - else: - assert all(x is None for x in 
get_results) - - -@pytest.mark.parametrize("count", [1000]) -async def test_parallel_create_remove(manager: RandomTextResourcesManager, count: int): - # create resources - identifiers: list[UserDefinedID] = await _create_resources(manager, count) - await _assert_all_resources(manager, identifiers, exist=True) - - # safe remove the resources, they do not exist any longer - await asyncio.gather(*[manager.remove(identifier) for identifier in identifiers]) - await _assert_all_resources(manager, identifiers, exist=False) - - -async def test_background_removal_of_unused_resources( - manager_with_no_cleanup_task: RandomTextResourcesManager, - component_using_random_text: ComponentUsingRandomText, -): - # create resources - identifiers: list[UserDefinedID] = await _create_resources( - manager_with_no_cleanup_task, 10_000 - ) - await _assert_all_resources(manager_with_no_cleanup_task, identifiers, exist=True) - - # call cleanup, all resources still exist - await manager_with_no_cleanup_task._cleanup_unused_identifiers() # noqa: SLF001 - await _assert_all_resources(manager_with_no_cleanup_task, identifiers, exist=True) - - # make resources unused in external system - component_using_random_text.toggle_usage(in_use=False) - await manager_with_no_cleanup_task._cleanup_unused_identifiers() # noqa: SLF001 - await _assert_all_resources(manager_with_no_cleanup_task, identifiers, exist=False) - - -async def test_no_redis_key_overlap_when_inheriting( - redis_client_sdk: RedisClientSDK, - component_using_random_text: ComponentUsingRandomText, -): - class ChildRandomTextResourcesManager(RandomTextResourcesManager): - ... - - parent_manager = RandomTextResourcesManager( - redis_client_sdk, component_using_random_text - ) - child_manager = ChildRandomTextResourcesManager( - redis_client_sdk, component_using_random_text - ) - - # create an entry in the child and one in the parent - - parent_identifier, _ = await parent_manager.create( - cleanup_context=AnEmptyTextCleanupContext(), length=1 - ) - child_identifier, _ = await child_manager.create( - cleanup_context=AnEmptyTextCleanupContext(), length=1 - ) - assert parent_identifier != child_identifier - - keys = await redis_client_sdk.redis.keys("*") - assert len(keys) == 2 - - # check keys contain the correct prefixes - key_prefixes: set[str] = {k.split(":")[0] for k in keys} - assert key_prefixes == { - RandomTextResourcesManager.class_path(), - ChildRandomTextResourcesManager.class_path(), - } diff --git a/services/director-v2/tests/unit/with_dbs/comp_scheduler/conftest.py b/services/director-v2/tests/unit/with_dbs/comp_scheduler/conftest.py index 0804a848d356..b60be5bdcc02 100644 --- a/services/director-v2/tests/unit/with_dbs/comp_scheduler/conftest.py +++ b/services/director-v2/tests/unit/with_dbs/comp_scheduler/conftest.py @@ -8,6 +8,7 @@ # pylint: disable=too-many-statements +import datetime from unittest import mock import pytest @@ -69,3 +70,27 @@ def with_disabled_scheduler_publisher(mocker: MockerFixture) -> mock.Mock: "simcore_service_director_v2.modules.comp_scheduler._manager.request_pipeline_scheduling", autospec=True, ) + + +@pytest.fixture +def with_short_max_wait_for_cluster( + monkeypatch: pytest.MonkeyPatch, mocker: MockerFixture +) -> datetime.timedelta: + short_time = datetime.timedelta(seconds=2) + setenvs_from_dict( + monkeypatch, + {"COMPUTATIONAL_BACKEND_MAX_WAITING_FOR_CLUSTER_TIMEOUT": f"{short_time}"}, + ) + return short_time + + +@pytest.fixture +def with_short_max_wait_for_retrieving_results( + monkeypatch: pytest.MonkeyPatch, 
mocker: MockerFixture +) -> datetime.timedelta: + short_time = datetime.timedelta(seconds=2) + setenvs_from_dict( + monkeypatch, + {"COMPUTATIONAL_BACKEND_MAX_WAITING_FOR_RETRIEVING_RESULTS": f"{short_time}"}, + ) + return short_time diff --git a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_route_computations.py b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_route_computations.py index 368b6c273b57..44604713fa0e 100644 --- a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_route_computations.py +++ b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_route_computations.py @@ -33,6 +33,7 @@ RutPricingPlanGet, RutPricingUnitGet, ) +from models_library.computations import CollectionRunID from models_library.projects import ProjectAtDB from models_library.projects_nodes import NodeID, NodeState from models_library.projects_pipeline import PipelineDetails @@ -88,6 +89,7 @@ def minimal_configuration( faker: Faker, with_disabled_auto_scheduling: mock.Mock, with_disabled_scheduler_publisher: mock.Mock, + with_product: dict[str, Any], ): monkeypatch.setenv("DIRECTOR_V2_DYNAMIC_SIDECAR_ENABLED", "false") monkeypatch.setenv("COMPUTATIONAL_BACKEND_DASK_CLIENT_ENABLED", "1") @@ -363,11 +365,6 @@ def _mocked_get_pricing_unit(request, pricing_plan_id: int) -> httpx.Response: yield respx_mock -@pytest.fixture -def product_name(faker: Faker) -> str: - return faker.name() - - @pytest.fixture def product_api_base_url(faker: Faker) -> AnyHttpUrl: return TypeAdapter(AnyHttpUrl).validate_python(faker.url()) @@ -375,14 +372,15 @@ def product_api_base_url(faker: Faker) -> AnyHttpUrl: async def test_computation_create_validators( create_registered_user: Callable[..., dict[str, Any]], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], fake_workbench_without_outputs: dict[str, Any], product_name: str, product_api_base_url: AnyHttpUrl, - faker: Faker, + with_product: dict[str, Any], + fake_collection_run_id: CollectionRunID, ): user = create_registered_user() - proj = await project(user, workbench=fake_workbench_without_outputs) + proj = await create_project(user, workbench=fake_workbench_without_outputs) ComputationCreate( user_id=user["id"], project_id=proj.uuid, @@ -407,11 +405,12 @@ async def test_create_computation( product_api_base_url: AnyHttpUrl, fake_workbench_without_outputs: dict[str, Any], create_registered_user: Callable[..., dict[str, Any]], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], async_client: httpx.AsyncClient, + fake_collection_run_id: CollectionRunID, ): user = create_registered_user() - proj = await project(user, workbench=fake_workbench_without_outputs) + proj = await create_project(user, workbench=fake_workbench_without_outputs) create_computation_url = httpx.URL("/v2/computations") response = await async_client.post( create_computation_url, @@ -508,7 +507,7 @@ async def test_create_computation_with_wallet( product_api_base_url: AnyHttpUrl, fake_workbench_without_outputs: dict[str, Any], create_registered_user: Callable[..., dict[str, Any]], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], async_client: httpx.AsyncClient, wallet_info: WalletInfo, project_nodes_overrides: dict[str, Any], @@ -516,13 +515,14 @@ async def test_create_computation_with_wallet( sqlalchemy_async_engine: AsyncEngine, fake_ec2_cpus: PositiveInt, fake_ec2_ram: ByteSize, + 
fake_collection_run_id: CollectionRunID, ): # In billable product a wallet is passed, with a selected pricing plan # the pricing plan contains information about the hardware that should be used # this will then override the original service resources user = create_registered_user() - proj = await project( + proj = await create_project( user, project_nodes_overrides={"required_resources": project_nodes_overrides}, workbench=fake_workbench_without_outputs, @@ -620,12 +620,13 @@ async def test_create_computation_with_wallet_with_invalid_pricing_unit_name_rai product_api_base_url: AnyHttpUrl, fake_workbench_without_outputs: dict[str, Any], create_registered_user: Callable[..., dict[str, Any]], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], async_client: httpx.AsyncClient, wallet_info: WalletInfo, + fake_collection_run_id: CollectionRunID, ): user = create_registered_user() - proj = await project( + proj = await create_project( user, workbench=fake_workbench_without_outputs, ) @@ -663,12 +664,13 @@ async def test_create_computation_with_wallet_with_no_clusters_keeper_raises_503 product_api_base_url: AnyHttpUrl, fake_workbench_without_outputs: dict[str, Any], create_registered_user: Callable[..., dict[str, Any]], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], async_client: httpx.AsyncClient, wallet_info: WalletInfo, + fake_collection_run_id: CollectionRunID, ): user = create_registered_user() - proj = await project(user, workbench=fake_workbench_without_outputs) + proj = await create_project(user, workbench=fake_workbench_without_outputs) create_computation_url = httpx.URL("/v2/computations") response = await async_client.post( create_computation_url, @@ -692,15 +694,42 @@ async def test_start_computation_without_product_fails( product_name: str, fake_workbench_without_outputs: dict[str, Any], create_registered_user: Callable[..., dict[str, Any]], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], + async_client: httpx.AsyncClient, + fake_collection_run_id: CollectionRunID, +): + user = create_registered_user() + proj = await create_project(user, workbench=fake_workbench_without_outputs) + create_computation_url = httpx.URL("/v2/computations") + response = await async_client.post( + create_computation_url, + json={ + "user_id": f"{user['id']}", + "project_id": f"{proj.uuid}", + "start_pipeline": f"{True}", + "collection_run_id": f"{fake_collection_run_id}", + }, + ) + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, response.text + + +async def test_start_computation_without_collection_run_id_fails( + minimal_configuration: None, + mocked_director_service_fcts: respx.MockRouter, + mocked_catalog_service_fcts: respx.MockRouter, + product_name: str, + fake_workbench_without_outputs: dict[str, Any], + create_registered_user: Callable[..., dict[str, Any]], + create_project: Callable[..., Awaitable[ProjectAtDB]], async_client: httpx.AsyncClient, ): user = create_registered_user() - proj = await project(user, workbench=fake_workbench_without_outputs) + proj = await create_project(user, workbench=fake_workbench_without_outputs) create_computation_url = httpx.URL("/v2/computations") response = await async_client.post( create_computation_url, json={ + "product_name": product_name, "user_id": f"{user['id']}", "project_id": f"{proj.uuid}", "start_pipeline": f"{True}", @@ -717,11 +746,12 @@ async def 
test_start_computation( product_api_base_url: AnyHttpUrl, fake_workbench_without_outputs: dict[str, Any], create_registered_user: Callable[..., dict[str, Any]], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], async_client: httpx.AsyncClient, + fake_collection_run_id: CollectionRunID, ): user = create_registered_user() - proj = await project(user, workbench=fake_workbench_without_outputs) + proj = await create_project(user, workbench=fake_workbench_without_outputs) create_computation_url = httpx.URL("/v2/computations") response = await async_client.post( create_computation_url, @@ -732,6 +762,7 @@ async def test_start_computation( start_pipeline=True, product_name=product_name, product_api_base_url=product_api_base_url, + collection_run_id=fake_collection_run_id, ) ), ) @@ -751,8 +782,9 @@ async def test_start_computation_with_project_node_resources_defined( product_api_base_url: AnyHttpUrl, fake_workbench_without_outputs: dict[str, Any], create_registered_user: Callable[..., dict[str, Any]], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], async_client: httpx.AsyncClient, + fake_collection_run_id: CollectionRunID, ): user = create_registered_user() assert "json_schema_extra" in ServiceResourcesDictHelpers.model_config @@ -762,7 +794,7 @@ async def test_start_computation_with_project_node_resources_defined( assert isinstance( ServiceResourcesDictHelpers.model_config["json_schema_extra"]["examples"], list ) - proj = await project( + proj = await create_project( user, project_nodes_overrides={ "required_resources": ServiceResourcesDictHelpers.model_config[ @@ -781,6 +813,7 @@ async def test_start_computation_with_project_node_resources_defined( start_pipeline=True, product_name=product_name, product_api_base_url=product_api_base_url, + collection_run_id=fake_collection_run_id, ) ), ) @@ -799,11 +832,12 @@ async def test_start_computation_with_deprecated_services_raises_406( fake_workbench_without_outputs: dict[str, Any], fake_workbench_adjacency: dict[str, Any], create_registered_user: Callable[..., dict[str, Any]], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], async_client: httpx.AsyncClient, + fake_collection_run_id: CollectionRunID, ): user = create_registered_user() - proj = await project(user, workbench=fake_workbench_without_outputs) + proj = await create_project(user, workbench=fake_workbench_without_outputs) create_computation_url = httpx.URL("/v2/computations") response = await async_client.post( create_computation_url, @@ -814,6 +848,7 @@ async def test_start_computation_with_deprecated_services_raises_406( start_pipeline=True, product_name=product_name, product_api_base_url=product_api_base_url, + collection_run_id=fake_collection_run_id, ) ), ) @@ -825,7 +860,7 @@ async def test_get_computation_from_empty_project( fake_workbench_without_outputs: dict[str, Any], fake_workbench_adjacency: dict[str, Any], create_registered_user: Callable[..., dict[str, Any]], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], create_pipeline: Callable[..., Awaitable[CompPipelineAtDB]], faker: Faker, async_client: httpx.AsyncClient, @@ -838,7 +873,7 @@ async def test_get_computation_from_empty_project( response = await async_client.get(get_computation_url) assert response.status_code == status.HTTP_404_NOT_FOUND, response.text # create the project - proj = await 
project(user, workbench=fake_workbench_without_outputs) + proj = await create_project(user, workbench=fake_workbench_without_outputs) get_computation_url = httpx.URL( f"/v2/computations/{proj.uuid}?user_id={user['id']}" ) @@ -854,7 +889,7 @@ async def test_get_computation_from_empty_project( assert returned_computation expected_computation = ComputationGet( id=proj.uuid, - state=RunningState.UNKNOWN, + state=RunningState.NOT_STARTED, pipeline_details=PipelineDetails( adjacency_list={}, node_states={}, progress=None ), @@ -876,13 +911,13 @@ async def test_get_computation_from_not_started_computation_task( fake_workbench_without_outputs: dict[str, Any], fake_workbench_adjacency: dict[str, Any], create_registered_user: Callable[..., dict[str, Any]], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], create_pipeline: Callable[..., Awaitable[CompPipelineAtDB]], - create_tasks: Callable[..., Awaitable[list[CompTaskAtDB]]], + create_tasks_from_project: Callable[..., Awaitable[list[CompTaskAtDB]]], async_client: httpx.AsyncClient, ): user = create_registered_user() - proj = await project(user, workbench=fake_workbench_without_outputs) + proj = await create_project(user, workbench=fake_workbench_without_outputs) get_computation_url = httpx.URL( f"/v2/computations/{proj.uuid}?user_id={user['id']}" ) @@ -895,7 +930,7 @@ async def test_get_computation_from_not_started_computation_task( assert response.status_code == status.HTTP_409_CONFLICT, response.text # now create the expected tasks and the state is good again - comp_tasks = await create_tasks(user=user, project=proj) + comp_tasks = await create_tasks_from_project(user=user, project=proj) response = await async_client.get(get_computation_url) assert response.status_code == status.HTTP_200_OK, response.text returned_computation = ComputationGet.model_validate(response.json()) @@ -911,7 +946,7 @@ async def test_get_computation_from_not_started_computation_task( node_states={ t.node_id: NodeState( modified=True, - currentStatus=RunningState.NOT_STARTED, + current_status=RunningState.NOT_STARTED, progress=None, dependencies={ NodeID(node) @@ -941,19 +976,19 @@ async def test_get_computation_from_published_computation_task( fake_workbench_without_outputs: dict[str, Any], fake_workbench_adjacency: dict[str, Any], create_registered_user: Callable[..., dict[str, Any]], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], create_pipeline: Callable[..., Awaitable[CompPipelineAtDB]], - create_tasks: Callable[..., Awaitable[list[CompTaskAtDB]]], + create_tasks_from_project: Callable[..., Awaitable[list[CompTaskAtDB]]], create_comp_run: Callable[..., Awaitable[CompRunsAtDB]], async_client: httpx.AsyncClient, ): user = create_registered_user() - proj = await project(user, workbench=fake_workbench_without_outputs) + proj = await create_project(user, workbench=fake_workbench_without_outputs) await create_pipeline( project_id=f"{proj.uuid}", dag_adjacency_list=fake_workbench_adjacency, ) - comp_tasks = await create_tasks( + comp_tasks = await create_tasks_from_project( user=user, project=proj, state=StateType.PUBLISHED, progress=0 ) comp_runs = await create_comp_run( @@ -983,7 +1018,7 @@ async def test_get_computation_from_published_computation_task( node_states={ t.node_id: NodeState( modified=True, - currentStatus=RunningState.PUBLISHED, + current_status=RunningState.PUBLISHED, dependencies={ NodeID(node) for node, next_nodes in 
fake_workbench_adjacency.items() diff --git a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_route_computations_tasks.py b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_route_computations_tasks.py index 2c539a7c2b6c..a9477379aec9 100644 --- a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_route_computations_tasks.py +++ b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_route_computations_tasks.py @@ -117,14 +117,15 @@ async def project_id( fake_workbench_without_outputs: dict[str, Any], fake_workbench_adjacency: dict[str, Any], user: dict[str, Any], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], create_pipeline: Callable[..., Awaitable[CompPipelineAtDB]], - create_tasks: Callable[..., Awaitable[list[CompTaskAtDB]]], + create_tasks_from_project: Callable[..., Awaitable[list[CompTaskAtDB]]], + with_product: dict[str, Any], ) -> ProjectID: """project uuid of a saved project (w/ tasks up-to-date)""" # insert project -> db - proj = await project(user, workbench=fake_workbench_without_outputs) + proj = await create_project(user, workbench=fake_workbench_without_outputs) # insert pipeline -> comp_pipeline await create_pipeline( @@ -132,7 +133,8 @@ async def project_id( dag_adjacency_list=fake_workbench_adjacency, ) # insert tasks -> comp_tasks - comp_tasks = await create_tasks(user=user, project=proj) + comp_tasks = await create_tasks_from_project(user=user, project=proj) + assert comp_tasks return proj.uuid @@ -166,7 +168,7 @@ async def test_get_all_tasks_log_files( assert resp.status_code == status.HTTP_200_OK log_files = TypeAdapter(list[TaskLogFileGet]).validate_json(resp.text) assert log_files - assert all(l.download_link for l in log_files) + assert all(file.download_link for file in log_files) async def test_get_task_logs_file( diff --git a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_rpc_computations.py b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_rpc_computations.py index b4358dfc789a..1e21400bcd92 100644 --- a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_rpc_computations.py +++ b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_rpc_computations.py @@ -10,10 +10,14 @@ from datetime import UTC, datetime, timedelta from typing import Any +from faker import Faker from models_library.api_schemas_directorv2.comp_runs import ( + ComputationCollectionRunRpcGetPage, + ComputationCollectionRunTaskRpcGetPage, ComputationRunRpcGetPage, ComputationTaskRpcGetPage, ) +from models_library.computations import CollectionRunID from models_library.projects import ProjectAtDB from models_library.projects_state import RunningState from servicelib.rabbitmq import RabbitMQRPCClient @@ -22,6 +26,9 @@ ) from simcore_postgres_database.models.comp_pipeline import StateType from simcore_service_director_v2.models.comp_pipelines import CompPipelineAtDB +from simcore_service_director_v2.models.comp_run_snapshot_tasks import ( + CompRunSnapshotTaskDBGet, +) from simcore_service_director_v2.models.comp_runs import CompRunsAtDB from simcore_service_director_v2.models.comp_tasks import CompTaskAtDB @@ -35,19 +42,20 @@ async def test_rpc_list_computation_runs_and_tasks( fake_workbench_without_outputs: dict[str, Any], fake_workbench_adjacency: dict[str, Any], create_registered_user: Callable[..., dict[str, Any]], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., 
Awaitable[ProjectAtDB]], create_pipeline: Callable[..., Awaitable[CompPipelineAtDB]], - create_tasks: Callable[..., Awaitable[list[CompTaskAtDB]]], + create_tasks_from_project: Callable[..., Awaitable[list[CompTaskAtDB]]], create_comp_run: Callable[..., Awaitable[CompRunsAtDB]], rpc_client: RabbitMQRPCClient, + with_product: dict[str, Any], ): user = create_registered_user() - proj = await project(user, workbench=fake_workbench_without_outputs) + proj = await create_project(user, workbench=fake_workbench_without_outputs) await create_pipeline( project_id=f"{proj.uuid}", dag_adjacency_list=fake_workbench_adjacency, ) - comp_tasks = await create_tasks( + comp_tasks = await create_tasks_from_project( user=user, project=proj, state=StateType.PUBLISHED, progress=None ) comp_runs = await create_comp_run( @@ -114,20 +122,21 @@ async def test_rpc_list_computation_runs_with_filtering( fake_workbench_without_outputs: dict[str, Any], fake_workbench_adjacency: dict[str, Any], create_registered_user: Callable[..., dict[str, Any]], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], create_pipeline: Callable[..., Awaitable[CompPipelineAtDB]], - create_tasks: Callable[..., Awaitable[list[CompTaskAtDB]]], + create_tasks_from_project: Callable[..., Awaitable[list[CompTaskAtDB]]], create_comp_run: Callable[..., Awaitable[CompRunsAtDB]], rpc_client: RabbitMQRPCClient, + with_product: dict[str, Any], ): user = create_registered_user() - proj_1 = await project(user, workbench=fake_workbench_without_outputs) + proj_1 = await create_project(user, workbench=fake_workbench_without_outputs) await create_pipeline( project_id=f"{proj_1.uuid}", dag_adjacency_list=fake_workbench_adjacency, ) - comp_tasks = await create_tasks( + comp_tasks = await create_tasks_from_project( user=user, project=proj_1, state=StateType.PUBLISHED, progress=None ) comp_runs = await create_comp_run( @@ -137,12 +146,12 @@ async def test_rpc_list_computation_runs_with_filtering( dag_adjacency_list=fake_workbench_adjacency, ) - proj_2 = await project(user, workbench=fake_workbench_without_outputs) + proj_2 = await create_project(user, workbench=fake_workbench_without_outputs) await create_pipeline( project_id=f"{proj_2.uuid}", dag_adjacency_list=fake_workbench_adjacency, ) - comp_tasks = await create_tasks( + comp_tasks = await create_tasks_from_project( user=user, project=proj_2, state=StateType.SUCCESS, progress=None ) comp_runs = await create_comp_run( @@ -170,22 +179,24 @@ async def test_rpc_list_computation_runs_history( fake_workbench_without_outputs: dict[str, Any], fake_workbench_adjacency: dict[str, Any], create_registered_user: Callable[..., dict[str, Any]], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], create_pipeline: Callable[..., Awaitable[CompPipelineAtDB]], - create_tasks: Callable[..., Awaitable[list[CompTaskAtDB]]], + create_tasks_from_project: Callable[..., Awaitable[list[CompTaskAtDB]]], create_comp_run: Callable[..., Awaitable[CompRunsAtDB]], rpc_client: RabbitMQRPCClient, + with_product: dict[str, Any], ): user = create_registered_user() - proj = await project(user, workbench=fake_workbench_without_outputs) + proj = await create_project(user, workbench=fake_workbench_without_outputs) await create_pipeline( project_id=f"{proj.uuid}", dag_adjacency_list=fake_workbench_adjacency, ) - comp_tasks = await create_tasks( + comp_tasks = await create_tasks_from_project( user=user, project=proj, 
state=StateType.PUBLISHED, progress=None ) + assert comp_tasks comp_runs_1 = await create_comp_run( user=user, project=proj, @@ -195,6 +206,7 @@ async def test_rpc_list_computation_runs_history( iteration=1, dag_adjacency_list=fake_workbench_adjacency, ) + assert comp_runs_1 comp_runs_2 = await create_comp_run( user=user, project=proj, @@ -204,6 +216,7 @@ async def test_rpc_list_computation_runs_history( iteration=2, dag_adjacency_list=fake_workbench_adjacency, ) + assert comp_runs_2 comp_runs_3 = await create_comp_run( user=user, project=proj, @@ -213,9 +226,197 @@ async def test_rpc_list_computation_runs_history( iteration=3, dag_adjacency_list=fake_workbench_adjacency, ) + assert comp_runs_3 output = await rpc_computations.list_computations_iterations_page( rpc_client, product_name="osparc", user_id=user["id"], project_ids=[proj.uuid] ) assert output.total == 3 assert isinstance(output, ComputationRunRpcGetPage) + + +async def test_rpc_list_computation_collection_runs_page_and_collection_run_tasks_page( + fake_workbench_without_outputs: dict[str, Any], # <-- Has 4 nodes + fake_workbench_adjacency: dict[str, Any], + create_registered_user: Callable[..., dict[str, Any]], + create_project: Callable[..., Awaitable[ProjectAtDB]], + create_pipeline: Callable[..., Awaitable[CompPipelineAtDB]], + create_tasks_from_project: Callable[..., Awaitable[list[CompTaskAtDB]]], + create_comp_run_snapshot_tasks: Callable[ + ..., Awaitable[list[CompRunSnapshotTaskDBGet]] + ], + create_comp_run: Callable[..., Awaitable[CompRunsAtDB]], + rpc_client: RabbitMQRPCClient, + faker: Faker, + with_product: dict[str, Any], +): + user = create_registered_user() + projects = [ + await create_project(user, workbench=fake_workbench_without_outputs) + for _ in range(3) + ] + + default_collection_run_id = CollectionRunID(f"{faker.uuid4(cast_to=None)}") + not_default_collection_run_id = CollectionRunID(f"{faker.uuid4(cast_to=None)}") + + collection_run_id_project_list = [ + default_collection_run_id, + default_collection_run_id, + not_default_collection_run_id, + ] + + running_state_project_list = [ + RunningState.SUCCESS, + RunningState.PENDING, + RunningState.SUCCESS, + ] + + for proj, collection_run_id, running_state in zip( + projects, + collection_run_id_project_list, + running_state_project_list, + strict=True, + ): + await create_pipeline( + project_id=f"{proj.uuid}", + dag_adjacency_list=fake_workbench_adjacency, + ) + await create_tasks_from_project( + user=user, project=proj, state=running_state, progress=None + ) + run = await create_comp_run( + user=user, + project=proj, + result=running_state, + started=datetime.now(tz=UTC) - timedelta(minutes=120), + ended=datetime.now(tz=UTC) - timedelta(minutes=100), + iteration=1, + dag_adjacency_list=fake_workbench_adjacency, + collection_run_id=f"{collection_run_id}", + ) + await create_comp_run_snapshot_tasks( + user=user, + project=proj, + run_id=run.run_id, + ) + + output = await rpc_computations.list_computation_collection_runs_page( + rpc_client, product_name="osparc", user_id=user["id"], project_ids=None + ) + assert output.total == 2 + assert len(output.items) == 2 + assert isinstance(output, ComputationCollectionRunRpcGetPage) + assert len(output.items[0].project_ids) == 1 + assert len(output.items[1].project_ids) == 2 + + output = await rpc_computations.list_computation_collection_run_tasks_page( + rpc_client, + product_name="osparc", + user_id=user["id"], + collection_run_id=default_collection_run_id, + ) + assert output.total == 8 + assert len(output.items) == 
8 + isinstance(output, ComputationCollectionRunTaskRpcGetPage) + + output = await rpc_computations.list_computation_collection_run_tasks_page( + rpc_client, + product_name="osparc", + user_id=user["id"], + collection_run_id=not_default_collection_run_id, + ) + assert output.total == 4 + assert len(output.items) == 4 + isinstance(output, ComputationCollectionRunTaskRpcGetPage) + + # Test filtering only running collection runs + output = await rpc_computations.list_computation_collection_runs_page( + rpc_client, + product_name="osparc", + user_id=user["id"], + project_ids=None, + filter_only_running=True, # <-- This is the tested filter + ) + assert output.total == 1 + assert len(output.items) == 1 + assert isinstance(output, ComputationCollectionRunRpcGetPage) + assert len(output.items[0].project_ids) == 2 + + +async def test_rpc_list_computation_collection_runs_empty_ids_when_user_has_already_run_history( + fake_workbench_without_outputs: dict[str, Any], # <-- Has 4 nodes + fake_workbench_adjacency: dict[str, Any], + create_registered_user: Callable[..., dict[str, Any]], + create_project: Callable[..., Awaitable[ProjectAtDB]], + create_pipeline: Callable[..., Awaitable[CompPipelineAtDB]], + create_tasks_from_project: Callable[..., Awaitable[list[CompTaskAtDB]]], + create_comp_run_snapshot_tasks: Callable[ + ..., Awaitable[list[CompRunSnapshotTaskDBGet]] + ], + create_comp_run: Callable[..., Awaitable[CompRunsAtDB]], + rpc_client: RabbitMQRPCClient, + faker: Faker, + with_product: dict[str, Any], +): + user = create_registered_user() + proj = await create_project(user, workbench=fake_workbench_without_outputs) + + await create_pipeline( + project_id=f"{proj.uuid}", + dag_adjacency_list=fake_workbench_adjacency, + ) + await create_tasks_from_project( + user=user, project=proj, state=RunningState.SUCCESS, progress=None + ) + run = await create_comp_run( + user=user, + project=proj, + result=RunningState.SUCCESS, + started=datetime.now(tz=UTC) - timedelta(minutes=120), + ended=datetime.now(tz=UTC) - timedelta(minutes=100), + iteration=1, + dag_adjacency_list=fake_workbench_adjacency, + ) + await create_comp_run_snapshot_tasks( + user=user, + project=proj, + run_id=run.run_id, + ) + + output = await rpc_computations.list_computation_collection_runs_page( + rpc_client, product_name="osparc", user_id=user["id"], project_ids=None + ) + assert output.total == 1 + assert len(output.items) == 1 + assert isinstance(output, ComputationCollectionRunRpcGetPage) + + # Test filtering only running collection runs + output = await rpc_computations.list_computation_collection_runs_page( + rpc_client, + product_name="osparc", + user_id=user["id"], + project_ids=None, + filter_only_running=True, # <-- This is the tested filter + ) + assert output.total == 0 + assert len(output.items) == 0 + + +async def test_rpc_list_computation_collection_runs_empty_ids_when_user_do_not_have_run_history( + create_registered_user: Callable[..., dict[str, Any]], + rpc_client: RabbitMQRPCClient, + with_product: dict[str, Any], +): + user = create_registered_user() + + # Test with empty collection_run_ids + output = await rpc_computations.list_computation_collection_runs_page( + rpc_client, + product_name="osparc", + user_id=user["id"], + project_ids=None, + filter_only_running=True, # This will result in empty collection_run_ids + ) + assert output.total == 0 + assert len(output.items) == 0 + assert isinstance(output, ComputationCollectionRunRpcGetPage) diff --git 
a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_rpc_computations_tasks.py b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_rpc_computations_tasks.py new file mode 100644 index 000000000000..8117b89e04a2 --- /dev/null +++ b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_api_rpc_computations_tasks.py @@ -0,0 +1,75 @@ +# pylint: disable=unused-argument + +from collections.abc import Awaitable, Callable +from typing import Any + +import pytest +from faker import Faker +from models_library.api_schemas_directorv2.computations import TaskLogFileIdGet +from models_library.projects import ProjectAtDB, ProjectID +from models_library.projects_state import RunningState +from servicelib.rabbitmq import RabbitMQRPCClient +from servicelib.rabbitmq.rpc_interfaces.director_v2 import ( + computations_tasks as rpc_computations_tasks, +) +from servicelib.rabbitmq.rpc_interfaces.director_v2.errors import ( + ComputationalTaskMissingError, +) +from simcore_postgres_database.models.comp_pipeline import StateType +from simcore_service_director_v2.models.comp_pipelines import CompPipelineAtDB +from simcore_service_director_v2.models.comp_runs import CompRunsAtDB +from simcore_service_director_v2.models.comp_tasks import CompTaskAtDB + +_faker = Faker() + +pytest_simcore_core_services_selection = ["postgres", "rabbit", "redis"] +pytest_simcore_ops_services_selection = [ + "adminer", +] + + +async def test_get_computation_task_log_file_ids( + fake_workbench_without_outputs: dict[str, Any], + fake_workbench_adjacency: dict[str, Any], + create_registered_user: Callable[..., dict[str, Any]], + create_project: Callable[..., Awaitable[ProjectAtDB]], + create_pipeline: Callable[..., Awaitable[CompPipelineAtDB]], + create_tasks_from_project: Callable[..., Awaitable[list[CompTaskAtDB]]], + create_comp_run: Callable[..., Awaitable[CompRunsAtDB]], + rpc_client: RabbitMQRPCClient, + with_product: dict[str, Any], +): + user = create_registered_user() + proj = await create_project(user, workbench=fake_workbench_without_outputs) + await create_pipeline( + project_id=f"{proj.uuid}", + dag_adjacency_list=fake_workbench_adjacency, + ) + comp_tasks = await create_tasks_from_project( + user=user, project=proj, state=StateType.PUBLISHED, progress=None + ) + comp_runs = await create_comp_run( + user=user, + project=proj, + result=RunningState.PUBLISHED, + dag_adjacency_list=fake_workbench_adjacency, + ) + assert comp_runs + + output = await rpc_computations_tasks.get_computation_task_log_file_ids( + rpc_client, project_id=proj.uuid + ) + assert isinstance(output, list) + assert len(output) <= len( + comp_tasks + ) # output doesn't contain e.g. 
filepickers and dynamic services + assert all(isinstance(elm, TaskLogFileIdGet) for elm in output) + + +async def test_get_computation_task_log_file_ids_no_pipeline( + rpc_client: RabbitMQRPCClient, +): + with pytest.raises(ComputationalTaskMissingError): + await rpc_computations_tasks.get_computation_task_log_file_ids( + rpc_client, project_id=ProjectID(_faker.uuid4()) + ) diff --git a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_db_repositories_comp_runs.py b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_db_repositories_comp_runs.py index 64de723a48af..41753f8fc6f9 100644 --- a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_db_repositories_comp_runs.py +++ b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_db_repositories_comp_runs.py @@ -8,13 +8,15 @@ import asyncio import datetime import random +import uuid from collections.abc import Awaitable, Callable -from typing import cast +from typing import Any, cast import arrow import pytest from _helpers import PublishedProject from faker import Faker +from models_library.computations import CollectionRunID from models_library.projects import ProjectID from models_library.projects_state import RunningState from models_library.users import UserID @@ -56,6 +58,7 @@ async def test_get( fake_project_id: ProjectID, publish_project: Callable[[], Awaitable[PublishedProject]], create_comp_run: Callable[..., Awaitable[CompRunsAtDB]], + with_product: dict[str, Any], ): with pytest.raises(ComputationalRunNotFoundError): await CompRunsRepository(sqlalchemy_async_engine).get( @@ -88,6 +91,8 @@ async def test_list( publish_project: Callable[[], Awaitable[PublishedProject]], run_metadata: RunMetadataDict, faker: Faker, + fake_collection_run_id: CollectionRunID, + with_product: dict[str, Any], ): assert await CompRunsRepository(sqlalchemy_async_engine).list_() == [] @@ -101,6 +106,7 @@ async def test_list( metadata=run_metadata, use_on_demand_clusters=faker.pybool(), dag_adjacency_list=published_project.pipeline.dag_adjacency_list, + collection_run_id=fake_collection_run_id, ) assert await CompRunsRepository(sqlalchemy_async_engine).list_() == [created] @@ -113,6 +119,7 @@ async def test_list( metadata=run_metadata, use_on_demand_clusters=faker.pybool(), dag_adjacency_list=published_project.pipeline.dag_adjacency_list, + collection_run_id=faker.uuid4(), ) for n in range(50) ) @@ -269,6 +276,7 @@ async def test_create( run_metadata: RunMetadataDict, faker: Faker, publish_project: Callable[[], Awaitable[PublishedProject]], + with_product: dict[str, Any], ): with pytest.raises(ProjectNotFoundError): await CompRunsRepository(sqlalchemy_async_engine).create( @@ -278,6 +286,7 @@ async def test_create( metadata=run_metadata, use_on_demand_clusters=faker.pybool(), dag_adjacency_list={}, + collection_run_id=faker.uuid4(), ) published_project = await publish_project() with pytest.raises(UserNotFoundError): @@ -288,6 +297,7 @@ async def test_create( metadata=run_metadata, use_on_demand_clusters=faker.pybool(), dag_adjacency_list=published_project.pipeline.dag_adjacency_list, + collection_run_id=faker.uuid4(), ) created = await CompRunsRepository(sqlalchemy_async_engine).create( @@ -297,6 +307,7 @@ async def test_create( metadata=run_metadata, use_on_demand_clusters=faker.pybool(), dag_adjacency_list=published_project.pipeline.dag_adjacency_list, + collection_run_id=faker.uuid4(), ) got = await CompRunsRepository(sqlalchemy_async_engine).get( user_id=published_project.user["id"], @@ -312,6 +323,7 @@ async def 
test_create( metadata=run_metadata, use_on_demand_clusters=faker.pybool(), dag_adjacency_list=published_project.pipeline.dag_adjacency_list, + collection_run_id=faker.uuid4(), ) assert created != got assert created.iteration == got.iteration + 1 @@ -331,6 +343,8 @@ async def test_update( run_metadata: RunMetadataDict, faker: Faker, publish_project: Callable[[], Awaitable[PublishedProject]], + fake_collection_run_id: CollectionRunID, + with_product: dict[str, Any], ): # this updates nothing but also does not complain updated = await CompRunsRepository(sqlalchemy_async_engine).update( @@ -346,6 +360,7 @@ async def test_update( metadata=run_metadata, use_on_demand_clusters=faker.pybool(), dag_adjacency_list=published_project.pipeline.dag_adjacency_list, + collection_run_id=fake_collection_run_id, ) got = await CompRunsRepository(sqlalchemy_async_engine).get( @@ -371,6 +386,8 @@ async def test_set_run_result( run_metadata: RunMetadataDict, faker: Faker, publish_project: Callable[[], Awaitable[PublishedProject]], + fake_collection_run_id: CollectionRunID, + with_product: dict[str, Any], ): published_project = await publish_project() created = await CompRunsRepository(sqlalchemy_async_engine).create( @@ -380,6 +397,7 @@ async def test_set_run_result( metadata=run_metadata, use_on_demand_clusters=faker.pybool(), dag_adjacency_list=published_project.pipeline.dag_adjacency_list, + collection_run_id=fake_collection_run_id, ) got = await CompRunsRepository(sqlalchemy_async_engine).get( user_id=published_project.user["id"], @@ -419,6 +437,8 @@ async def test_mark_for_cancellation( run_metadata: RunMetadataDict, faker: Faker, publish_project: Callable[[], Awaitable[PublishedProject]], + fake_collection_run_id: CollectionRunID, + with_product: dict[str, Any], ): published_project = await publish_project() created = await CompRunsRepository(sqlalchemy_async_engine).create( @@ -428,6 +448,7 @@ async def test_mark_for_cancellation( metadata=run_metadata, use_on_demand_clusters=faker.pybool(), dag_adjacency_list=published_project.pipeline.dag_adjacency_list, + collection_run_id=fake_collection_run_id, ) got = await CompRunsRepository(sqlalchemy_async_engine).get( user_id=published_project.user["id"], @@ -451,6 +472,8 @@ async def test_mark_for_scheduling( run_metadata: RunMetadataDict, faker: Faker, publish_project: Callable[[], Awaitable[PublishedProject]], + fake_collection_run_id: CollectionRunID, + with_product: dict[str, Any], ): published_project = await publish_project() created = await CompRunsRepository(sqlalchemy_async_engine).create( @@ -460,6 +483,7 @@ async def test_mark_for_scheduling( metadata=run_metadata, use_on_demand_clusters=faker.pybool(), dag_adjacency_list=published_project.pipeline.dag_adjacency_list, + collection_run_id=fake_collection_run_id, ) got = await CompRunsRepository(sqlalchemy_async_engine).get( user_id=published_project.user["id"], @@ -485,6 +509,8 @@ async def test_mark_scheduling_done( run_metadata: RunMetadataDict, faker: Faker, publish_project: Callable[[], Awaitable[PublishedProject]], + fake_collection_run_id: CollectionRunID, + with_product: dict[str, Any], ): published_project = await publish_project() created = await CompRunsRepository(sqlalchemy_async_engine).create( @@ -494,6 +520,7 @@ async def test_mark_scheduling_done( metadata=run_metadata, use_on_demand_clusters=faker.pybool(), dag_adjacency_list=published_project.pipeline.dag_adjacency_list, + collection_run_id=fake_collection_run_id, ) got = await CompRunsRepository(sqlalchemy_async_engine).get( 
user_id=published_project.user["id"], @@ -512,3 +539,785 @@ async def test_mark_scheduling_done( assert updated != created assert updated.scheduled is None assert updated.processed is not None + + +def _normalize_uuids(data): + """Recursively convert UUID objects to strings in a nested dictionary.""" + if isinstance(data, dict): + return {k: _normalize_uuids(v) for k, v in data.items()} + if isinstance(data, list): + return [_normalize_uuids(i) for i in data] + if isinstance(data, uuid.UUID): + return str(data) + return data + + +async def test_list_group_by_collection_run_id( + sqlalchemy_async_engine: AsyncEngine, + run_metadata: RunMetadataDict, + faker: Faker, + publish_project: Callable[[], Awaitable[PublishedProject]], + fake_collection_run_id: CollectionRunID, + with_product: dict[str, Any], +): + """Test list_group_by_collection_run_id function with simple data insertion and retrieval.""" + # Create a few published projects + published_project_1 = await publish_project() + published_project_2 = ( + await publish_project() + ) # Create a shared collection run ID for grouping + collection_run_id = fake_collection_run_id + + # Create computation runs with the same collection_run_id + await asyncio.gather( + CompRunsRepository(sqlalchemy_async_engine).create( + user_id=published_project_1.user["id"], + project_id=published_project_1.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=published_project_1.pipeline.dag_adjacency_list, + collection_run_id=collection_run_id, + ), + CompRunsRepository(sqlalchemy_async_engine).create( + user_id=published_project_1.user["id"], + project_id=published_project_2.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=published_project_2.pipeline.dag_adjacency_list, + collection_run_id=collection_run_id, + ), + ) + + # Test the list_group_by_collection_run_id function + total_count, items = await CompRunsRepository( + sqlalchemy_async_engine + ).list_group_by_collection_run_id( + product_name=run_metadata.get("product_name"), + user_id=published_project_1.user["id"], + offset=0, + limit=10, + ) + + # Assertions + assert total_count == 1 # One collection group + assert len(items) == 1 + + collection_item = items[0] + assert collection_item.collection_run_id == collection_run_id + assert len(collection_item.project_ids) == 2 # Two projects in the collection + assert str(published_project_1.project.uuid) in collection_item.project_ids + assert str(published_project_2.project.uuid) in collection_item.project_ids + assert ( + collection_item.state + == RunningState.STARTED # Initial state returned to activity overview + ) + assert collection_item.info == _normalize_uuids(run_metadata) + assert collection_item.submitted_at is not None + assert collection_item.started_at is None # Not started yet + assert collection_item.ended_at is None # Not ended yet + + +async def test_list_group_by_collection_run_id_with_mixed_states_returns_started( + sqlalchemy_async_engine: AsyncEngine, + run_metadata: RunMetadataDict, + faker: Faker, + publish_project: Callable[[], Awaitable[PublishedProject]], + fake_collection_run_id: CollectionRunID, + with_product: dict[str, Any], +): + """Test that if any state is not final, the grouped state returns STARTED.""" + # Create published projects + published_project_1 = await publish_project() + published_project_2 = await publish_project() + published_project_3 = await publish_project() + + 
collection_run_id = fake_collection_run_id + repo = CompRunsRepository(sqlalchemy_async_engine) + + # Create computation runs with same collection_run_id + comp_run_1 = await repo.create( + user_id=published_project_1.user["id"], + project_id=published_project_1.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=published_project_1.pipeline.dag_adjacency_list, + collection_run_id=collection_run_id, + ) + comp_run_2 = await repo.create( + user_id=published_project_1.user["id"], + project_id=published_project_2.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=published_project_2.pipeline.dag_adjacency_list, + collection_run_id=collection_run_id, + ) + comp_run_3 = await repo.create( + user_id=published_project_1.user["id"], + project_id=published_project_3.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=published_project_3.pipeline.dag_adjacency_list, + collection_run_id=collection_run_id, + ) + + # Set mixed states: one SUCCESS (final), one FAILED (final), one STARTED (non-final) + await repo.set_run_result( + user_id=published_project_1.user["id"], + project_id=published_project_1.project.uuid, + iteration=comp_run_1.iteration, + result_state=RunningState.SUCCESS, + final_state=True, + ) + await repo.set_run_result( + user_id=published_project_1.user["id"], + project_id=published_project_2.project.uuid, + iteration=comp_run_2.iteration, + result_state=RunningState.FAILED, + final_state=True, + ) + await repo.set_run_result( + user_id=published_project_1.user["id"], + project_id=published_project_3.project.uuid, + iteration=comp_run_3.iteration, + result_state=RunningState.STARTED, + final_state=False, + ) + + # Test the list_group_by_collection_run_id function + total_count, items = await repo.list_group_by_collection_run_id( + product_name=run_metadata.get("product_name"), + user_id=published_project_1.user["id"], + offset=0, + limit=10, + ) + + # Assertions + assert total_count == 1 + assert len(items) == 1 + collection_item = items[0] + assert collection_item.collection_run_id == collection_run_id + assert collection_item.state == RunningState.STARTED # Non-final state wins + + +async def test_list_group_by_collection_run_id_all_success_returns_success( + sqlalchemy_async_engine: AsyncEngine, + run_metadata: RunMetadataDict, + faker: Faker, + publish_project: Callable[[], Awaitable[PublishedProject]], + fake_collection_run_id: CollectionRunID, + with_product: dict[str, Any], +): + """Test that if all states are SUCCESS, the grouped state returns SUCCESS.""" + published_project_1 = await publish_project() + published_project_2 = await publish_project() + + collection_run_id = fake_collection_run_id + repo = CompRunsRepository(sqlalchemy_async_engine) + + # Create computation runs + comp_run_1 = await repo.create( + user_id=published_project_1.user["id"], + project_id=published_project_1.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=published_project_1.pipeline.dag_adjacency_list, + collection_run_id=collection_run_id, + ) + comp_run_2 = await repo.create( + user_id=published_project_1.user["id"], + project_id=published_project_2.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=published_project_2.pipeline.dag_adjacency_list, + 
collection_run_id=collection_run_id, + ) + + # Set both to SUCCESS + await repo.set_run_result( + user_id=published_project_1.user["id"], + project_id=published_project_1.project.uuid, + iteration=comp_run_1.iteration, + result_state=RunningState.SUCCESS, + final_state=True, + ) + await repo.set_run_result( + user_id=published_project_1.user["id"], + project_id=published_project_2.project.uuid, + iteration=comp_run_2.iteration, + result_state=RunningState.SUCCESS, + final_state=True, + ) + + # Test the function + total_count, items = await repo.list_group_by_collection_run_id( + product_name=run_metadata.get("product_name"), + user_id=published_project_1.user["id"], + offset=0, + limit=10, + ) + + # Assertions + assert total_count == 1 + assert len(items) == 1 + collection_item = items[0] + assert collection_item.state == RunningState.SUCCESS + + +async def test_list_group_by_collection_run_id_with_failed_returns_failed( + sqlalchemy_async_engine: AsyncEngine, + run_metadata: RunMetadataDict, + faker: Faker, + publish_project: Callable[[], Awaitable[PublishedProject]], + fake_collection_run_id: CollectionRunID, + with_product: dict[str, Any], +): + """Test that if any state is FAILED (among final states), the grouped state returns FAILED.""" + published_project_1 = await publish_project() + published_project_2 = await publish_project() + published_project_3 = await publish_project() + + collection_run_id = fake_collection_run_id + repo = CompRunsRepository(sqlalchemy_async_engine) + + # Create computation runs + comp_runs = [] + for project in [published_project_1, published_project_2, published_project_3]: + comp_run = await repo.create( + user_id=published_project_1.user["id"], + project_id=project.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=project.pipeline.dag_adjacency_list, + collection_run_id=collection_run_id, + ) + comp_runs.append((project, comp_run)) + + # Set states: SUCCESS, FAILED, ABORTED (all final states, but FAILED is present) + await repo.set_run_result( + user_id=published_project_1.user["id"], + project_id=comp_runs[0][0].project.uuid, + iteration=comp_runs[0][1].iteration, + result_state=RunningState.SUCCESS, + final_state=True, + ) + await repo.set_run_result( + user_id=published_project_1.user["id"], + project_id=comp_runs[1][0].project.uuid, + iteration=comp_runs[1][1].iteration, + result_state=RunningState.FAILED, + final_state=True, + ) + await repo.set_run_result( + user_id=published_project_1.user["id"], + project_id=comp_runs[2][0].project.uuid, + iteration=comp_runs[2][1].iteration, + result_state=RunningState.ABORTED, + final_state=True, + ) + + # Test the function + total_count, items = await repo.list_group_by_collection_run_id( + product_name=run_metadata.get("product_name"), + user_id=published_project_1.user["id"], + offset=0, + limit=10, + ) + + # Assertions + assert total_count == 1 + assert len(items) == 1 + collection_item = items[0] + assert collection_item.state == RunningState.FAILED # FAILED takes precedence + + +async def test_list_group_by_collection_run_id_with_aborted_returns_aborted( + sqlalchemy_async_engine: AsyncEngine, + run_metadata: RunMetadataDict, + faker: Faker, + publish_project: Callable[[], Awaitable[PublishedProject]], + fake_collection_run_id: CollectionRunID, + with_product: dict[str, Any], +): + """Test that if any state is ABORTED (but no FAILED), the grouped state returns ABORTED.""" + published_project_1 = await publish_project() + 
published_project_2 = await publish_project() + + collection_run_id = fake_collection_run_id + repo = CompRunsRepository(sqlalchemy_async_engine) + + # Create computation runs + comp_run_1 = await repo.create( + user_id=published_project_1.user["id"], + project_id=published_project_1.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=published_project_1.pipeline.dag_adjacency_list, + collection_run_id=collection_run_id, + ) + comp_run_2 = await repo.create( + user_id=published_project_1.user["id"], + project_id=published_project_2.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=published_project_2.pipeline.dag_adjacency_list, + collection_run_id=collection_run_id, + ) + + # Set states: SUCCESS, ABORTED (final states, no FAILED) + await repo.set_run_result( + user_id=published_project_1.user["id"], + project_id=published_project_1.project.uuid, + iteration=comp_run_1.iteration, + result_state=RunningState.SUCCESS, + final_state=True, + ) + await repo.set_run_result( + user_id=published_project_1.user["id"], + project_id=published_project_2.project.uuid, + iteration=comp_run_2.iteration, + result_state=RunningState.ABORTED, + final_state=True, + ) + + # Test the function + total_count, items = await repo.list_group_by_collection_run_id( + product_name=run_metadata.get("product_name"), + user_id=published_project_1.user["id"], + offset=0, + limit=10, + ) + + # Assertions + assert total_count == 1 + assert len(items) == 1 + collection_item = items[0] + assert collection_item.state == RunningState.ABORTED + + +async def test_list_group_by_collection_run_id_with_unknown_returns_unknown( + sqlalchemy_async_engine: AsyncEngine, + run_metadata: RunMetadataDict, + faker: Faker, + publish_project: Callable[[], Awaitable[PublishedProject]], + fake_collection_run_id: CollectionRunID, + with_product: dict[str, Any], +): + """Test that if any state is UNKNOWN (but no FAILED/ABORTED), the grouped state returns UNKNOWN.""" + published_project_1 = await publish_project() + published_project_2 = await publish_project() + + collection_run_id = fake_collection_run_id + repo = CompRunsRepository(sqlalchemy_async_engine) + + # Create computation runs + comp_run_1 = await repo.create( + user_id=published_project_1.user["id"], + project_id=published_project_1.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=published_project_1.pipeline.dag_adjacency_list, + collection_run_id=collection_run_id, + ) + comp_run_2 = await repo.create( + user_id=published_project_1.user["id"], + project_id=published_project_2.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=published_project_2.pipeline.dag_adjacency_list, + collection_run_id=collection_run_id, + ) + + # Set states: SUCCESS, UNKNOWN (final states, no FAILED/ABORTED) + await repo.set_run_result( + user_id=published_project_1.user["id"], + project_id=published_project_1.project.uuid, + iteration=comp_run_1.iteration, + result_state=RunningState.SUCCESS, + final_state=True, + ) + await repo.set_run_result( + user_id=published_project_1.user["id"], + project_id=published_project_2.project.uuid, + iteration=comp_run_2.iteration, + result_state=RunningState.UNKNOWN, # --> is setup to be FAILED + final_state=True, + ) + + # Test the function + assert "product_name" in run_metadata + total_count, items = 
await repo.list_group_by_collection_run_id( + product_name=run_metadata["product_name"], + user_id=published_project_1.user["id"], + offset=0, + limit=10, + ) + + # Assertions + assert total_count == 1 + assert len(items) == 1 + collection_item = items[0] + assert collection_item.state == RunningState.UNKNOWN + + +async def test_list_group_by_collection_run_id_with_project_filter( + sqlalchemy_async_engine: AsyncEngine, + run_metadata: RunMetadataDict, + faker: Faker, + publish_project: Callable[[], Awaitable[PublishedProject]], + with_product: dict[str, Any], +): + """Test list_group_by_collection_run_id with project_ids filter.""" + published_project_1 = await publish_project() + published_project_2 = await publish_project() + published_project_3 = await publish_project() + + collection_run_id_1 = CollectionRunID(f"{faker.uuid4(cast_to=None)}") + collection_run_id_2 = CollectionRunID(f"{faker.uuid4(cast_to=None)}") + repo = CompRunsRepository(sqlalchemy_async_engine) + + # Create computation runs with different collection_run_ids + await repo.create( + user_id=published_project_1.user["id"], + project_id=published_project_1.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=published_project_1.pipeline.dag_adjacency_list, + collection_run_id=collection_run_id_1, + ) + await repo.create( + user_id=published_project_1.user["id"], + project_id=published_project_2.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=published_project_2.pipeline.dag_adjacency_list, + collection_run_id=collection_run_id_1, + ) + await repo.create( + user_id=published_project_1.user["id"], + project_id=published_project_3.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=published_project_3.pipeline.dag_adjacency_list, + collection_run_id=collection_run_id_2, + ) + + # Test with project filter for only first two projects + total_count, items = await repo.list_group_by_collection_run_id( + product_name=run_metadata.get("product_name"), + user_id=published_project_1.user["id"], + project_ids_or_none=[ + published_project_1.project.uuid, + published_project_2.project.uuid, + ], + offset=0, + limit=10, + ) + + # Should only return collection_run_id_1 + assert total_count == 1 + assert len(items) == 1 + collection_item = items[0] + assert collection_item.collection_run_id == collection_run_id_1 + assert len(collection_item.project_ids) == 2 + + +async def test_list_group_by_collection_run_id_pagination( + sqlalchemy_async_engine: AsyncEngine, + run_metadata: RunMetadataDict, + faker: Faker, + publish_project: Callable[[], Awaitable[PublishedProject]], + with_product: dict[str, Any], +): + """Test pagination functionality of list_group_by_collection_run_id.""" + published_project = await publish_project() + repo = CompRunsRepository(sqlalchemy_async_engine) + + # Create multiple collection runs + collection_run_ids = [] + for _ in range(5): + collection_run_id = CollectionRunID(f"{faker.uuid4(cast_to=None)}") + collection_run_ids.append(collection_run_id) + + project = await publish_project() + await repo.create( + user_id=published_project.user["id"], + project_id=project.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=project.pipeline.dag_adjacency_list, + collection_run_id=collection_run_id, + ) + + # Test first page + total_count, items = await 
repo.list_group_by_collection_run_id( + product_name=run_metadata.get("product_name"), + user_id=published_project.user["id"], + offset=0, + limit=2, + ) + + assert total_count == 5 + assert len(items) == 2 + + # Test second page + total_count, items = await repo.list_group_by_collection_run_id( + product_name=run_metadata.get("product_name"), + user_id=published_project.user["id"], + offset=2, + limit=2, + ) + + assert total_count == 5 + assert len(items) == 2 + + # Test last page + total_count, items = await repo.list_group_by_collection_run_id( + product_name=run_metadata.get("product_name"), + user_id=published_project.user["id"], + offset=4, + limit=2, + ) + + assert total_count == 5 + assert len(items) == 1 + + +async def test_list_group_by_collection_run_id_empty_result( + sqlalchemy_async_engine: AsyncEngine, + run_metadata: RunMetadataDict, + fake_user_id: UserID, + with_product: dict[str, Any], +): + """Test list_group_by_collection_run_id returns empty when no runs exist.""" + repo = CompRunsRepository(sqlalchemy_async_engine) + + total_count, items = await repo.list_group_by_collection_run_id( + product_name=run_metadata.get("product_name"), + user_id=fake_user_id, + offset=0, + limit=10, + ) + + assert total_count == 0 + assert len(items) == 0 + + +async def test_list_group_by_collection_run_id_with_different_users( + sqlalchemy_async_engine: AsyncEngine, + create_registered_user: Callable[..., dict[str, Any]], + run_metadata: RunMetadataDict, + faker: Faker, + publish_project: Callable[[], Awaitable[PublishedProject]], + with_product: dict[str, Any], +): + """Test that list_group_by_collection_run_id filters by user_id correctly.""" + published_project_user1 = await publish_project() + published_project_user2 = await publish_project() + + user1 = create_registered_user() + user2 = create_registered_user() + + collection_run_id_1 = CollectionRunID(f"{faker.uuid4(cast_to=None)}") + collection_run_id_2 = CollectionRunID(f"{faker.uuid4(cast_to=None)}") + + repo = CompRunsRepository(sqlalchemy_async_engine) + + # Create runs for different users with same collection_run_id + await repo.create( + user_id=user1["id"], + project_id=published_project_user1.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=published_project_user1.pipeline.dag_adjacency_list, + collection_run_id=collection_run_id_1, + ) + await repo.create( + user_id=user2["id"], + project_id=published_project_user2.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=published_project_user2.pipeline.dag_adjacency_list, + collection_run_id=collection_run_id_2, + ) + + # Test for user1 - should only see their own runs + total_count, items = await repo.list_group_by_collection_run_id( + product_name=run_metadata.get("product_name"), + user_id=user1["id"], + offset=0, + limit=10, + ) + + assert total_count == 1 + assert len(items) == 1 + collection_item = items[0] + assert len(collection_item.project_ids) == 1 + assert str(published_project_user1.project.uuid) in collection_item.project_ids + assert str(published_project_user2.project.uuid) not in collection_item.project_ids + + # Test for user2 - should only see their own runs + total_count, items = await repo.list_group_by_collection_run_id( + product_name=run_metadata.get("product_name"), + user_id=user2["id"], + offset=0, + limit=10, + ) + + assert total_count == 1 + assert len(items) == 1 + collection_item = items[0] + assert 
len(collection_item.project_ids) == 1 + assert str(published_project_user2.project.uuid) in collection_item.project_ids + assert str(published_project_user1.project.uuid) not in collection_item.project_ids + + +async def test_list_group_by_collection_run_id_state_priority_precedence( + sqlalchemy_async_engine: AsyncEngine, + run_metadata: RunMetadataDict, + faker: Faker, + publish_project: Callable[[], Awaitable[PublishedProject]], + fake_collection_run_id: CollectionRunID, + with_product: dict[str, Any], +): + """Test that state resolution follows correct priority: FAILED > ABORTED > UNKNOWN.""" + published_projects = [await publish_project() for _ in range(4)] + + collection_run_id = fake_collection_run_id + repo = CompRunsRepository(sqlalchemy_async_engine) + + # Create computation runs + comp_runs = [] + for project in published_projects: + comp_run = await repo.create( + user_id=published_projects[0].user["id"], + project_id=project.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=project.pipeline.dag_adjacency_list, + collection_run_id=collection_run_id, + ) + comp_runs.append((project, comp_run)) + + # Set states: SUCCESS, UNKNOWN, ABORTED, FAILED - should return FAILED + states = [ + RunningState.SUCCESS, + RunningState.UNKNOWN, + RunningState.ABORTED, + RunningState.FAILED, + ] + for i, (project, comp_run) in enumerate(comp_runs): + await repo.set_run_result( + user_id=published_projects[0].user["id"], + project_id=project.project.uuid, + iteration=comp_run.iteration, + result_state=states[i], + final_state=True, + ) + + # Test the function + total_count, items = await repo.list_group_by_collection_run_id( + product_name=run_metadata.get("product_name"), + user_id=published_projects[0].user["id"], + offset=0, + limit=10, + ) + + # Assertions - FAILED should have highest priority + assert total_count == 1 + assert len(items) == 1 + collection_item = items[0] + assert collection_item.state == RunningState.FAILED + + +async def test_get_latest_run_by_project( + sqlalchemy_async_engine: AsyncEngine, + run_metadata: RunMetadataDict, + faker: Faker, + publish_project: Callable[[], Awaitable[PublishedProject]], + create_registered_user: Callable[..., dict[str, Any]], + with_product: dict[str, Any], +): + """Test that get_latest_run_by_project() retrieves the latest run regardless of the user""" + published_project = await publish_project() + + # Create a second user + second_user = create_registered_user() + + # Create comp runs for the original user + comp_run_user1_iter1 = await CompRunsRepository(sqlalchemy_async_engine).create( + user_id=published_project.user["id"], + project_id=published_project.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=published_project.pipeline.dag_adjacency_list, + collection_run_id=CollectionRunID(faker.uuid4()), + ) + + # Create a comp run for the second user (iterations are tracked per user, so this one is iteration 1) + comp_run_user2_iter2 = await CompRunsRepository(sqlalchemy_async_engine).create( + user_id=second_user["id"], + project_id=published_project.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=published_project.pipeline.dag_adjacency_list, + collection_run_id=CollectionRunID(faker.uuid4()), + ) + + # Create another run for the first user (this becomes iteration 2 for that user) + comp_run_user1_iter3 = await CompRunsRepository(sqlalchemy_async_engine).create( +
user_id=published_project.user["id"], + project_id=published_project.project.uuid, + iteration=None, + metadata=run_metadata, + use_on_demand_clusters=faker.pybool(), + dag_adjacency_list=published_project.pipeline.dag_adjacency_list, + collection_run_id=CollectionRunID(faker.uuid4()), + ) + + # Verify iterations are correct + assert comp_run_user1_iter1.iteration == 1 + assert comp_run_user2_iter2.iteration == 1 + assert comp_run_user1_iter3.iteration == 2 + + # Test get with user_id=None should return the latest run (highest iteration) + latest_run = await CompRunsRepository( + sqlalchemy_async_engine + ).get_latest_run_by_project( + project_id=published_project.project.uuid, + ) + assert latest_run == comp_run_user1_iter3 + assert latest_run.iteration == 2 + + # Test get with specific user_id still works + user1_latest = await CompRunsRepository(sqlalchemy_async_engine).get( + user_id=published_project.user["id"], + project_id=published_project.project.uuid, + ) + assert user1_latest == comp_run_user1_iter3 + + user2_latest = await CompRunsRepository(sqlalchemy_async_engine).get( + user_id=second_user["id"], + project_id=published_project.project.uuid, + ) + assert user2_latest == comp_run_user2_iter2 diff --git a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_db_repositories_comp_runs_snapshot_tasks.py b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_db_repositories_comp_runs_snapshot_tasks.py new file mode 100644 index 000000000000..1696d6f2fd91 --- /dev/null +++ b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_db_repositories_comp_runs_snapshot_tasks.py @@ -0,0 +1,246 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments + + +from collections.abc import Awaitable, Callable +from typing import Any + +from _helpers import PublishedProject +from models_library.computations import CollectionRunID +from models_library.products import ProductName +from simcore_service_director_v2.models.comp_run_snapshot_tasks import ( + CompRunSnapshotTaskDBGet, +) +from simcore_service_director_v2.models.comp_runs import CompRunsAtDB +from simcore_service_director_v2.modules.db.repositories.comp_runs_snapshot_tasks import ( + CompRunsSnapshotTasksRepository, +) +from sqlalchemy.ext.asyncio import AsyncEngine + +pytest_simcore_core_services_selection = [ + "postgres", +] +pytest_simcore_ops_services_selection = [ + "adminer", +] + + +async def test_list_computation_collection_run_tasks( + sqlalchemy_async_engine: AsyncEngine, + publish_project: Callable[[], Awaitable[PublishedProject]], + create_comp_run: Callable[..., Awaitable[CompRunsAtDB]], + create_comp_run_snapshot_tasks: Callable[ + ..., Awaitable[list[CompRunSnapshotTaskDBGet]] + ], + osparc_product_name: ProductName, + fake_collection_run_id: CollectionRunID, + with_product: dict[str, Any], +): + repo = CompRunsSnapshotTasksRepository(db_engine=sqlalchemy_async_engine) + + # 1. create a project + published_project = await publish_project() + user_id = published_project.user["id"] + + # 2. create a comp_run + run = await create_comp_run( + published_project.user, + published_project.project, + dag_adjacency_list=published_project.pipeline.dag_adjacency_list, + collection_run_id=f"{fake_collection_run_id}", + ) + + # 3. create snapshot tasks for that run + snapshot_tasks = await create_comp_run_snapshot_tasks( + user=published_project.user, + project=published_project.project, + run_id=run.run_id, + ) + + # 4. 
list them + total_count, tasks = await repo.list_computation_collection_run_tasks( + product_name=osparc_product_name, + user_id=user_id, + collection_run_id=fake_collection_run_id, + ) + + assert total_count == len(snapshot_tasks) + assert tasks + assert len(tasks) == len(snapshot_tasks) + assert {t.snapshot_task_id for t in tasks} == { + t.snapshot_task_id for t in snapshot_tasks + } + + +async def test_list_computation_collection_run_tasks_empty( + sqlalchemy_async_engine: AsyncEngine, + osparc_product_name: ProductName, + fake_collection_run_id: CollectionRunID, + with_product: dict[str, Any], +): + repo = CompRunsSnapshotTasksRepository(db_engine=sqlalchemy_async_engine) + # Use a random user_id unlikely to have tasks + user_id = 999999 + total_count, tasks = await repo.list_computation_collection_run_tasks( + product_name=osparc_product_name, + user_id=user_id, + collection_run_id=fake_collection_run_id, + ) + assert total_count == 0 + assert tasks == [] + + +async def test_list_computation_collection_run_tasks_pagination( + sqlalchemy_async_engine: AsyncEngine, + publish_project: Callable[[], Awaitable[PublishedProject]], + create_comp_run: Callable[..., Awaitable[CompRunsAtDB]], + create_comp_run_snapshot_tasks: Callable[ + ..., Awaitable[list[CompRunSnapshotTaskDBGet]] + ], + osparc_product_name: ProductName, + fake_collection_run_id: CollectionRunID, + with_product: dict[str, Any], +): + repo = CompRunsSnapshotTasksRepository(db_engine=sqlalchemy_async_engine) + published_project = await publish_project() + user_id = published_project.user["id"] + run = await create_comp_run( + published_project.user, + published_project.project, + dag_adjacency_list=published_project.pipeline.dag_adjacency_list, + collection_run_id=f"{fake_collection_run_id}", + ) + snapshot_tasks = await create_comp_run_snapshot_tasks( + user=published_project.user, + project=published_project.project, + run_id=run.run_id, + ) + # Test pagination: limit=1 + total_count, tasks = await repo.list_computation_collection_run_tasks( + product_name=osparc_product_name, + user_id=user_id, + collection_run_id=fake_collection_run_id, + limit=1, + offset=0, + ) + assert total_count == len(snapshot_tasks) + assert len(tasks) == 1 + # Test pagination: offset=1 + _, tasks_offset = await repo.list_computation_collection_run_tasks( + product_name=osparc_product_name, + user_id=user_id, + collection_run_id=fake_collection_run_id, + limit=1, + offset=1, + ) + assert len(tasks_offset) == 1 or ( + len(snapshot_tasks) == 1 and len(tasks_offset) == 0 + ) + + +async def test_list_computation_collection_run_tasks_wrong_user( + sqlalchemy_async_engine: AsyncEngine, + publish_project: Callable[[], Awaitable[PublishedProject]], + create_comp_run: Callable[..., Awaitable[CompRunsAtDB]], + create_comp_run_snapshot_tasks: Callable[ + ..., Awaitable[list[CompRunSnapshotTaskDBGet]] + ], + osparc_product_name: ProductName, + fake_collection_run_id: CollectionRunID, + with_product: dict[str, Any], +): + repo = CompRunsSnapshotTasksRepository(db_engine=sqlalchemy_async_engine) + published_project = await publish_project() + run = await create_comp_run( + published_project.user, + published_project.project, + dag_adjacency_list=published_project.pipeline.dag_adjacency_list, + collection_run_id=f"{fake_collection_run_id}", + ) + await create_comp_run_snapshot_tasks( + user=published_project.user, + project=published_project.project, + run_id=run.run_id, + ) + # Use a different user_id + wrong_user_id = 123456789 + total_count, tasks = await 
repo.list_computation_collection_run_tasks( + product_name=osparc_product_name, + user_id=wrong_user_id, + collection_run_id=fake_collection_run_id, + ) + assert total_count == 0 + assert tasks == [] + + +async def test_list_computation_collection_run_tasks_multiple_comp_runs_same_collection( + sqlalchemy_async_engine: AsyncEngine, + publish_project: Callable[[], Awaitable[PublishedProject]], + create_comp_run: Callable[..., Awaitable[CompRunsAtDB]], + create_comp_run_snapshot_tasks: Callable[ + ..., Awaitable[list[CompRunSnapshotTaskDBGet]] + ], + osparc_product_name: ProductName, + fake_collection_run_id: CollectionRunID, + with_product: dict[str, Any], +): + repo = CompRunsSnapshotTasksRepository(db_engine=sqlalchemy_async_engine) + published_project1 = await publish_project() + published_project2 = await publish_project() + published_project3 = await publish_project() + user_id = published_project1.user["id"] + + # Create 3 comp_runs, 2 with the same collection_run_id, 1 with a different one + run1 = await create_comp_run( + published_project1.user, + published_project1.project, + dag_adjacency_list=published_project1.pipeline.dag_adjacency_list, + collection_run_id=f"{fake_collection_run_id}", + ) + run2 = await create_comp_run( + published_project2.user, + published_project2.project, + dag_adjacency_list=published_project2.pipeline.dag_adjacency_list, + collection_run_id=f"{fake_collection_run_id}", + ) + other_collection_run_id = CollectionRunID("00000000-0000-0000-0000-000000000001") + run3 = await create_comp_run( + published_project3.user, + published_project3.project, + dag_adjacency_list=published_project3.pipeline.dag_adjacency_list, + collection_run_id=f"{other_collection_run_id}", + ) + + # Create snapshot tasks for each run + tasks_run1 = await create_comp_run_snapshot_tasks( + user=published_project1.user, + project=published_project1.project, + run_id=run1.run_id, + ) + tasks_run2 = await create_comp_run_snapshot_tasks( + user=published_project2.user, + project=published_project2.project, + run_id=run2.run_id, + ) + tasks_run3 = await create_comp_run_snapshot_tasks( + user=published_project3.user, + project=published_project3.project, + run_id=run3.run_id, + ) + + # Query for tasks with the shared collection_run_id + total_count, tasks = await repo.list_computation_collection_run_tasks( + product_name=osparc_product_name, + user_id=user_id, + collection_run_id=fake_collection_run_id, + ) + expected_task_ids = {t.snapshot_task_id for t in tasks_run1 + tasks_run2} + actual_task_ids = {t.snapshot_task_id for t in tasks} + assert total_count == len(expected_task_ids) + assert actual_task_ids == expected_task_ids + # Ensure tasks from run3 are not included + assert not any( + t.snapshot_task_id in {tt.snapshot_task_id for tt in tasks_run3} for t in tasks + ) diff --git a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_manager.py b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_manager.py index f38b33024318..8e3351317756 100644 --- a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_manager.py +++ b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_manager.py @@ -9,6 +9,7 @@ import asyncio +import contextlib import datetime import logging from collections.abc import AsyncIterator, Awaitable, Callable @@ -18,6 +19,7 @@ import pytest from _helpers import PublishedProject, assert_comp_runs, assert_comp_runs_empty from fastapi import FastAPI +from models_library.computations import CollectionRunID from models_library.projects import 
ProjectAtDB from models_library.projects_state import RunningState from pytest_mock.plugin import MockerFixture @@ -41,6 +43,14 @@ CompRunsRepository, ) from sqlalchemy.ext.asyncio import AsyncEngine +from tenacity import ( + RetryError, + retry, + retry_if_exception_type, + retry_unless_exception_type, + stop_after_delay, + wait_fixed, +) pytest_simcore_core_services_selection = ["postgres", "rabbit", "redis"] pytest_simcore_ops_services_selection = ["adminer", "redis-commander"] @@ -82,6 +92,7 @@ async def test_manager_starts_and_auto_schedules_pipelines( mocked_schedule_all_pipelines: mock.Mock, initialized_app: FastAPI, sqlalchemy_async_engine: AsyncEngine, + with_product: dict[str, Any], ): await assert_comp_runs_empty(sqlalchemy_async_engine) mocked_schedule_all_pipelines.assert_called() @@ -93,13 +104,14 @@ async def test_schedule_all_pipelines_empty_db( initialized_app: FastAPI, scheduler_rabbit_client_parser: mock.AsyncMock, sqlalchemy_async_engine: AsyncEngine, + with_product: dict[str, Any], ): await assert_comp_runs_empty(sqlalchemy_async_engine) await schedule_all_pipelines(initialized_app) # check nothing was distributed - scheduler_rabbit_client_parser.assert_not_called() + _assert_scheduler_client_not_called(scheduler_rabbit_client_parser) # check comp_runs is still empty await assert_comp_runs_empty(sqlalchemy_async_engine) @@ -109,6 +121,7 @@ async def test_schedule_all_pipelines_concurently_runs_exclusively_and_raises( with_disabled_auto_scheduling: mock.Mock, initialized_app: FastAPI, mocker: MockerFixture, + with_product: dict[str, Any], ): CONCURRENCY = 5 # NOTE: this ensure no flakyness as empty scheduling is very fast @@ -138,6 +151,35 @@ async def slow_limited_gather(*args, **kwargs): mock_function.assert_called_once() +@retry( + retry=retry_if_exception_type(AssertionError), + stop=stop_after_delay(3), + wait=wait_fixed(0.5), + reraise=True, +) +def _assert_scheduler_client_called_once_with( + scheduler_rabbit_client_parser: mock.AsyncMock, + expected_message: SchedulePipelineRabbitMessage, +): + scheduler_rabbit_client_parser.assert_called_once_with(expected_message.body()) + + +def _assert_scheduler_client_not_called( + scheduler_rabbit_client_parser: mock.AsyncMock, +): + @retry( + retry=retry_unless_exception_type(AssertionError), + stop=stop_after_delay(3), + wait=wait_fixed(1), + reraise=True, + ) # pylint: disable=unused-variable + def _(): + scheduler_rabbit_client_parser.assert_not_called() + + with contextlib.suppress(RetryError): + _() + + async def test_schedule_all_pipelines( with_disabled_auto_scheduling: mock.Mock, with_disabled_scheduler_worker: mock.Mock, @@ -146,6 +188,7 @@ async def test_schedule_all_pipelines( sqlalchemy_async_engine: AsyncEngine, run_metadata: RunMetadataDict, scheduler_rabbit_client_parser: mock.AsyncMock, + fake_collection_run_id: CollectionRunID, ): await assert_comp_runs_empty(sqlalchemy_async_engine) assert published_project.project.prj_owner @@ -156,14 +199,16 @@ async def test_schedule_all_pipelines( project_id=published_project.project.uuid, run_metadata=run_metadata, use_on_demand_clusters=False, + collection_run_id=fake_collection_run_id, ) # this directly schedule a new pipeline - scheduler_rabbit_client_parser.assert_called_once_with( + _assert_scheduler_client_called_once_with( + scheduler_rabbit_client_parser, SchedulePipelineRabbitMessage( user_id=published_project.project.prj_owner, project_id=published_project.project.uuid, iteration=1, - ).body() + ), ) scheduler_rabbit_client_parser.reset_mock() comp_run = 
(await assert_comp_runs(sqlalchemy_async_engine, expected_total=1))[0] @@ -180,7 +225,7 @@ async def test_schedule_all_pipelines( # this will now not schedule the pipeline since it was already scheduled await schedule_all_pipelines(initialized_app) - scheduler_rabbit_client_parser.assert_not_called() + _assert_scheduler_client_not_called(scheduler_rabbit_client_parser) comp_runs = await assert_comp_runs(sqlalchemy_async_engine, expected_total=1) comp_run = comp_runs[0] assert comp_run.scheduled @@ -200,12 +245,13 @@ async def test_schedule_all_pipelines( # now we schedule a pipeline again, but we wait for the scheduler interval to pass # this will trigger a new schedule await schedule_all_pipelines(initialized_app) - scheduler_rabbit_client_parser.assert_called_once_with( + _assert_scheduler_client_called_once_with( + scheduler_rabbit_client_parser, SchedulePipelineRabbitMessage( user_id=published_project.project.prj_owner, project_id=published_project.project.uuid, iteration=1, - ).body() + ), ) scheduler_rabbit_client_parser.reset_mock() comp_runs = await assert_comp_runs(sqlalchemy_async_engine, expected_total=1) @@ -223,12 +269,13 @@ async def test_schedule_all_pipelines( project_id=published_project.project.uuid, ) await schedule_all_pipelines(initialized_app) - scheduler_rabbit_client_parser.assert_called_once_with( + _assert_scheduler_client_called_once_with( + scheduler_rabbit_client_parser, SchedulePipelineRabbitMessage( user_id=published_project.project.prj_owner, project_id=published_project.project.uuid, iteration=1, - ).body() + ), ) scheduler_rabbit_client_parser.reset_mock() comp_runs = await assert_comp_runs(sqlalchemy_async_engine, expected_total=1) @@ -247,6 +294,7 @@ async def test_schedule_all_pipelines_logs_error_if_it_find_old_pipelines( run_metadata: RunMetadataDict, scheduler_rabbit_client_parser: mock.AsyncMock, caplog: pytest.LogCaptureFixture, + fake_collection_run_id: CollectionRunID, ): await assert_comp_runs_empty(sqlalchemy_async_engine) assert published_project.project.prj_owner @@ -257,14 +305,16 @@ async def test_schedule_all_pipelines_logs_error_if_it_find_old_pipelines( project_id=published_project.project.uuid, run_metadata=run_metadata, use_on_demand_clusters=False, + collection_run_id=fake_collection_run_id, ) # this directly schedule a new pipeline - scheduler_rabbit_client_parser.assert_called_once_with( + _assert_scheduler_client_called_once_with( + scheduler_rabbit_client_parser, SchedulePipelineRabbitMessage( user_id=published_project.project.prj_owner, project_id=published_project.project.uuid, iteration=1, - ).body() + ), ) scheduler_rabbit_client_parser.reset_mock() comp_run = (await assert_comp_runs(sqlalchemy_async_engine, expected_total=1))[0] @@ -280,7 +330,7 @@ async def test_schedule_all_pipelines_logs_error_if_it_find_old_pipelines( # this will now not schedule the pipeline since it was already scheduled await schedule_all_pipelines(initialized_app) - scheduler_rabbit_client_parser.assert_not_called() + _assert_scheduler_client_not_called(scheduler_rabbit_client_parser) comp_runs = await assert_comp_runs(sqlalchemy_async_engine, expected_total=1) comp_run = comp_runs[0] assert comp_run.scheduled == start_schedule_time, "scheduled time changed!" 
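The new _assert_scheduler_client_called_once_with / _assert_scheduler_client_not_called helpers above replace direct mock assertions with a short polling window, because the scheduler messages travel asynchronously through RabbitMQ before the consumer mock sees them. A minimal sketch of that pattern (illustrative only, not part of this diff; the helper name and timeouts are placeholders):

from unittest import mock

from tenacity import retry, retry_if_exception_type, stop_after_delay, wait_fixed


@retry(
    retry=retry_if_exception_type(AssertionError),
    stop=stop_after_delay(3),
    wait=wait_fixed(0.5),
    reraise=True,
)
def _assert_eventually_called_once_with(
    parser: mock.AsyncMock, expected_body: bytes
) -> None:
    # keep re-evaluating the assertion until it passes or ~3 seconds elapse,
    # then re-raise the last AssertionError so the test fails with a clear message
    parser.assert_called_once_with(expected_body)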
@@ -297,15 +347,21 @@ async def test_schedule_all_pipelines_logs_error_if_it_find_old_pipelines( ) with caplog.at_level(logging.ERROR): await schedule_all_pipelines(initialized_app) + lost_pipeline_messages = [ + msg + for msg in caplog.messages + if "lost pipelines" in msg and "re-scheduled" in msg + ] assert ( - "found 1 lost pipelines, they will be re-scheduled now" in caplog.messages - ) - scheduler_rabbit_client_parser.assert_called_once_with( + len(lost_pipeline_messages) > 0 + ), f"Expected lost pipeline message, got: {caplog.messages}" + _assert_scheduler_client_called_once_with( + scheduler_rabbit_client_parser, SchedulePipelineRabbitMessage( user_id=published_project.project.prj_owner, project_id=published_project.project.uuid, iteration=1, - ).body() + ), ) scheduler_rabbit_client_parser.reset_mock() comp_runs = await assert_comp_runs(sqlalchemy_async_engine, expected_total=1) @@ -320,17 +376,19 @@ async def test_empty_pipeline_is_not_scheduled( with_disabled_auto_scheduling: mock.Mock, with_disabled_scheduler_worker: mock.Mock, initialized_app: FastAPI, + with_product: dict[str, Any], create_registered_user: Callable[..., dict[str, Any]], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], create_pipeline: Callable[..., Awaitable[CompPipelineAtDB]], run_metadata: RunMetadataDict, sqlalchemy_async_engine: AsyncEngine, scheduler_rabbit_client_parser: mock.AsyncMock, caplog: pytest.LogCaptureFixture, + fake_collection_run_id: CollectionRunID, ): await assert_comp_runs_empty(sqlalchemy_async_engine) user = create_registered_user() - empty_project = await project(user) + empty_project = await create_project(user) # the project is not in the comp_pipeline, therefore scheduling it should fail with pytest.raises(PipelineNotFoundError): @@ -340,9 +398,10 @@ async def test_empty_pipeline_is_not_scheduled( project_id=empty_project.uuid, run_metadata=run_metadata, use_on_demand_clusters=False, + collection_run_id=fake_collection_run_id, ) await assert_comp_runs_empty(sqlalchemy_async_engine) - scheduler_rabbit_client_parser.assert_not_called() + _assert_scheduler_client_not_called(scheduler_rabbit_client_parser) # create the empty pipeline now await create_pipeline(project_id=f"{empty_project.uuid}") @@ -355,8 +414,9 @@ async def test_empty_pipeline_is_not_scheduled( project_id=empty_project.uuid, run_metadata=run_metadata, use_on_demand_clusters=False, + collection_run_id=fake_collection_run_id, ) assert len(caplog.records) == 1 assert "no computational dag defined" in caplog.records[0].message await assert_comp_runs_empty(sqlalchemy_async_engine) - scheduler_rabbit_client_parser.assert_not_called() + _assert_scheduler_client_not_called(scheduler_rabbit_client_parser) diff --git a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_scheduler_dask.py b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_scheduler_dask.py index c4d360adba27..aa04cf322ff4 100644 --- a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_scheduler_dask.py +++ b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_scheduler_dask.py @@ -5,11 +5,13 @@ # pylint:disable=protected-access # pylint:disable=too-many-arguments # pylint:disable=no-name-in-module -# pylint: disable=too-many-statements +# pylint:disable=too-many-positional-arguments +# pylint:disable=too-many-statements import asyncio import datetime +import random from collections.abc import AsyncIterator, Awaitable, Callable from copy import deepcopy from 
dataclasses import dataclass @@ -25,16 +27,22 @@ assert_comp_runs_empty, assert_comp_tasks_and_comp_run_snapshot_tasks, ) -from dask_task_models_library.container_tasks.errors import TaskCancelledError +from dask_task_models_library.container_tasks.errors import ( + ServiceRuntimeError, + TaskCancelledError, +) from dask_task_models_library.container_tasks.events import TaskProgressEvent from dask_task_models_library.container_tasks.io import TaskOutputData from dask_task_models_library.container_tasks.protocol import TaskOwner from faker import Faker from fastapi.applications import FastAPI +from models_library.computations import CollectionRunID +from models_library.errors import ErrorDict from models_library.projects import ProjectAtDB, ProjectID from models_library.projects_nodes_io import NodeID from models_library.projects_state import RunningState from models_library.rabbitmq_messages import ( + ComputationalPipelineStatusMessage, InstrumentationRabbitMessage, RabbitResourceTrackingBaseMessage, RabbitResourceTrackingHeartbeatMessage, @@ -46,6 +54,7 @@ from pydantic import TypeAdapter from pytest_mock.plugin import MockerFixture from servicelib.rabbitmq import RabbitMQClient +from servicelib.rabbitmq._constants import BIND_TO_ALL_TOPICS from simcore_postgres_database.models.comp_runs import comp_runs from simcore_postgres_database.models.comp_tasks import NodeClass from simcore_service_director_v2.core.errors import ( @@ -68,6 +77,8 @@ BaseCompScheduler, ) from simcore_service_director_v2.modules.comp_scheduler._scheduler_dask import ( + _TASK_RETRIEVAL_ERROR_CONTEXT_TIME_KEY, + _TASK_RETRIEVAL_ERROR_TYPE, DaskScheduler, ) from simcore_service_director_v2.modules.comp_scheduler._utils import COMPLETED_STATES @@ -160,6 +171,8 @@ async def _assert_start_pipeline( sqlalchemy_async_engine: AsyncEngine, published_project: PublishedProject, run_metadata: RunMetadataDict, + computational_pipeline_rabbit_client_parser: mock.AsyncMock, + collection_run_id: CollectionRunID, ) -> tuple[CompRunsAtDB, list[CompTaskAtDB]]: exp_published_tasks = deepcopy(published_project.tasks) assert published_project.project.prj_owner @@ -169,6 +182,7 @@ async def _assert_start_pipeline( project_id=published_project.project.uuid, run_metadata=run_metadata, use_on_demand_clusters=False, + collection_run_id=collection_run_id, ) # check the database is correctly updated, the run is published @@ -181,6 +195,11 @@ async def _assert_start_pipeline( comp_runs.c.project_uuid == f"{published_project.project.uuid}", ), ) + await _assert_message_received( + computational_pipeline_rabbit_client_parser, + 1, + ComputationalPipelineStatusMessage.model_validate_json, + ) await assert_comp_tasks_and_comp_run_snapshot_tasks( sqlalchemy_async_engine, project_uuid=published_project.project.uuid, @@ -198,6 +217,7 @@ async def _assert_publish_in_dask_backend( published_tasks: list[CompTaskAtDB], mocked_dask_client: mock.MagicMock, scheduler: BaseCompScheduler, + computational_pipeline_rabbit_client_parser: mock.AsyncMock, ) -> tuple[list[CompTaskAtDB], dict[NodeID, Callable[[], None]]]: expected_pending_tasks = [ published_tasks[1], @@ -285,6 +305,11 @@ async def _return_tasks_pending(job_ids: list[str]) -> list[RunningState]: where_statement=(comp_runs.c.user_id == published_project.project.prj_owner) & (comp_runs.c.project_uuid == f"{published_project.project.uuid}"), ) + await _assert_message_received( + computational_pipeline_rabbit_client_parser, + 1, + ComputationalPipelineStatusMessage.model_validate_json, + ) await 
assert_comp_tasks_and_comp_run_snapshot_tasks( sqlalchemy_async_engine, project_uuid=published_project.project.uuid, @@ -336,11 +361,42 @@ async def resource_tracking_rabbit_client_parser( await client.unsubscribe(queue_name) +@pytest.fixture +async def computational_pipeline_rabbit_client_parser( + create_rabbitmq_client: Callable[[str], RabbitMQClient], mocker: MockerFixture +) -> AsyncIterator[mock.AsyncMock]: + client = create_rabbitmq_client("computational_pipeline_pytest_consumer") + mock = mocker.AsyncMock(return_value=True) + queue_name, _ = await client.subscribe( + ComputationalPipelineStatusMessage.get_channel_name(), + mock, + topics=[BIND_TO_ALL_TOPICS], + ) + yield mock + await client.unsubscribe(queue_name) + + async def _assert_message_received( mocked_message_parser: mock.AsyncMock, expected_call_count: int, message_parser: Callable, ) -> list: + if expected_call_count == 0: + # ensure it remains so for a few seconds + mocked_message_parser.assert_not_called() + async for attempt in AsyncRetrying( + wait=wait_fixed(1), + stop=stop_after_delay(3), + retry=retry_if_exception_type(AssertionError), + reraise=True, + ): + with attempt: + print( + f"--> waiting for rabbitmq message [{attempt.retry_state.attempt_number}, {attempt.retry_state.idle_for}]" + ) + mocked_message_parser.assert_not_called() + + return [] async for attempt in AsyncRetrying( wait=wait_fixed(0.1), stop=stop_after_delay(5), @@ -351,7 +407,9 @@ async def _assert_message_received( print( f"--> waiting for rabbitmq message [{attempt.retry_state.attempt_number}, {attempt.retry_state.idle_for}]" ) - assert mocked_message_parser.call_count == expected_call_count + assert ( + mocked_message_parser.call_count == expected_call_count + ), mocked_message_parser.call_args_list print( f"<-- rabbitmq message received after [{attempt.retry_state.attempt_number}, {attempt.retry_state.idle_for}]" ) @@ -422,7 +480,9 @@ async def test_proper_pipeline_is_scheduled( # noqa: PLR0915 mocked_clean_task_output_and_log_files_if_invalid: mock.Mock, instrumentation_rabbit_client_parser: mock.AsyncMock, resource_tracking_rabbit_client_parser: mock.AsyncMock, + computational_pipeline_rabbit_client_parser: mock.AsyncMock, run_metadata: RunMetadataDict, + fake_collection_run_id: CollectionRunID, ): with_disabled_auto_scheduling.assert_called_once() _with_mock_send_computation_tasks(published_project.tasks, mocked_dask_client) @@ -435,6 +495,8 @@ async def test_proper_pipeline_is_scheduled( # noqa: PLR0915 sqlalchemy_async_engine=sqlalchemy_async_engine, published_project=published_project, run_metadata=run_metadata, + computational_pipeline_rabbit_client_parser=computational_pipeline_rabbit_client_parser, + collection_run_id=fake_collection_run_id, ) with_disabled_scheduler_publisher.assert_called() @@ -446,6 +508,7 @@ async def test_proper_pipeline_is_scheduled( # noqa: PLR0915 expected_published_tasks, mocked_dask_client, scheduler_api, + computational_pipeline_rabbit_client_parser, ) # ------------------------------------------------------------------------------- @@ -478,6 +541,11 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[RunningState]: comp_runs.c.project_uuid == f"{published_project.project.uuid}", ), ) + await _assert_message_received( + computational_pipeline_rabbit_client_parser, + 1, + ComputationalPipelineStatusMessage.model_validate_json, + ) await assert_comp_tasks_and_comp_run_snapshot_tasks( sqlalchemy_async_engine, project_uuid=published_project.project.uuid, @@ -773,7 +841,13 @@ async def 
_return_2nd_task_failed(job_ids: list[str]) -> list[RunningState]: ] mocked_dask_client.get_tasks_status.side_effect = _return_2nd_task_failed - mocked_dask_client.get_task_result.side_effect = None + mocked_dask_client.get_task_result.side_effect = ServiceRuntimeError( + service_key="simcore/services/dynamic/some-service", + service_version="1.0.0", + container_id="some-container-id", + exit_code=1, + service_logs="simulated error", + ) await scheduler_api.apply( user_id=run_in_db.user_id, project_id=run_in_db.project_uuid, @@ -854,6 +928,11 @@ async def _return_3rd_task_success(job_ids: list[str]) -> list[RunningState]: comp_runs.c.project_uuid == f"{published_project.project.uuid}", ), ) + await _assert_message_received( + computational_pipeline_rabbit_client_parser, + 1, + ComputationalPipelineStatusMessage.model_validate_json, + ) await assert_comp_tasks_and_comp_run_snapshot_tasks( sqlalchemy_async_engine, @@ -903,6 +982,9 @@ async def with_started_project( scheduler_api: BaseCompScheduler, instrumentation_rabbit_client_parser: mock.AsyncMock, resource_tracking_rabbit_client_parser: mock.AsyncMock, + computational_pipeline_rabbit_client_parser: mock.AsyncMock, + fake_collection_run_id: CollectionRunID, + with_product: dict[str, Any], ) -> RunningProject: with_disabled_auto_scheduling.assert_called_once() published_project = await publish_project() @@ -914,6 +996,8 @@ async def with_started_project( sqlalchemy_async_engine=sqlalchemy_async_engine, published_project=published_project, run_metadata=run_metadata, + computational_pipeline_rabbit_client_parser=computational_pipeline_rabbit_client_parser, + collection_run_id=fake_collection_run_id, ) with_disabled_scheduler_publisher.assert_called_once() @@ -929,6 +1013,7 @@ async def with_started_project( expected_published_tasks, mocked_dask_client, scheduler_api, + computational_pipeline_rabbit_client_parser, ) # @@ -966,6 +1051,11 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[RunningState]: comp_runs.c.project_uuid == f"{published_project.project.uuid}", ), ) + await _assert_message_received( + computational_pipeline_rabbit_client_parser, + 1, + ComputationalPipelineStatusMessage.model_validate_json, + ) await assert_comp_tasks_and_comp_run_snapshot_tasks( sqlalchemy_async_engine, project_uuid=published_project.project.uuid, @@ -1134,17 +1224,22 @@ async def test_broken_pipeline_configuration_is_not_scheduled_and_aborted( initialized_app: FastAPI, scheduler_api: BaseCompScheduler, create_registered_user: Callable[..., dict[str, Any]], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], create_pipeline: Callable[..., Awaitable[CompPipelineAtDB]], fake_workbench_without_outputs: dict[str, Any], fake_workbench_adjacency: dict[str, Any], sqlalchemy_async_engine: AsyncEngine, run_metadata: RunMetadataDict, + computational_pipeline_rabbit_client_parser: mock.AsyncMock, + fake_collection_run_id: CollectionRunID, + with_product: dict[str, Any], ): """A pipeline which comp_tasks are missing should not be scheduled. 
It shall be aborted and shown as such in the comp_runs db""" user = create_registered_user() - sleepers_project = await project(user, workbench=fake_workbench_without_outputs) + sleepers_project = await create_project( + user, workbench=fake_workbench_without_outputs + ) await create_pipeline( project_id=f"{sleepers_project.uuid}", dag_adjacency_list=fake_workbench_adjacency, @@ -1160,6 +1255,7 @@ async def test_broken_pipeline_configuration_is_not_scheduled_and_aborted( project_id=sleepers_project.uuid, run_metadata=run_metadata, use_on_demand_clusters=False, + collection_run_id=fake_collection_run_id, ) with_disabled_scheduler_publisher.assert_called_once() # we shall have a a new comp_runs row with the new pipeline job @@ -1172,6 +1268,11 @@ async def test_broken_pipeline_configuration_is_not_scheduled_and_aborted( & (comp_runs.c.project_uuid == f"{sleepers_project.uuid}"), ) )[0] + await _assert_message_received( + computational_pipeline_rabbit_client_parser, + 1, + ComputationalPipelineStatusMessage.model_validate_json, + ) # # Trigger scheduling manually. since the pipeline is broken, it shall be aborted @@ -1184,10 +1285,15 @@ async def test_broken_pipeline_configuration_is_not_scheduled_and_aborted( await assert_comp_runs( sqlalchemy_async_engine, expected_total=1, - expected_state=RunningState.ABORTED, + expected_state=RunningState.FAILED, where_statement=(comp_runs.c.user_id == user["id"]) & (comp_runs.c.project_uuid == f"{sleepers_project.uuid}"), ) + await _assert_message_received( + computational_pipeline_rabbit_client_parser, + 1, + ComputationalPipelineStatusMessage.model_validate_json, + ) async def test_task_progress_triggers( @@ -1201,6 +1307,8 @@ async def test_task_progress_triggers( mocked_parse_output_data_fct: mock.Mock, mocked_clean_task_output_and_log_files_if_invalid: mock.Mock, run_metadata: RunMetadataDict, + computational_pipeline_rabbit_client_parser: mock.AsyncMock, + fake_collection_run_id: CollectionRunID, ): _with_mock_send_computation_tasks(published_project.tasks, mocked_dask_client) _run_in_db, expected_published_tasks = await _assert_start_pipeline( @@ -1208,7 +1316,10 @@ async def test_task_progress_triggers( sqlalchemy_async_engine=sqlalchemy_async_engine, published_project=published_project, run_metadata=run_metadata, + computational_pipeline_rabbit_client_parser=computational_pipeline_rabbit_client_parser, + collection_run_id=fake_collection_run_id, ) + # ------------------------------------------------------------------------------- # 1. 
first run will move comp_tasks to PENDING so the dask-worker can take them expected_pending_tasks, _ = await _assert_publish_in_dask_backend( @@ -1217,6 +1328,7 @@ async def test_task_progress_triggers( expected_published_tasks, mocked_dask_client, scheduler_api, + computational_pipeline_rabbit_client_parser, ) # send some progress @@ -1272,6 +1384,8 @@ async def test_handling_of_disconnected_scheduler_dask( published_project: PublishedProject, backend_error: ComputationalSchedulerError, run_metadata: RunMetadataDict, + computational_pipeline_rabbit_client_parser: mock.AsyncMock, + fake_collection_run_id: CollectionRunID, ): # this will create a non connected backend issue that will trigger re-connection mocked_dask_client_send_task = mocker.patch( @@ -1288,6 +1402,12 @@ async def test_handling_of_disconnected_scheduler_dask( project_id=published_project.project.uuid, run_metadata=run_metadata, use_on_demand_clusters=False, + collection_run_id=fake_collection_run_id, + ) + await _assert_message_received( + computational_pipeline_rabbit_client_parser, + 1, + ComputationalPipelineStatusMessage.model_validate_json, ) # since there is no cluster, there is no dask-scheduler, @@ -1337,6 +1457,11 @@ async def test_handling_of_disconnected_scheduler_dask( expected_progress=1, run_id=run_in_db.run_id, ) + await _assert_message_received( + computational_pipeline_rabbit_client_parser, + 1, + ComputationalPipelineStatusMessage.model_validate_json, + ) # then we have another scheduler run await scheduler_api.apply( user_id=run_in_db.user_id, @@ -1372,6 +1497,7 @@ class RebootState: expected_task_state_group2: RunningState expected_task_progress_group2: float expected_run_state: RunningState + expected_pipeline_state_notification: int @pytest.mark.parametrize( @@ -1386,6 +1512,7 @@ class RebootState: expected_task_state_group2=RunningState.ABORTED, expected_task_progress_group2=1, expected_run_state=RunningState.FAILED, + expected_pipeline_state_notification=1, ), id="reboot with lost tasks", ), @@ -1398,6 +1525,7 @@ class RebootState: expected_task_state_group2=RunningState.ABORTED, expected_task_progress_group2=1, expected_run_state=RunningState.ABORTED, + expected_pipeline_state_notification=1, ), id="reboot with aborted tasks", ), @@ -1410,6 +1538,7 @@ class RebootState: expected_task_state_group2=RunningState.ABORTED, expected_task_progress_group2=1, expected_run_state=RunningState.FAILED, + expected_pipeline_state_notification=1, ), id="reboot with failed tasks", ), @@ -1424,6 +1553,7 @@ class RebootState: expected_task_state_group2=RunningState.STARTED, expected_task_progress_group2=0, expected_run_state=RunningState.STARTED, + expected_pipeline_state_notification=0, ), id="reboot with running tasks", ), @@ -1436,6 +1566,7 @@ class RebootState: expected_task_state_group2=RunningState.SUCCESS, expected_task_progress_group2=1, expected_run_state=RunningState.SUCCESS, + expected_pipeline_state_notification=1, ), id="reboot with completed tasks", ), @@ -1452,6 +1583,7 @@ async def test_handling_scheduled_tasks_after_director_reboots( mocked_parse_output_data_fct: mock.Mock, mocked_clean_task_output_fct: mock.Mock, reboot_state: RebootState, + computational_pipeline_rabbit_client_parser: mock.AsyncMock, ): """After the dask client is rebooted, or that the director-v2 reboots the dv-2 internal scheduler shall continue scheduling correctly. 
Even though the task might have continued to run @@ -1534,6 +1666,11 @@ async def mocked_get_task_result(_job_id: str) -> TaskOutputData: comp_runs.c.project_uuid == f"{running_project.project.uuid}", ), ) + await _assert_message_received( + computational_pipeline_rabbit_client_parser, + reboot_state.expected_pipeline_state_notification, + ComputationalPipelineStatusMessage.model_validate_json, + ) async def test_handling_cancellation_of_jobs_after_reboot( @@ -1545,6 +1682,7 @@ async def test_handling_cancellation_of_jobs_after_reboot( scheduler_api: BaseCompScheduler, mocked_parse_output_data_fct: mock.Mock, mocked_clean_task_output_fct: mock.Mock, + computational_pipeline_rabbit_client_parser: mock.AsyncMock, ): """A running pipeline was cancelled by a user and the DV-2 was restarted BEFORE It could actually cancel the task. On reboot the DV-2 shall recover @@ -1564,6 +1702,11 @@ async def test_handling_cancellation_of_jobs_after_reboot( ), ) )[0] + await _assert_message_received( + computational_pipeline_rabbit_client_parser, + 0, + ComputationalPipelineStatusMessage.model_validate_json, + ) await assert_comp_tasks_and_comp_run_snapshot_tasks( sqlalchemy_async_engine, @@ -1660,6 +1803,11 @@ async def _return_random_task_result(job_id) -> TaskOutputData: ), ) mocked_clean_task_output_fct.assert_called() + await _assert_message_received( + computational_pipeline_rabbit_client_parser, + 1, + ComputationalPipelineStatusMessage.model_validate_json, + ) @pytest.fixture @@ -1682,6 +1830,8 @@ async def test_running_pipeline_triggers_heartbeat( published_project: PublishedProject, resource_tracking_rabbit_client_parser: mock.AsyncMock, run_metadata: RunMetadataDict, + computational_pipeline_rabbit_client_parser: mock.AsyncMock, + fake_collection_run_id: CollectionRunID, ): _with_mock_send_computation_tasks(published_project.tasks, mocked_dask_client) run_in_db, expected_published_tasks = await _assert_start_pipeline( @@ -1689,6 +1839,8 @@ async def test_running_pipeline_triggers_heartbeat( sqlalchemy_async_engine=sqlalchemy_async_engine, published_project=published_project, run_metadata=run_metadata, + computational_pipeline_rabbit_client_parser=computational_pipeline_rabbit_client_parser, + collection_run_id=fake_collection_run_id, ) # ------------------------------------------------------------------------------- # 1. first run will move comp_tasks to PENDING so the dask-worker can take them @@ -1698,6 +1850,7 @@ async def test_running_pipeline_triggers_heartbeat( expected_published_tasks, mocked_dask_client, scheduler_api, + computational_pipeline_rabbit_client_parser, ) # ------------------------------------------------------------------------------- # 2. 
the "worker" starts processing a task @@ -1796,6 +1949,8 @@ async def test_pipeline_with_on_demand_cluster_with_not_ready_backend_waits( run_metadata: RunMetadataDict, mocked_get_or_create_cluster: mock.Mock, faker: Faker, + computational_pipeline_rabbit_client_parser: mock.AsyncMock, + fake_collection_run_id: CollectionRunID, ): mocked_get_or_create_cluster.side_effect = ( ComputationalBackendOnDemandNotReadyError( @@ -1810,6 +1965,7 @@ async def test_pipeline_with_on_demand_cluster_with_not_ready_backend_waits( project_id=published_project.project.uuid, run_metadata=run_metadata, use_on_demand_clusters=True, + collection_run_id=fake_collection_run_id, ) # we ask to use an on-demand cluster, therefore the tasks are published first @@ -1824,6 +1980,11 @@ async def test_pipeline_with_on_demand_cluster_with_not_ready_backend_waits( ), ) )[0] + await _assert_message_received( + computational_pipeline_rabbit_client_parser, + 1, + ComputationalPipelineStatusMessage.model_validate_json, + ) await assert_comp_tasks_and_comp_run_snapshot_tasks( sqlalchemy_async_engine, project_uuid=published_project.project.uuid, @@ -1855,6 +2016,11 @@ async def test_pipeline_with_on_demand_cluster_with_not_ready_backend_waits( comp_runs.c.project_uuid == f"{published_project.project.uuid}", ), ) + await _assert_message_received( + computational_pipeline_rabbit_client_parser, + 1, + ComputationalPipelineStatusMessage.model_validate_json, + ) await assert_comp_tasks_and_comp_run_snapshot_tasks( sqlalchemy_async_engine, project_uuid=published_project.project.uuid, @@ -1895,9 +2061,10 @@ async def test_pipeline_with_on_demand_cluster_with_not_ready_backend_waits( "get_or_create_exception", [ClustersKeeperNotAvailableError], ) -async def test_pipeline_with_on_demand_cluster_with_no_clusters_keeper_fails( +async def test_pipeline_with_on_demand_cluster_with_no_clusters_keeper_waits_and_eventually_timesout_fails( with_disabled_auto_scheduling: mock.Mock, with_disabled_scheduler_publisher: mock.Mock, + with_short_max_wait_for_cluster: datetime.timedelta, initialized_app: FastAPI, scheduler_api: BaseCompScheduler, sqlalchemy_async_engine: AsyncEngine, @@ -1905,9 +2072,9 @@ async def test_pipeline_with_on_demand_cluster_with_no_clusters_keeper_fails( run_metadata: RunMetadataDict, mocked_get_or_create_cluster: mock.Mock, get_or_create_exception: Exception, + computational_pipeline_rabbit_client_parser: mock.AsyncMock, + fake_collection_run_id: CollectionRunID, ): - # needs to change: https://github.com/ITISFoundation/osparc-simcore/issues/6817 - mocked_get_or_create_cluster.side_effect = get_or_create_exception # running the pipeline will trigger a call to the clusters-keeper assert published_project.project.prj_owner @@ -1917,6 +2084,7 @@ async def test_pipeline_with_on_demand_cluster_with_no_clusters_keeper_fails( project_id=published_project.project.uuid, run_metadata=run_metadata, use_on_demand_clusters=True, + collection_run_id=fake_collection_run_id, ) # we ask to use an on-demand cluster, therefore the tasks are published first @@ -1931,6 +2099,11 @@ async def test_pipeline_with_on_demand_cluster_with_no_clusters_keeper_fails( ), ) )[0] + await _assert_message_received( + computational_pipeline_rabbit_client_parser, + 1, + ComputationalPipelineStatusMessage.model_validate_json, + ) await assert_comp_tasks_and_comp_run_snapshot_tasks( sqlalchemy_async_engine, project_uuid=published_project.project.uuid, @@ -1939,8 +2112,8 @@ async def test_pipeline_with_on_demand_cluster_with_no_clusters_keeper_fails( 
expected_progress=None, run_id=run_in_db.run_id, ) - # now it should switch to failed, the run still runs until the next iteration - expected_failed_tasks = [ + # now it should switch to waiting for cluster and wait + expected_waiting_for_cluster_tasks = [ published_project.tasks[1], published_project.tasks[3], ] @@ -1955,21 +2128,58 @@ await assert_comp_runs( sqlalchemy_async_engine, expected_total=1, - expected_state=RunningState.FAILED, + expected_state=RunningState.WAITING_FOR_CLUSTER, where_statement=and_( comp_runs.c.user_id == published_project.project.prj_owner, comp_runs.c.project_uuid == f"{published_project.project.uuid}", ), ) + await _assert_message_received( + computational_pipeline_rabbit_client_parser, + 1, + ComputationalPipelineStatusMessage.model_validate_json, + ) await assert_comp_tasks_and_comp_run_snapshot_tasks( sqlalchemy_async_engine, project_uuid=published_project.project.uuid, - task_ids=[t.node_id for t in expected_failed_tasks], - expected_state=RunningState.FAILED, - expected_progress=1.0, + task_ids=[t.node_id for t in expected_waiting_for_cluster_tasks], + expected_state=RunningState.WAITING_FOR_CLUSTER, + expected_progress=None, + run_id=run_in_db.run_id, + ) + # applying again will trigger the clusters-keeper call once more + await scheduler_api.apply( + user_id=run_in_db.user_id, + project_id=run_in_db.project_uuid, + iteration=run_in_db.iteration, + ) + mocked_get_or_create_cluster.assert_called() + assert mocked_get_or_create_cluster.call_count == 1 + mocked_get_or_create_cluster.reset_mock() + await assert_comp_runs( + sqlalchemy_async_engine, + expected_total=1, + expected_state=RunningState.WAITING_FOR_CLUSTER, + where_statement=and_( + comp_runs.c.user_id == published_project.project.prj_owner, + comp_runs.c.project_uuid == f"{published_project.project.uuid}", + ), + ) + await _assert_message_received( + computational_pipeline_rabbit_client_parser, + 0, + ComputationalPipelineStatusMessage.model_validate_json, + ) + await assert_comp_tasks_and_comp_run_snapshot_tasks( + sqlalchemy_async_engine, + project_uuid=published_project.project.uuid, + task_ids=[t.node_id for t in expected_waiting_for_cluster_tasks], + expected_state=RunningState.WAITING_FOR_CLUSTER, + expected_progress=None, run_id=run_in_db.run_id, ) - # again will not re-trigger the call to clusters-keeper + await asyncio.sleep(with_short_max_wait_for_cluster.total_seconds() + 1) + # applying again after the maximum wait time will trigger the call once more, but now the run starts failing: first the tasks will be marked as FAILED await scheduler_api.apply( user_id=run_in_db.user_id, project_id=run_in_db.project_uuid, iteration=run_in_db.iteration, @@ -1985,11 +2195,247 @@ comp_runs.c.project_uuid == f"{published_project.project.uuid}", ), ) + await _assert_message_received( + computational_pipeline_rabbit_client_parser, + 1, + ComputationalPipelineStatusMessage.model_validate_json, + ) await assert_comp_tasks_and_comp_run_snapshot_tasks( sqlalchemy_async_engine, project_uuid=published_project.project.uuid, - task_ids=[t.node_id for t in expected_failed_tasks], + task_ids=[t.node_id for t in expected_waiting_for_cluster_tasks], expected_state=RunningState.FAILED, expected_progress=1.0, run_id=run_in_db.run_id, ) + + +async def test_run_new_pipeline_called_twice_prevents_duplicate_runs( + with_disabled_auto_scheduling: mock.Mock, + with_disabled_scheduler_publisher: mock.Mock, + initialized_app: FastAPI, + sqlalchemy_async_engine:
AsyncEngine, + published_project: PublishedProject, + run_metadata: RunMetadataDict, + computational_pipeline_rabbit_client_parser: mock.AsyncMock, + fake_collection_run_id: CollectionRunID, +): + # Ensure we start with an empty database + await assert_comp_runs_empty(sqlalchemy_async_engine) + + # First call to run_new_pipeline - should succeed + assert published_project.project.prj_owner + await run_new_pipeline( + initialized_app, + user_id=published_project.project.prj_owner, + project_id=published_project.project.uuid, + run_metadata=run_metadata, + use_on_demand_clusters=False, + collection_run_id=fake_collection_run_id, + ) + + # Verify first run was created and published + runs_after_first_call = await assert_comp_runs( + sqlalchemy_async_engine, + expected_total=1, + expected_state=RunningState.PUBLISHED, + where_statement=and_( + comp_runs.c.user_id == published_project.project.prj_owner, + comp_runs.c.project_uuid == f"{published_project.project.uuid}", + ), + ) + first_run = runs_after_first_call[0] + + # Verify first RabbitMQ message was sent + await _assert_message_received( + computational_pipeline_rabbit_client_parser, + 1, + ComputationalPipelineStatusMessage.model_validate_json, + ) + + # Second call to run_new_pipeline - should be ignored since first run is still running + await run_new_pipeline( + initialized_app, + user_id=published_project.project.prj_owner, + project_id=published_project.project.uuid, + run_metadata=run_metadata, + use_on_demand_clusters=False, + collection_run_id=fake_collection_run_id, + ) + + # Verify still only one run exists with same run_id + runs_after_second_call = await assert_comp_runs( + sqlalchemy_async_engine, + expected_total=1, + expected_state=RunningState.PUBLISHED, + where_statement=and_( + comp_runs.c.user_id == published_project.project.prj_owner, + comp_runs.c.project_uuid == f"{published_project.project.uuid}", + ), + ) + second_run = runs_after_second_call[0] + + # Verify it's the same run (same run_id, same created timestamp) + assert first_run.run_id == second_run.run_id + assert first_run.created == second_run.created + assert first_run.iteration == second_run.iteration + + # Verify no additional RabbitMQ message was sent (still only 1 total) + await _assert_message_received( + computational_pipeline_rabbit_client_parser, + 0, # No new messages expected + ComputationalPipelineStatusMessage.model_validate_json, + ) + + +async def test_getting_task_result_raises_exception_does_not_fail_task_and_retries( + with_disabled_auto_scheduling: mock.Mock, + with_disabled_scheduler_publisher: mock.Mock, + with_short_max_wait_for_retrieving_results: datetime.timedelta, + mocked_dask_client: mock.MagicMock, + initialized_app: FastAPI, + scheduler_api: BaseCompScheduler, + sqlalchemy_async_engine: AsyncEngine, + running_project: RunningProject, + mocked_parse_output_data_fct: mock.Mock, +): + # this tests the behavior of the scheduling when the dask client cannot retrieve + # the result of a task because of some communication error. In this case the task + # should be processed again in the next iteration and not marked as failed + # immediately. 
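+    # The two mocks below reproduce that scenario: the backend reports SUCCESS for every task, + # but fetching the result raises ComputationalBackendTaskResultsNotReadyError for all but + # one randomly chosen computational task.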
+ async def mocked_get_tasks_status(job_ids: list[str]) -> list[RunningState]: + return [RunningState.SUCCESS for _ in job_ids] + + mocked_dask_client.get_tasks_status.side_effect = mocked_get_tasks_status + + computational_tasks = [ + t for t in running_project.tasks if t.node_class is NodeClass.COMPUTATIONAL + ] + expected_timeouted_tasks = random.sample( + computational_tasks, k=len(computational_tasks) - 1 + ) + successful_tasks = [ + t for t in computational_tasks if t not in expected_timeouted_tasks + ] + + async def mocked_get_task_result(job_id: str) -> TaskOutputData: + if job_id in [t.job_id for t in successful_tasks]: + return TaskOutputData.model_validate({"whatever_output": 123}) + raise ComputationalBackendTaskResultsNotReadyError(job_id=job_id) + + mocked_dask_client.get_task_result.side_effect = mocked_get_task_result + # calling apply should not raise + assert running_project.project.prj_owner + await scheduler_api.apply( + user_id=running_project.project.prj_owner, + project_id=running_project.project.uuid, + iteration=1, + ) + assert mocked_dask_client.get_task_result.call_count == len(computational_tasks) + mocked_dask_client.get_task_result.reset_mock() + + # check the tasks in the DB: the error shall be set there and the task state set back to STARTED + comp_tasks, _ = await assert_comp_tasks_and_comp_run_snapshot_tasks( + sqlalchemy_async_engine, + project_uuid=running_project.project.uuid, + task_ids=[t.node_id for t in expected_timeouted_tasks], + expected_state=RunningState.STARTED, + expected_progress=0, + run_id=running_project.runs.run_id, + ) + # we should have an error in all these comp_tasks + retrieval_times = [] + for t in comp_tasks: + assert t.errors + assert len(t.errors) == 1 + error_dict = TypeAdapter(ErrorDict).validate_python(t.errors[0]) + assert error_dict["type"] == _TASK_RETRIEVAL_ERROR_TYPE + assert "ctx" in error_dict + assert _TASK_RETRIEVAL_ERROR_CONTEXT_TIME_KEY in error_dict["ctx"] + retrieval_times.append( + error_dict["ctx"][_TASK_RETRIEVAL_ERROR_CONTEXT_TIME_KEY] + ) + assert len(retrieval_times) == len(expected_timeouted_tasks) + + await assert_comp_tasks_and_comp_run_snapshot_tasks( + sqlalchemy_async_engine, + project_uuid=running_project.project.uuid, + task_ids=[t.node_id for t in successful_tasks], + expected_state=RunningState.SUCCESS, + expected_progress=1.0, + run_id=running_project.runs.run_id, + ) + await assert_comp_runs( + sqlalchemy_async_engine, + expected_total=1, + expected_state=RunningState.STARTED, + where_statement=and_( + comp_runs.c.user_id == running_project.project.prj_owner, + comp_runs.c.project_uuid == f"{running_project.project.uuid}", + ), + ) + + # calling again should not raise either, but should retry + assert running_project.project.prj_owner + for _ in range(3): + await scheduler_api.apply( + user_id=running_project.project.prj_owner, + project_id=running_project.project.uuid, + iteration=1, + ) + assert mocked_dask_client.get_task_result.call_count == ( + len(expected_timeouted_tasks) + ) + mocked_dask_client.get_task_result.reset_mock() + + comp_tasks, _ = await assert_comp_tasks_and_comp_run_snapshot_tasks( + sqlalchemy_async_engine, + project_uuid=running_project.project.uuid, + task_ids=[t.node_id for t in expected_timeouted_tasks], + expected_state=RunningState.STARTED, + expected_progress=0, + run_id=running_project.runs.run_id, + ) + # the times shall remain the same + for t in comp_tasks: + assert t.errors + assert len(t.errors) == 1 + error_dict =
TypeAdapter(ErrorDict).validate_python(t.errors[0]) + assert error_dict["type"] == _TASK_RETRIEVAL_ERROR_TYPE + assert "ctx" in error_dict + assert _TASK_RETRIEVAL_ERROR_CONTEXT_TIME_KEY in error_dict["ctx"] + # the time shall be the same as before + assert ( + error_dict["ctx"][_TASK_RETRIEVAL_ERROR_CONTEXT_TIME_KEY] in retrieval_times + ) + await assert_comp_runs( + sqlalchemy_async_engine, + expected_total=1, + expected_state=RunningState.STARTED, + where_statement=and_( + comp_runs.c.user_id == running_project.project.prj_owner, + comp_runs.c.project_uuid == f"{running_project.project.uuid}", + ), + ) + + # now we wait for the max time and the task should be marked as FAILED + await asyncio.sleep(with_short_max_wait_for_retrieving_results.total_seconds() + 1) + await scheduler_api.apply( + user_id=running_project.project.prj_owner, + project_id=running_project.project.uuid, + iteration=1, + ) + assert mocked_dask_client.get_task_result.call_count == len( + expected_timeouted_tasks + ) + # NOTE: we do not check all tasks here as some are depending on random others + # so some are ABORTED and others are FAILED depending on the random sample above + await assert_comp_runs( + sqlalchemy_async_engine, + expected_total=1, + expected_state=RunningState.FAILED, + where_statement=and_( + comp_runs.c.user_id == running_project.project.prj_owner, + comp_runs.c.project_uuid == f"{running_project.project.uuid}", + ), + ) diff --git a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_worker.py b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_worker.py index 1970797e5d71..5c7354e093e9 100644 --- a/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_worker.py +++ b/services/director-v2/tests/unit/with_dbs/comp_scheduler/test_worker.py @@ -9,11 +9,13 @@ import asyncio from collections.abc import Awaitable, Callable +from typing import Any from unittest import mock import pytest from _helpers import PublishedProject from fastapi import FastAPI +from models_library.computations import CollectionRunID from pytest_mock import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict @@ -61,6 +63,7 @@ async def test_worker_properly_autocalls_scheduler_api( mocked_get_scheduler_worker: mock.Mock, published_project: PublishedProject, run_metadata: RunMetadataDict, + fake_collection_run_id: CollectionRunID, ): assert published_project.project.prj_owner await run_new_pipeline( @@ -69,6 +72,7 @@ async def test_worker_properly_autocalls_scheduler_api( project_id=published_project.project.uuid, run_metadata=run_metadata, use_on_demand_clusters=False, + collection_run_id=fake_collection_run_id, ) mocked_get_scheduler_worker.assert_called_once_with(initialized_app) mocked_get_scheduler_worker.return_value.apply.assert_called_once_with( @@ -109,6 +113,8 @@ async def test_worker_scheduling_parallelism( initialized_app: FastAPI, publish_project: Callable[[], Awaitable[PublishedProject]], run_metadata: RunMetadataDict, + fake_collection_run_id: CollectionRunID, + with_product: dict[str, Any], ): with_disabled_auto_scheduling.assert_called_once() @@ -126,6 +132,7 @@ async def _project_pipeline_creation_workflow() -> None: project_id=published_project.project.uuid, run_metadata=run_metadata, use_on_demand_clusters=False, + collection_run_id=fake_collection_run_id, ) # whatever scheduling concurrency we call in here, we shall always see the same number of calls to the scheduler diff --git 
a/services/director-v2/tests/unit/with_dbs/conftest.py b/services/director-v2/tests/unit/with_dbs/conftest.py index b95dfdb162b9..22209ec67c5c 100644 --- a/services/director-v2/tests/unit/with_dbs/conftest.py +++ b/services/director-v2/tests/unit/with_dbs/conftest.py @@ -8,7 +8,6 @@ import datetime from collections.abc import AsyncIterator, Awaitable, Callable from typing import Any, cast -from uuid import uuid4 import arrow import pytest @@ -17,16 +16,17 @@ from dask_task_models_library.container_tasks.utils import generate_dask_job_id from faker import Faker from fastapi.encoders import jsonable_encoder +from models_library.computations import CollectionRunID from models_library.projects import ProjectAtDB, ProjectID from models_library.projects_nodes_io import NodeID from pydantic import PositiveInt from pydantic.main import BaseModel -from simcore_postgres_database.models.comp_pipeline import StateType, comp_pipeline +from simcore_postgres_database.models.comp_pipeline import StateType from simcore_postgres_database.models.comp_run_snapshot_tasks import ( comp_run_snapshot_tasks, ) from simcore_postgres_database.models.comp_runs import comp_runs -from simcore_postgres_database.models.comp_tasks import NodeClass, comp_tasks +from simcore_postgres_database.models.comp_tasks import NodeClass from simcore_service_director_v2.models.comp_pipelines import CompPipelineAtDB from simcore_service_director_v2.models.comp_runs import ( CompRunsAtDB, @@ -40,46 +40,19 @@ @pytest.fixture async def create_pipeline( - sqlalchemy_async_engine: AsyncEngine, -) -> AsyncIterator[Callable[..., Awaitable[CompPipelineAtDB]]]: - created_pipeline_ids: list[str] = [] - + create_pipeline: Callable[..., Awaitable[dict[str, Any]]], +) -> Callable[..., Awaitable[CompPipelineAtDB]]: async def _(**pipeline_kwargs) -> CompPipelineAtDB: - pipeline_config = { - "project_id": f"{uuid4()}", - "dag_adjacency_list": {}, - "state": StateType.NOT_STARTED, - } - pipeline_config.update(**pipeline_kwargs) - async with sqlalchemy_async_engine.begin() as conn: - result = await conn.execute( - comp_pipeline.insert() - .values(**pipeline_config) - .returning(sa.literal_column("*")) - ) - assert result - - new_pipeline = CompPipelineAtDB.model_validate(result.first()) - created_pipeline_ids.append(f"{new_pipeline.project_id}") - return new_pipeline - - yield _ + created_pipeline_dict = await create_pipeline(**pipeline_kwargs) + return CompPipelineAtDB.model_validate(created_pipeline_dict) - # cleanup - async with sqlalchemy_async_engine.begin() as conn: - await conn.execute( - comp_pipeline.delete().where( - comp_pipeline.c.project_id.in_(created_pipeline_ids) - ) - ) + return _ @pytest.fixture -async def create_tasks( - sqlalchemy_async_engine: AsyncEngine, -) -> AsyncIterator[Callable[..., Awaitable[list[CompTaskAtDB]]]]: - created_task_ids: list[int] = [] - +async def create_tasks_from_project( + create_comp_task: Callable[..., Awaitable[dict[str, Any]]], +) -> Callable[..., Awaitable[list[CompTaskAtDB]]]: async def _( user: dict[str, Any], project: ProjectAtDB, **overrides_kwargs ) -> list[CompTaskAtDB]: @@ -131,24 +104,12 @@ async def _( ), } task_config.update(**overrides_kwargs) - async with sqlalchemy_async_engine.begin() as conn: - result = await conn.execute( - comp_tasks.insert() - .values(**task_config) - .returning(sa.literal_column("*")) - ) - new_task = CompTaskAtDB.model_validate(result.first()) - created_tasks.append(new_task) - created_task_ids.extend([t.task_id for t in created_tasks if t.task_id]) + task_dict = 
await create_comp_task(**task_config) + new_task = CompTaskAtDB.model_validate(task_dict) + created_tasks.append(new_task) return created_tasks - yield _ - - # cleanup - async with sqlalchemy_async_engine.begin() as conn: - await conn.execute( - comp_tasks.delete().where(comp_tasks.c.task_id.in_(created_task_ids)) - ) + return _ @pytest.fixture @@ -184,9 +145,16 @@ def run_metadata( ) +@pytest.fixture +def fake_collection_run_id(faker: Faker) -> CollectionRunID: + return CollectionRunID(f"{faker.uuid4(cast_to=None)}") + + @pytest.fixture async def create_comp_run( - sqlalchemy_async_engine: AsyncEngine, run_metadata: RunMetadataDict + sqlalchemy_async_engine: AsyncEngine, + run_metadata: RunMetadataDict, + faker: Faker, ) -> AsyncIterator[Callable[..., Awaitable[CompRunsAtDB]]]: created_run_ids: list[int] = [] @@ -201,6 +169,7 @@ async def _( "metadata": jsonable_encoder(run_metadata), "use_on_demand_clusters": False, "dag_adjacency_list": {}, + "collection_run_id": faker.uuid4(), } run_config.update(**run_kwargs) async with sqlalchemy_async_engine.begin() as conn: @@ -284,6 +253,7 @@ async def _( project_id=project.uuid, node_id=NodeID(node_id), ), + "state": StateType.PUBLISHED.value, } task_config.update(**overrides_kwargs) async with sqlalchemy_async_engine.begin() as conn: @@ -315,16 +285,18 @@ async def _( @pytest.fixture async def publish_project( create_registered_user: Callable[..., dict[str, Any]], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], create_pipeline: Callable[..., Awaitable[CompPipelineAtDB]], - create_tasks: Callable[..., Awaitable[list[CompTaskAtDB]]], + create_tasks_from_project: Callable[..., Awaitable[list[CompTaskAtDB]]], fake_workbench_without_outputs: dict[str, Any], fake_workbench_adjacency: dict[str, Any], ) -> Callable[[], Awaitable[PublishedProject]]: user = create_registered_user() async def _() -> PublishedProject: - created_project = await project(user, workbench=fake_workbench_without_outputs) + created_project = await create_project( + user, workbench=fake_workbench_without_outputs + ) return PublishedProject( user=user, project=created_project, @@ -332,7 +304,7 @@ async def _() -> PublishedProject: project_id=f"{created_project.uuid}", dag_adjacency_list=fake_workbench_adjacency, ), - tasks=await create_tasks( + tasks=await create_tasks_from_project( user=user, project=created_project, state=StateType.PUBLISHED ), ) @@ -342,6 +314,7 @@ async def _() -> PublishedProject: @pytest.fixture async def published_project( + with_product: dict[str, Any], publish_project: Callable[[], Awaitable[PublishedProject]], ) -> PublishedProject: return await publish_project() @@ -349,10 +322,11 @@ async def published_project( @pytest.fixture async def running_project( + with_product: dict[str, Any], create_registered_user: Callable[..., dict[str, Any]], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], create_pipeline: Callable[..., Awaitable[CompPipelineAtDB]], - create_tasks: Callable[..., Awaitable[list[CompTaskAtDB]]], + create_tasks_from_project: Callable[..., Awaitable[list[CompTaskAtDB]]], create_comp_run: Callable[..., Awaitable[CompRunsAtDB]], create_comp_run_snapshot_tasks: Callable[ ..., Awaitable[list[CompRunSnapshotTaskAtDBGet]] @@ -361,7 +335,9 @@ async def running_project( fake_workbench_adjacency: dict[str, Any], ) -> RunningProject: user = create_registered_user() - created_project = await project(user, 
workbench=fake_workbench_without_outputs) + created_project = await create_project( + user, workbench=fake_workbench_without_outputs + ) now_time = arrow.utcnow().datetime _comp_run = await create_comp_run( user=user, @@ -377,7 +353,7 @@ async def running_project( project_id=f"{created_project.uuid}", dag_adjacency_list=fake_workbench_adjacency, ), - tasks=await create_tasks( + tasks=await create_tasks_from_project( user=user, project=created_project, state=StateType.RUNNING, @@ -400,18 +376,21 @@ async def running_project( @pytest.fixture async def running_project_mark_for_cancellation( create_registered_user: Callable[..., dict[str, Any]], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], create_pipeline: Callable[..., Awaitable[CompPipelineAtDB]], - create_tasks: Callable[..., Awaitable[list[CompTaskAtDB]]], + create_tasks_from_project: Callable[..., Awaitable[list[CompTaskAtDB]]], create_comp_run: Callable[..., Awaitable[CompRunsAtDB]], create_comp_run_snapshot_tasks: Callable[ ..., Awaitable[list[CompRunSnapshotTaskAtDBGet]] ], fake_workbench_without_outputs: dict[str, Any], fake_workbench_adjacency: dict[str, Any], + with_product: dict[str, Any], ) -> RunningProject: user = create_registered_user() - created_project = await project(user, workbench=fake_workbench_without_outputs) + created_project = await create_project( + user, workbench=fake_workbench_without_outputs + ) now_time = arrow.utcnow().datetime _comp_run = await create_comp_run( user=user, @@ -428,7 +407,7 @@ async def running_project_mark_for_cancellation( project_id=f"{created_project.uuid}", dag_adjacency_list=fake_workbench_adjacency, ), - tasks=await create_tasks( + tasks=await create_tasks_from_project( user=user, project=created_project, state=StateType.RUNNING, diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py b/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py index fd1d43e25aa4..8a70f85eb170 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_dynamic_services.py @@ -40,6 +40,8 @@ X_DYNAMIC_SIDECAR_REQUEST_SCHEME, X_SIMCORE_USER_AGENT, ) +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings from simcore_service_director_v2.models.dynamic_services_scheduler import SchedulerData from simcore_service_director_v2.modules.dynamic_sidecar.errors import ( DynamicSidecarNotFoundError, @@ -52,6 +54,8 @@ pytest_simcore_core_services_selection = [ "postgres", + "rabbit", + "redis", ] pytest_simcore_ops_services_selection = [ "adminer", @@ -70,7 +74,6 @@ class ServiceParams(NamedTuple): @pytest.fixture def minimal_config( - disable_rabbitmq: None, mock_env: EnvVarsDict, postgres_host_config: dict[str, str], monkeypatch: pytest.MonkeyPatch, @@ -98,7 +101,8 @@ def mock_env( mock_env: EnvVarsDict, mock_exclusive: None, disable_postgres: None, - disable_rabbitmq: None, + redis_service: RedisSettings, + rabbit_service: RabbitSettings, monkeypatch: pytest.MonkeyPatch, faker: Faker, ) -> None: @@ -123,11 +127,6 @@ def mock_env( monkeypatch.setenv("COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH", "{}") monkeypatch.setenv("DIRECTOR_V2_DYNAMIC_SCHEDULER_ENABLED", "true") - monkeypatch.setenv("RABBIT_HOST", "mocked_host") - monkeypatch.setenv("RABBIT_SECURE", "false") - monkeypatch.setenv("RABBIT_USER", "mocked_user") - monkeypatch.setenv("RABBIT_PASSWORD", "mocked_password") 
- monkeypatch.setenv("REGISTRY_AUTH", "false") monkeypatch.setenv("REGISTRY_USER", "test") monkeypatch.setenv("REGISTRY_PW", "test") diff --git a/services/director-v2/tests/unit/with_dbs/test_cli.py b/services/director-v2/tests/unit/with_dbs/test_cli.py index 813bd93aa078..a4a231984a4c 100644 --- a/services/director-v2/tests/unit/with_dbs/test_cli.py +++ b/services/director-v2/tests/unit/with_dbs/test_cli.py @@ -22,7 +22,9 @@ from models_library.projects_nodes_io import NodeID from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict -from servicelib.long_running_tasks._models import ProgressCallback +from servicelib.long_running_tasks.models import ProgressCallback +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings from simcore_service_director_v2.cli import DEFAULT_NODE_SAVE_ATTEMPTS, main from simcore_service_director_v2.cli._close_and_save_service import ( ThinDV2LocalhostClient, @@ -31,6 +33,8 @@ pytest_simcore_core_services_selection = [ "postgres", + "rabbit", + "redis", ] pytest_simcore_ops_services_selection = [ "adminer", @@ -41,8 +45,11 @@ def minimal_configuration( mock_env: EnvVarsDict, postgres_host_config: dict[str, str], + redis_service: RedisSettings, + rabbit_service: RabbitSettings, monkeypatch: pytest.MonkeyPatch, faker: Faker, + with_product: dict[str, Any], ): monkeypatch.setenv("DIRECTOR_V2_DYNAMIC_SIDECAR_ENABLED", "false") monkeypatch.setenv("DIRECTOR_V2_POSTGRES_ENABLED", "1") @@ -62,11 +69,12 @@ def cli_runner(minimal_configuration: None) -> CliRunner: @pytest.fixture async def project_at_db( create_registered_user: Callable[..., dict[str, Any]], - project: Callable[..., Awaitable[ProjectAtDB]], + with_product: dict[str, Any], + create_project: Callable[..., Awaitable[ProjectAtDB]], fake_workbench_without_outputs: dict[str, Any], ) -> ProjectAtDB: user = create_registered_user() - return await project(user, workbench=fake_workbench_without_outputs) + return await create_project(user, workbench=fake_workbench_without_outputs) @pytest.fixture diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_db_repositories_projects.py b/services/director-v2/tests/unit/with_dbs/test_modules_db_repositories_projects.py index 14ff015d790c..46dfe7e3d89e 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_db_repositories_projects.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_db_repositories_projects.py @@ -69,39 +69,40 @@ def workbench() -> dict[str, Any]: @pytest.fixture() -async def project( +async def with_project( mock_env: EnvVarsDict, create_registered_user: Callable[..., dict], - project: Callable[..., Awaitable[ProjectAtDB]], + with_product: dict[str, Any], + create_project: Callable[..., Awaitable[ProjectAtDB]], workbench: dict[str, Any], ) -> ProjectAtDB: - return await project(create_registered_user(), workbench=workbench) + return await create_project(create_registered_user(), workbench=workbench) async def test_is_node_present_in_workbench( - initialized_app: FastAPI, project: ProjectAtDB, faker: Faker + initialized_app: FastAPI, with_project: ProjectAtDB, faker: Faker ): project_repository = get_repository(initialized_app, ProjectsRepository) - for node_uuid in project.workbench: + for node_uuid in with_project.workbench: assert ( await project_repository.is_node_present_in_workbench( - project_id=project.uuid, node_uuid=NodeID(node_uuid) + project_id=with_project.uuid, node_uuid=NodeID(node_uuid) ) is True ) not_existing_node = 
faker.uuid4(cast_to=None) - assert not_existing_node not in project.workbench + assert not_existing_node not in with_project.workbench assert ( await project_repository.is_node_present_in_workbench( - project_id=project.uuid, node_uuid=not_existing_node + project_id=with_project.uuid, node_uuid=not_existing_node ) is False ) not_existing_project = faker.uuid4(cast_to=None) - assert not_existing_project != project.uuid + assert not_existing_project != with_project.uuid assert ( await project_repository.is_node_present_in_workbench( project_id=not_existing_project, node_uuid=not_existing_node @@ -111,13 +112,13 @@ async def test_is_node_present_in_workbench( async def test_get_project_id_from_node( - initialized_app: FastAPI, project: ProjectAtDB, faker: Faker + initialized_app: FastAPI, with_project: ProjectAtDB, faker: Faker ): project_repository = get_repository(initialized_app, ProjectsRepository) - for node_uuid in project.workbench: + for node_uuid in with_project.workbench: assert ( await project_repository.get_project_id_from_node(NodeID(node_uuid)) - == project.uuid + == with_project.uuid ) not_existing_node_id = faker.uuid4(cast_to=None) diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py index 06a791cc745c..dbd4f4019132 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py @@ -14,10 +14,11 @@ import pytest from aiodocker.utils import clean_filters from faker import Faker -from models_library.docker import DockerNodeID, to_simcore_runtime_docker_label_key +from models_library.docker import DockerNodeID from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.services_enums import ServiceState +from models_library.services_metadata_runtime import to_simcore_runtime_docker_label_key from models_library.users import UserID from pydantic import TypeAdapter from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict @@ -452,7 +453,7 @@ async def test_create_service( cleanup_test_service_name: None, docker_swarm: None, ): - service_id = await docker_api.create_service_and_get_id(service_spec) + service_id = await docker_api.create_service_and_get_id(service_spec, None) assert service_id @@ -464,7 +465,7 @@ async def test_services_to_observe_exist( docker_swarm: None, ): service_id = await docker_api.create_service_and_get_id( - dynamic_sidecar_service_spec + dynamic_sidecar_service_spec, None ) assert service_id @@ -483,7 +484,7 @@ async def test_dynamic_sidecar_in_running_state_and_node_id_is_recovered( docker_swarm: None, ): service_id = await docker_api.create_service_and_get_id( - dynamic_sidecar_service_spec + dynamic_sidecar_service_spec, None ) assert service_id @@ -510,7 +511,7 @@ async def test_dynamic_sidecar_get_dynamic_sidecar_sate_fail_to_schedule( } service_id = await docker_api.create_service_and_get_id( - dynamic_sidecar_service_spec + dynamic_sidecar_service_spec, None ) assert service_id @@ -538,7 +539,9 @@ async def test_is_dynamic_sidecar_stack_missing( # start 2 fake services to emulate the dynamic-sidecar stack for dynamic_sidecar_stack in dynamic_sidecar_stack_specs: - service_id = await docker_api.create_service_and_get_id(dynamic_sidecar_stack) + service_id = await docker_api.create_service_and_get_id( + dynamic_sidecar_stack, None + ) 
assert service_id services_are_missing = await docker_api.is_dynamic_sidecar_stack_missing( @@ -561,7 +564,9 @@ async def test_are_sidecar_and_proxy_services_present( # start 2 fake services to emulate the dynamic-sidecar stack for dynamic_sidecar_stack in dynamic_sidecar_stack_specs: - service_id = await docker_api.create_service_and_get_id(dynamic_sidecar_stack) + service_id = await docker_api.create_service_and_get_id( + dynamic_sidecar_stack, None + ) assert service_id services_are_missing = await docker_api.are_sidecar_and_proxy_services_present( @@ -604,7 +609,9 @@ async def _count_services_in_stack( # start 2 fake services to emulate the dynamic-sidecar stack for dynamic_sidecar_stack in dynamic_sidecar_stack_specs: - service_id = await docker_api.create_service_and_get_id(dynamic_sidecar_stack) + service_id = await docker_api.create_service_and_get_id( + dynamic_sidecar_stack, None + ) assert service_id assert ( @@ -666,7 +673,9 @@ async def test_is_sidecar_running( # start 2 fake services to emulate the dynamic-sidecar stack for dynamic_sidecar_stack in dynamic_sidecar_stack_specs: - service_id = await docker_api.create_service_and_get_id(dynamic_sidecar_stack) + service_id = await docker_api.create_service_and_get_id( + dynamic_sidecar_stack, None + ) assert service_id async for attempt in AsyncRetrying( diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py index 4618a9a9ba01..99fd1525c20c 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py @@ -15,16 +15,16 @@ from fastapi.encoders import jsonable_encoder from models_library.aiodocker_api import AioDockerServiceSpec from models_library.callbacks_mapping import CallbacksMapping -from models_library.docker import ( - DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY, - to_simcore_runtime_docker_label_key, -) from models_library.resource_tracker import HardwareInfo, PricingInfo from models_library.service_settings_labels import ( SimcoreServiceLabels, SimcoreServiceSettingsLabel, ) from models_library.services import ServiceKeyVersion, ServiceRunID +from models_library.services_metadata_runtime import ( + DOCKER_TASK_EC2_INSTANCE_TYPE_PLACEMENT_CONSTRAINT_KEY, + to_simcore_runtime_docker_label_key, +) from models_library.wallets import WalletInfo from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict @@ -71,7 +71,6 @@ def mock_env( "DYNAMIC_SIDECAR_IMAGE": "local/dynamic-sidecar:MOCK", "LOG_LEVEL": "DEBUG", "POSTGRES_DB": "test", - "POSTGRES_ENDPOINT": "localhost:5432", "POSTGRES_HOST": "localhost", "POSTGRES_PASSWORD": "test", "POSTGRES_PORT": "5432", @@ -82,6 +81,7 @@ def mock_env( "RABBIT_PORT": "5672", "RABBIT_USER": "admin", "RABBIT_SECURE": "false", + "REDIS_SETTINGS": '{"REDIS_SECURE":false,"REDIS_HOST":"redis","REDIS_PORT":6789,"REDIS_USER":null,"REDIS_PASSWORD":null}', "REGISTRY_AUTH": "false", "REGISTRY_PW": "test", "REGISTRY_SSL": "false", @@ -274,12 +274,12 @@ def expected_dynamic_sidecar_spec( "POSTGRES_PORT": "5432", "POSTGRES_USER": "test", "POSTGRES_PASSWORD": "test", - "POSTGRES_ENDPOINT": "localhost:5432", "RABBIT_HOST": "rabbit", "RABBIT_PASSWORD": "adminadmin", "RABBIT_PORT": "5672", "RABBIT_USER": "admin", "RABBIT_SECURE": "False", + 
"REDIS_SETTINGS": '{"REDIS_SECURE":false,"REDIS_HOST":"redis","REDIS_PORT":6789,"REDIS_USER":null,"REDIS_PASSWORD":null}', "R_CLONE_OPTION_BUFFER_SIZE": "16M", "R_CLONE_OPTION_RETRIES": "3", "R_CLONE_OPTION_TRANSFERS": "5", diff --git a/services/director-v2/tests/unit/with_dbs/test_utils_dask.py b/services/director-v2/tests/unit/with_dbs/test_utils_dask.py index 682e24825fcc..88459999d1c8 100644 --- a/services/director-v2/tests/unit/with_dbs/test_utils_dask.py +++ b/services/director-v2/tests/unit/with_dbs/test_utils_dask.py @@ -33,10 +33,10 @@ FileUploadLinks, FileUploadSchema, ) -from models_library.docker import to_simcore_runtime_docker_label_key from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, SimCoreFileLink, SimcoreS3FileID from models_library.services import ServiceRunID +from models_library.services_metadata_runtime import to_simcore_runtime_docker_label_key from models_library.users import UserID from pydantic import ByteSize, TypeAdapter from pydantic.networks import AnyUrl @@ -48,7 +48,7 @@ from simcore_service_director_v2.models.comp_tasks import CompTaskAtDB from simcore_service_director_v2.modules.dask_clients_pool import DaskClientsPool from simcore_service_director_v2.utils.dask import ( - _LOGS_FILE_NAME, + LOGS_FILE_NAME, _to_human_readable_resource_values, check_if_cluster_is_able_to_run_pipeline, clean_task_output_and_log_files_if_invalid, @@ -438,12 +438,12 @@ async def test_clean_task_output_and_log_files_if_invalid( mock.call( user_id=user_id, store_id=0, - s3_object=f"{published_project.project.uuid}/{sleeper_task.node_id}/{_LOGS_FILE_NAME}", + s3_object=f"{published_project.project.uuid}/{sleeper_task.node_id}/{LOGS_FILE_NAME}", ) ] def _add_is_directory(entry: mock._Call) -> mock._Call: - new_kwargs: dict[str, Any] = deepcopy(entry.kwargs) + new_kwargs = dict(deepcopy(entry.kwargs)) new_kwargs["is_directory"] = False return mock.call(**new_kwargs) @@ -520,7 +520,9 @@ async def test_check_if_cluster_is_able_to_run_pipeline( ) default_cluster = dask_scheduler_settings.default_cluster dask_clients_pool = DaskClientsPool.instance(initialized_app) - async with dask_clients_pool.acquire(default_cluster) as dask_client: + async with dask_clients_pool.acquire( + default_cluster, ref="test-utils-dask-ref" + ) as dask_client: check_if_cluster_is_able_to_run_pipeline( project_id=project_id, node_id=node_id, diff --git a/services/director-v2/tests/unit/with_dbs/test_utils_rabbitmq.py b/services/director-v2/tests/unit/with_dbs/test_utils_rabbitmq.py index cb3d81a910d1..6c18bae729e0 100644 --- a/services/director-v2/tests/unit/with_dbs/test_utils_rabbitmq.py +++ b/services/director-v2/tests/unit/with_dbs/test_utils_rabbitmq.py @@ -84,27 +84,28 @@ def user(create_registered_user: Callable[..., dict]) -> dict: @pytest.fixture -async def project( +async def with_project( user: dict[str, Any], fake_workbench_without_outputs: dict[str, Any], - project: Callable[..., Awaitable[ProjectAtDB]], + create_project: Callable[..., Awaitable[ProjectAtDB]], + with_product: dict[str, Any], ) -> ProjectAtDB: - return await project(user, workbench=fake_workbench_without_outputs) + return await create_project(user, workbench=fake_workbench_without_outputs) @pytest.fixture async def tasks( user: dict[str, Any], - project: ProjectAtDB, + with_project: ProjectAtDB, fake_workbench_adjacency: dict[str, Any], create_pipeline: Callable[..., Awaitable[CompPipelineAtDB]], - create_tasks: Callable[..., Awaitable[list[CompTaskAtDB]]], + 
create_tasks_from_project: Callable[..., Awaitable[list[CompTaskAtDB]]], ) -> list[CompTaskAtDB]: await create_pipeline( - project_id=f"{project.uuid}", + project_id=f"{with_project.uuid}", dag_adjacency_list=fake_workbench_adjacency, ) - comp_tasks = await create_tasks(user, project) + comp_tasks = await create_tasks_from_project(user, with_project) assert len(comp_tasks) > 0 return comp_tasks @@ -161,7 +162,7 @@ async def test_publish_service_stopped_metrics( async def test_publish_service_resource_tracking_started( create_rabbitmq_client: Callable[[str], RabbitMQClient], user: dict[str, Any], - project: ProjectAtDB, + with_project: ProjectAtDB, simcore_user_agent: str, tasks: list[CompTaskAtDB], mocked_message_parser: mock.AsyncMock, @@ -190,10 +191,10 @@ async def test_publish_service_resource_tracking_started( simcore_user_agent=simcore_user_agent, user_id=user["id"], user_email=faker.email(), - project_id=project.uuid, - project_name=project.name, + project_id=with_project.uuid, + project_name=with_project.name, node_id=random_task.node_id, - node_name=project.workbench[NodeIDStr(f"{random_task.node_id}")].label, + node_name=with_project.workbench[NodeIDStr(f"{random_task.node_id}")].label, parent_project_id=None, parent_node_id=None, root_parent_project_id=None, diff --git a/services/director/Dockerfile b/services/director/Dockerfile index 1cc5cd42c796..8ff33334260a 100644 --- a/services/director/Dockerfile +++ b/services/director/Dockerfile @@ -2,7 +2,7 @@ # Define arguments in the global scope ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build FROM python:${PYTHON_VERSION}-slim-bookworm AS base-arm64 @@ -31,6 +31,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=private \ set -eux && \ apt-get update && \ apt-get install -y --no-install-recommends \ + fd-find \ gosu \ && apt-get clean -y \ && rm -rf /var/lib/apt/lists/* \ @@ -90,10 +91,7 @@ RUN uv venv "${VIRTUAL_ENV}" -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools + WORKDIR /build @@ -106,6 +104,9 @@ WORKDIR /build FROM build AS prod-only-deps ENV SC_BUILD_TARGET=prod-only-deps +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy WORKDIR /build/services/director @@ -131,8 +132,6 @@ ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production ENV PYTHONOPTIMIZE=TRUE -# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode -ENV UV_COMPILE_BYTECODE=1 WORKDIR /home/scu # ensure home folder is read/writable for user scu diff --git a/services/director/docker/boot.sh b/services/director/docker/boot.sh index af732895050a..38ad83b661f6 100755 --- a/services/director/docker/boot.sh +++ b/services/director/docker/boot.sh @@ -24,15 +24,15 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/director - uv pip --quiet sync requirements/dev.txt + uv pip --quiet sync --link-mode=copy requirements/dev.txt cd - - uv pip list + # uv pip list fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy if command -v uv >/dev/null 2>&1; then - uv pip install debugpy + uv pip install --link-mode=copy debugpy else pip install debugpy fi @@ -47,19 +47,22 @@ SERVER_LOG_LEVEL=$(echo "${APP_LOG_LEVEL}" | tr '[:upper:]' '[:lower:]') echo "$INFO" "Log-level app/server: $APP_LOG_LEVEL/$SERVER_LOG_LEVEL" if [ "${SC_BOOT_MODE}" = "debug" ]; then - 
reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path "*/src/*" ! -path "*.*" -exec echo '--reload-dir {} \' \;) + reload_dir_packages=$(fdfind src /devel/packages --exec echo '--reload-dir {} ' | tr '\n' ' ') exec sh -c " cd services/director/src/simcore_service_director && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DIRECTOR_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DIRECTOR_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ - $reload_dir_packages + $reload_dir_packages \ --reload-dir . \ --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_director.main:the_app \ + exec uvicorn \ + --factory simcore_service_director.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/director/docker/entrypoint.sh b/services/director/docker/entrypoint.sh index ad982fd8d5cc..651a1ea875f0 100755 --- a/services/director/docker/entrypoint.sh +++ b/services/director/docker/entrypoint.sh @@ -26,6 +26,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE @@ -63,10 +64,9 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; - # change user property of files already around + fdfind --owner ":$SC_USER_ID" --exclude proc --exec-batch chgrp --no-dereference "$CONT_GROUPNAME" . '/' echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fdfind --owner "$SC_USER_ID:" --exclude proc --exec-batch chown --no-dereference "$SC_USER_NAME" . 
'/' fi fi diff --git a/services/director/requirements/_base.txt b/services/director/requirements/_base.txt index d3f83d7df9fe..54b2c88d8995 100644 --- a/services/director/requirements/_base.txt +++ b/services/director/requirements/_base.txt @@ -14,7 +14,7 @@ aiofiles==24.1.0 # via -r requirements/../../../packages/service-library/requirements/_base.in aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -47,6 +47,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi attrs==24.2.0 @@ -71,18 +73,14 @@ certifi==2024.8.30 # httpcore # httpx # requests + # sentry-sdk charset-normalizer==3.4.0 # via requests -click==8.1.7 +click==8.2.1 # via + # rich-toolkit # typer # uvicorn -deprecated==1.2.14 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions dnspython==2.7.0 # via email-validator email-validator==2.2.0 @@ -91,12 +89,14 @@ email-validator==2.2.0 # pydantic fast-depends==2.4.12 # via faststream -fastapi==0.115.12 +fastapi==0.116.1 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi-lifespan-manager -fastapi-cli==0.0.5 +fastapi-cli==0.0.8 # via fastapi +fastapi-cloud-cli==0.1.5 + # via fastapi-cli fastapi-lifespan-manager==0.1.4 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in faststream==0.5.31 @@ -105,13 +105,13 @@ frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.66.0 +googleapis-common-protos==1.70.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http grpcio==1.67.1 # via opentelemetry-exporter-otlp-proto-grpc -h11==0.14.0 +h11==0.16.0 # via # httpcore # uvicorn @@ -119,7 +119,7 @@ h2==4.1.0 # via httpx hpack==4.0.0 # via h2 -httpcore==1.0.6 +httpcore==1.0.9 # via httpx httptools==0.6.4 # via uvicorn @@ -140,6 +140,7 @@ httpx==0.27.2 # -r requirements/../../../packages/service-library/requirements/_fastapi.in # -r requirements/_base.in # fastapi + # fastapi-cloud-cli hyperframe==6.0.1 # via h2 idna==3.10 @@ -166,6 +167,10 @@ jinja2==3.1.6 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # fastapi +jsonref==1.1.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.23.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in @@ -182,7 +187,7 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.28.1 +opentelemetry-api==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -190,6 +195,7 @@ opentelemetry-api==1.28.1 # 
opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-logging @@ -197,59 +203,63 @@ opentelemetry-api==1.28.1 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.28.1 +opentelemetry-exporter-otlp==1.34.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.28.1 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.28.1 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.28.1 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.49b1 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.49b1 +opentelemetry-instrumentation-aio-pika==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-asgi==0.49b1 +opentelemetry-instrumentation-asgi==0.55b1 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-fastapi==0.49b1 +opentelemetry-instrumentation-asyncpg==0.55b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-fastapi==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.49b1 +opentelemetry-instrumentation-httpx==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-logging==0.49b1 +opentelemetry-instrumentation-logging==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.49b1 +opentelemetry-instrumentation-redis==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.49b1 +opentelemetry-instrumentation-requests==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.28.1 +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.28.1 +opentelemetry-sdk==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.49b1 +opentelemetry-semantic-conventions==0.55b1 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.49b1 
+opentelemetry-util-http==0.55b1 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi @@ -289,7 +299,7 @@ propcache==0.2.0 # via # aiohttp # yarl -protobuf==5.28.3 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto @@ -297,7 +307,7 @@ psutil==6.1.0 # via -r requirements/../../../packages/service-library/requirements/_base.in pycryptodome==3.21.0 # via stream-zip -pydantic==2.10.2 +pydantic==2.11.7 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -325,11 +335,12 @@ pydantic==2.10.2 # -r requirements/_base.in # fast-depends # fastapi + # fastapi-cloud-cli # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.1 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.0 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -339,7 +350,7 @@ pydantic-extra-types==2.10.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -pydantic-settings==2.6.1 +pydantic-settings==2.7.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -416,17 +427,26 @@ referencing==0.29.3 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via opentelemetry-exporter-otlp-proto-http -rich==13.9.4 +rich==14.1.0 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in + # rich-toolkit # typer +rich-toolkit==0.15.0 + # via + # fastapi-cli + # fastapi-cloud-cli +rignore==0.6.4 + # via fastapi-cloud-cli rpds-py==0.21.0 # via # jsonschema # referencing +sentry-sdk==2.35.0 + # via fastapi-cloud-cli shellingham==1.5.4 # via typer six==1.16.0 @@ -434,8 +454,9 @@ six==1.16.0 sniffio==1.3.1 # via # anyio + # asgi-lifespan # httpx -starlette==0.41.3 +starlette==0.47.2 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -460,24 +481,34 @@ toolz==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.13.0 +typer==0.16.1 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # fastapi-cli + # fastapi-cloud-cli 
types-python-dateutil==2.9.0.20241003 # via arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # aiodebug # fastapi # faststream + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pydantic # pydantic-core # pydantic-extra-types + # rich-toolkit + # starlette # typer -urllib3==2.2.3 + # typing-inspection +typing-inspection==0.4.1 + # via pydantic +urllib3==2.5.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -492,10 +523,12 @@ urllib3==2.2.3 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # requests + # sentry-sdk uvicorn==0.34.2 # via # fastapi # fastapi-cli + # fastapi-cloud-cli uvloop==0.21.0 # via uvicorn watchfiles==0.24.0 @@ -504,7 +537,6 @@ websockets==14.1 # via uvicorn wrapt==1.16.0 # via - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-httpx diff --git a/services/director/requirements/_test.txt b/services/director/requirements/_test.txt index 5fcdf45b0bf4..6ef18394388d 100644 --- a/services/director/requirements/_test.txt +++ b/services/director/requirements/_test.txt @@ -2,7 +2,7 @@ aiohappyeyeballs==2.6.1 # via # -c requirements/_base.txt # aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -18,7 +18,9 @@ anyio==4.6.2.post1 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in attrs==24.2.0 # via # -c requirements/_base.txt @@ -46,11 +48,11 @@ frozenlist==1.5.0 # -c requirements/_base.txt # aiohttp # aiosignal -h11==0.14.0 +h11==0.16.0 # via # -c requirements/_base.txt # httpcore -httpcore==1.0.6 +httpcore==1.0.9 # via # -c requirements/_base.txt # httpx @@ -69,7 +71,9 @@ idna==3.10 iniconfig==2.0.0 # via pytest jsonref==1.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in multidict==6.1.0 # via # -c requirements/_base.txt @@ -82,7 +86,9 @@ packaging==24.2 # pytest # pytest-sugar pluggy==1.5.0 - # via pytest + # via + # pytest + # pytest-cov propcache==0.2.0 # via # -c requirements/_base.txt @@ -90,7 +96,11 @@ propcache==0.2.0 # yarl py-cpuinfo==9.0.0 # via pytest-benchmark -pytest==8.3.5 +pygments==2.18.0 + # via + # -c requirements/_base.txt + # pytest +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio @@ -100,23 +110,23 @@ pytest==8.3.5 # pytest-instafail # pytest-mock # pytest-sugar -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in pytest-benchmark==5.1.0 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in -pytest-docker==3.2.0 +pytest-docker==3.2.3 # via -r requirements/_test.in pytest-instafail==0.5.0 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in pytest-sugar==1.0.0 # via -r requirements/_test.in -requests==2.32.3 +requests==2.32.4 # via # -c requirements/_base.txt # docker @@ -132,7 +142,7 @@ termcolor==2.5.0 # via 
pytest-sugar tzdata==2025.1 # via faker -urllib3==2.2.3 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/services/director/requirements/_tools.txt b/services/director/requirements/_tools.txt index 9bc3fb1323cf..62139d9bd47d 100644 --- a/services/director/requirements/_tools.txt +++ b/services/director/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.7 +click==8.2.1 # via # -c requirements/_base.txt # black @@ -27,9 +27,9 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # black # mypy @@ -42,7 +42,9 @@ packaging==24.2 # black # build pathspec==0.12.1 - # via black + # via + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -68,11 +70,11 @@ pyyaml==6.0.2 # watchdog ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.2 +setuptools==80.9.0 # via pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # mypy diff --git a/services/director/src/simcore_service_director/_meta.py b/services/director/src/simcore_service_director/_meta.py index 5bf4218d6788..a9a1b7868c4d 100644 --- a/services/director/src/simcore_service_director/_meta.py +++ b/services/director/src/simcore_service_director/_meta.py @@ -1,6 +1,4 @@ -""" Application's metadata - -""" +"""Application's metadata""" from typing import Final @@ -15,7 +13,7 @@ PROJECT_NAME: Final[str] = info.project_name VERSION: Final[Version] = info.version API_VERSION: Final[VersionStr] = info.__version__ -APP_NAME: Final[str] = PROJECT_NAME +APP_NAME: Final[str] = info.app_name API_VTAG: Final[VersionTag] = VersionTag(info.api_prefix_path_tag) SUMMARY: Final[str] = info.get_summary() diff --git a/services/director/src/simcore_service_director/api/v0/schemas/project-v0.0.1.json b/services/director/src/simcore_service_director/api/v0/schemas/project-v0.0.1.json index 9b5a55525021..24d05ed6bcf7 100644 --- a/services/director/src/simcore_service_director/api/v0/schemas/project-v0.0.1.json +++ b/services/director/src/simcore_service_director/api/v0/schemas/project-v0.0.1.json @@ -458,6 +458,16 @@ "ABORTED" ], "type": "string" + }, + "lock_state": { + "type": "object", + "properties": { + "locked": { + "type": "boolean", + "description": "true if the node is locked", + "default": false + } + } } }, "additionalProperties": false @@ -765,7 +775,10 @@ "description": "Object containing Quality Assessment related data" }, "workspaceId": { - "type": ["integer", "null"] + "type": [ + "integer", + "null" + ] }, "type": { "type": "string", diff --git a/services/director/src/simcore_service_director/cli.py b/services/director/src/simcore_service_director/cli.py index f2e16f6b97ee..1a797d76d0b8 100644 --- a/services/director/src/simcore_service_director/cli.py +++ b/services/director/src/simcore_service_director/cli.py @@ -21,6 +21,6 @@ def run(): """Runs application""" typer.secho("Sorry, this entrypoint is intentionally disabled. 
Use instead") typer.secho( - "$ uvicorn simcore_service_director.main:the_app", + "$ uvicorn --factory simcore_service_director.main:app_factory", fg=typer.colors.BLUE, ) diff --git a/services/director/src/simcore_service_director/core/application.py b/services/director/src/simcore_service_director/core/application.py index 0baa557506f6..fb14ce4e86cf 100644 --- a/services/director/src/simcore_service_director/core/application.py +++ b/services/director/src/simcore_service_director/core/application.py @@ -1,9 +1,9 @@ import logging -from typing import Final from fastapi import FastAPI from servicelib.async_utils import cancel_sequential_workers from servicelib.fastapi.client_session import setup_client_session +from servicelib.fastapi.http_error import set_app_default_http_error_handlers from servicelib.fastapi.tracing import ( initialize_fastapi_app_tracing, setup_tracing, @@ -21,22 +21,10 @@ from ..registry_proxy import setup as setup_registry from .settings import ApplicationSettings -_LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR -_NOISY_LOGGERS: Final[tuple[str]] = ("werkzeug",) - _logger = logging.getLogger(__name__) def create_app(settings: ApplicationSettings) -> FastAPI: - # keep mostly quiet noisy loggers - quiet_level: int = max( - min(logging.root.level + _LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING - ) - for name in _NOISY_LOGGERS: - logging.getLogger(name).setLevel(quiet_level) - - _logger.info("app settings: %s", settings.model_dump_json(indent=1)) - app = FastAPI( debug=settings.DIRECTOR_DEBUG, title=APP_NAME, @@ -62,6 +50,7 @@ def create_app(settings: ApplicationSettings) -> FastAPI: app, max_keepalive_connections=settings.DIRECTOR_REGISTRY_CLIENT_MAX_KEEPALIVE_CONNECTIONS, default_timeout=settings.DIRECTOR_REGISTRY_CLIENT_TIMEOUT, + tracing_settings=settings.DIRECTOR_TRACING, ) setup_registry(app) @@ -69,6 +58,7 @@ def create_app(settings: ApplicationSettings) -> FastAPI: initialize_fastapi_app_tracing(app) # ERROR HANDLERS + set_app_default_http_error_handlers(app) # EVENTS async def _on_startup() -> None: diff --git a/services/director/src/simcore_service_director/core/errors.py b/services/director/src/simcore_service_director/core/errors.py index c7113baa4021..b983d520734d 100644 --- a/services/director/src/simcore_service_director/core/errors.py +++ b/services/director/src/simcore_service_director/core/errors.py @@ -17,6 +17,12 @@ class ServiceNotAvailableError(DirectorRuntimeError): msg_template: str = "Service {service_name}:{service_tag} is not available" +class DockerRegistryUnsupportedManifestSchemaVersionError(DirectorRuntimeError): + msg_template: str = ( + "Docker registry schema version {version} issue with {service_name}:{service_tag}" + ) + + class ServiceUUIDNotFoundError(DirectorRuntimeError): msg_template: str = "Service with uuid {service_uuid} was not found" diff --git a/services/director/src/simcore_service_director/core/settings.py b/services/director/src/simcore_service_director/core/settings.py index 5560de876fa6..7e1b010aae89 100644 --- a/services/director/src/simcore_service_director/core/settings.py +++ b/services/director/src/simcore_service_director/core/settings.py @@ -1,11 +1,13 @@ import datetime import warnings +from functools import cached_property from typing import cast +from common_library.logging.logging_utils_filtering import LoggerName, MessageSubstring from fastapi import FastAPI from models_library.basic_types import LogLevel, PortInt, VersionTag from pydantic import AliasChoices, Field, NonNegativeInt, PositiveInt, 
field_validator -from servicelib.logging_utils_filtering import LoggerName, MessageSubstring +from servicelib.logging_utils import LogLevelInt from settings_library.application import BaseApplicationSettings from settings_library.docker_registry import RegistrySettings from settings_library.postgres import PostgresSettings @@ -27,7 +29,7 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ) DIRECTOR_REMOTE_DEBUG_PORT: PortInt = 3000 - DIRECTOR_LOGLEVEL: LogLevel = Field( + DIRECTOR_LOG_LEVEL: LogLevel = Field( ..., validation_alias=AliasChoices("DIRECTOR_LOGLEVEL", "LOG_LEVEL", "LOGLEVEL") ) DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED: bool = Field( @@ -143,10 +145,10 @@ def _validate_substitutions(cls, v): return v - @field_validator("DIRECTOR_LOGLEVEL", mode="before") - @classmethod - def _valid_log_level(cls, value: str) -> str: - return cls.validate_log_level(value) + @cached_property + def log_level(self) -> LogLevelInt: + """override""" + return cast(LogLevelInt, self.DIRECTOR_LOG_LEVEL) def get_application_settings(app: FastAPI) -> ApplicationSettings: diff --git a/services/director/src/simcore_service_director/main.py b/services/director/src/simcore_service_director/main.py index da0c480065f6..5ad1c4b03d9a 100644 --- a/services/director/src/simcore_service_director/main.py +++ b/services/director/src/simcore_service_director/main.py @@ -1,24 +1,37 @@ -"""Main application to be deployed by uvicorn (or equivalent) server - -""" +"""Main application to be deployed by uvicorn (or equivalent) server""" import logging +from typing import Final +from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.logging_utils import config_all_loggers +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from simcore_service_director.core.application import create_app from simcore_service_director.core.settings import ApplicationSettings -_the_settings = ApplicationSettings.create_from_envs() +_logger = logging.getLogger(__name__) -# SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 -logging.basicConfig(level=_the_settings.DIRECTOR_LOGLEVEL) -logging.root.setLevel(_the_settings.DIRECTOR_LOGLEVEL) -config_all_loggers( - log_format_local_dev_enabled=_the_settings.DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=_the_settings.DIRECTOR_LOG_FILTER_MAPPING, - tracing_settings=_the_settings.DIRECTOR_TRACING, +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "httpcore", + "httpx", + "werkzeug", ) -# SINGLETON FastAPI app -the_app: FastAPI = create_app(_the_settings) + +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = create_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.DIRECTOR_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.DIRECTOR_LOG_FILTER_MAPPING, + tracing_settings=app_settings.DIRECTOR_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) + + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app diff --git a/services/director/src/simcore_service_director/registry_proxy.py b/services/director/src/simcore_service_director/registry_proxy.py index 56b5d812f8c7..8490e4d2c6ef 100644 --- a/services/director/src/simcore_service_director/registry_proxy.py +++ 
b/services/director/src/simcore_service_director/registry_proxy.py @@ -8,9 +8,9 @@ import httpx from aiocache import Cache, SimpleMemoryCache # type: ignore[import-untyped] +from common_library.async_tools import cancel_wait_task from common_library.json_serialization import json_loads from fastapi import FastAPI, status -from servicelib.async_utils import cancel_wait_task from servicelib.background_task import create_periodic_task from servicelib.fastapi.client_session import get_client_session from servicelib.logging_utils import log_catch, log_context @@ -25,6 +25,7 @@ from .constants import DIRECTOR_SIMCORE_SERVICES_PREFIX from .core.errors import ( DirectorRuntimeError, + DockerRegistryUnsupportedManifestSchemaVersionError, RegistryConnectionError, ServiceNotAvailableError, ) @@ -93,7 +94,9 @@ async def _basic_auth_registry_request( raise ServiceNotAvailableError(service_name=path) elif response.status_code >= status.HTTP_400_BAD_REQUEST: - raise RegistryConnectionError(msg=str(response)) + raise RegistryConnectionError( + msg=f"{response}: {response.text} for {request_url}" + ) else: # registry that does not need an auth @@ -164,7 +167,9 @@ async def _auth_registry_request( # noqa: C901 if resp_wbasic.status_code == status.HTTP_404_NOT_FOUND: raise ServiceNotAvailableError(service_name=f"{url}") if resp_wbasic.status_code >= status.HTTP_400_BAD_REQUEST: - raise RegistryConnectionError(msg=f"{resp_wbasic}") + raise RegistryConnectionError( + msg=f"{resp_wbasic}: {resp_wbasic.text} for {url}" + ) resp_data = await resp_wbasic.json(content_type=None) resp_headers = resp_wbasic.headers return (resp_data, resp_headers) @@ -198,7 +203,22 @@ async def registry_request( if use_cache and (cached_response := await cache.get(cache_key)): assert isinstance(cached_response, tuple) # nosec return cast(tuple[dict, Mapping], cached_response) - + # Add proper Accept headers for manifest requests for accepting both v1 and v2 + if "manifests/" in path and method.upper() == "GET": + headers = session_kwargs.get("headers", {}) + headers.update( + { + "Accept": ", ".join( + [ + "application/vnd.docker.distribution.manifest.v2+json", + "application/vnd.docker.distribution.manifest.list.v2+json", + "application/vnd.docker.distribution.manifest.v1+prettyjws", + "application/json", + ] + ) + } + ) + session_kwargs["headers"] = headers app_settings = get_application_settings(app) try: response, response_headers = await _retried_request( @@ -375,22 +395,72 @@ async def get_image_labels( app: FastAPI, image: str, tag: str, *, update_cache=False ) -> tuple[dict[str, str], str | None]: """Returns image labels and the image manifest digest""" + with log_context(_logger, logging.DEBUG, msg=f"get {image}:{tag} labels"): + request_result, headers = await registry_request( + app, + path=f"{image}/manifests/{tag}", + method="GET", + use_cache=not update_cache, + ) - _logger.debug("getting image labels of %s:%s", image, tag) - path = f"{image}/manifests/{tag}" - request_result, headers = await registry_request( - app, path=path, method="GET", use_cache=not update_cache - ) - v1_compatibility_key = json_loads(request_result["history"][0]["v1Compatibility"]) - container_config: dict[str, Any] = v1_compatibility_key.get( - "container_config", v1_compatibility_key["config"] - ) - labels: dict[str, str] = container_config["Labels"] + schema_version = request_result["schemaVersion"] + labels: dict[str, str] = {} + match schema_version: + case 2: + # Image Manifest Version 2, Schema 2 -> defaults in registries v3 
(https://distribution.github.io/distribution/spec/manifest-v2-2/) + media_type = request_result["mediaType"] + if ( + media_type + == "application/vnd.docker.distribution.manifest.list.v2+json" + ): + # default to x86_64 architecture + _logger.info( + "Image %s:%s is a docker image with multiple architectures. " + "Currently defaulting to first architecture", + image, + tag, + ) + manifests = request_result.get("manifests", []) + if not manifests: + raise DockerRegistryUnsupportedManifestSchemaVersionError( + version=schema_version, + service_name=image, + service_tag=tag, + reason="Manifest list is empty", + ) + first_manifest_digest = manifests[0]["digest"] + request_result, _ = await registry_request( + app, + path=f"{image}/manifests/{first_manifest_digest}", + method="GET", + use_cache=not update_cache, + ) - headers = headers or {} - manifest_digest: str | None = headers.get(_DOCKER_CONTENT_DIGEST_HEADER, None) + config_digest = request_result["config"]["digest"] + # Fetch the config blob + config_result, _ = await registry_request( + app, + path=f"{image}/blobs/{config_digest}", + method="GET", + use_cache=not update_cache, + ) + labels = config_result.get("config", {}).get("Labels", {}) + case 1: + # Image Manifest Version 2, Schema 1 deprecated in docker hub since 2024-11-04 + v1_compatibility_key = json_loads( + request_result["history"][0]["v1Compatibility"] + ) + container_config: dict[str, Any] = v1_compatibility_key.get( + "container_config", v1_compatibility_key.get("config", {}) + ) + labels = container_config.get("Labels", {}) + case _: + raise DockerRegistryUnsupportedManifestSchemaVersionError( + version=schema_version, service_name=image, service_tag=tag + ) - _logger.debug("retrieved labels of image %s:%s", image, tag) + headers = headers or {} + manifest_digest: str | None = headers.get(_DOCKER_CONTENT_DIGEST_HEADER, None) return (labels, manifest_digest) diff --git a/services/director/tests/unit/conftest.py b/services/director/tests/unit/conftest.py index 15b7627e29d6..68c20076f76f 100644 --- a/services/director/tests/unit/conftest.py +++ b/services/director/tests/unit/conftest.py @@ -19,6 +19,7 @@ pytest_plugins = [ "fixtures.fake_services", + "pytest_simcore.asyncio_event_loops", "pytest_simcore.cli_runner", "pytest_simcore.docker", "pytest_simcore.docker_compose", @@ -27,6 +28,7 @@ "pytest_simcore.environment_configs", "pytest_simcore.faker_projects_data", "pytest_simcore.faker_users_data", + "pytest_simcore.logging", "pytest_simcore.repository_paths", "pytest_simcore.simcore_service_library_fixtures", ] diff --git a/services/director/tests/unit/fixtures/fake_services.py b/services/director/tests/unit/fixtures/fake_services.py index 1edb799ee9cb..af867d08d920 100644 --- a/services/director/tests/unit/fixtures/fake_services.py +++ b/services/director/tests/unit/fixtures/fake_services.py @@ -117,7 +117,6 @@ async def _build_and_push_image( bad_json_format: bool = False, app_settings: ApplicationSettings, ) -> ServiceInRegistryInfoDict: - # crate image service_description = _create_service_description(service_type, name, tag) docker_labels = _create_docker_labels( @@ -214,12 +213,13 @@ async def _build_and_push_image( ) -def _clean_registry(registry_url: str, list_of_images: list[ServiceInRegistryInfoDict]): +def _clean_registry(list_of_images: list[ServiceInRegistryInfoDict]): request_headers = {"accept": "application/vnd.docker.distribution.manifest.v2+json"} for image in list_of_images: service_description = image["service_description"] # get the image digest tag 
= service_description["version"] + registry_url = image["image_path"].split("/")[0] url = "http://{host}/v2/{name}/manifests/{tag}".format( host=registry_url, name=service_description["key"], tag=tag ) @@ -243,15 +243,13 @@ async def __call__( inter_dependent_services: bool = False, bad_json_format: bool = False, version="1.0.", - ) -> list[ServiceInRegistryInfoDict]: - ... + ) -> list[ServiceInRegistryInfoDict]: ... @pytest.fixture def push_services( docker_registry: str, app_settings: ApplicationSettings ) -> Iterator[PushServicesCallable]: - registry_url = docker_registry list_of_pushed_images_tags: list[ServiceInRegistryInfoDict] = [] dependent_images = [] @@ -262,8 +260,13 @@ async def _build_push_images_to_docker_registry( inter_dependent_services=False, bad_json_format=False, version="1.0.", + override_registry_url: str | None = None, ) -> list[ServiceInRegistryInfoDict]: try: + registry_url = docker_registry + if override_registry_url: + _logger.info("Overriding registry URL with %s", override_registry_url) + registry_url = override_registry_url dependent_image = None if inter_dependent_services: dependent_image = await _build_and_push_image( @@ -317,5 +320,5 @@ async def _build_push_images_to_docker_registry( yield _build_push_images_to_docker_registry _logger.info("clean registry") - _clean_registry(registry_url, list_of_pushed_images_tags) - _clean_registry(registry_url, dependent_images) + _clean_registry(list_of_pushed_images_tags) + _clean_registry(dependent_images) diff --git a/services/director/tests/unit/test_core_settings.py b/services/director/tests/unit/test_core_settings.py index 5e8b42c1268c..122d6baea9af 100644 --- a/services/director/tests/unit/test_core_settings.py +++ b/services/director/tests/unit/test_core_settings.py @@ -3,7 +3,6 @@ # pylint: disable=unused-variable # pylint: disable=too-many-arguments - import datetime import pytest @@ -16,14 +15,7 @@ from simcore_service_director.core.settings import ApplicationSettings -def test_valid_web_application_settings(app_environment: EnvVarsDict): - """ - We validate actual envfiles (e.g. 
repo.config files) by passing them via the CLI - - $ ln -s /path/to/osparc-config/deployments/mydeploy.com/repo.config .secrets - $ pytest --external-envfile=.secrets --pdb tests/unit/test_core_settings.py - - """ +def test_valid_application_settings(app_environment: EnvVarsDict): settings = ApplicationSettings() # type: ignore assert settings diff --git a/services/director/tests/unit/test_registry_proxy.py b/services/director/tests/unit/test_registry_proxy.py index c15ccd7df7f2..74e7c5b7ef34 100644 --- a/services/director/tests/unit/test_registry_proxy.py +++ b/services/director/tests/unit/test_registry_proxy.py @@ -21,7 +21,6 @@ async def test_list_no_services_available( configure_registry_access: EnvVarsDict, app: FastAPI, ): - computational_services = await registry_proxy.list_services( app, registry_proxy.ServiceType.COMPUTATIONAL ) @@ -116,13 +115,36 @@ async def test_list_interactive_service_dependencies( assert image_dependencies[0]["tag"] == docker_dependencies[0]["tag"] +@pytest.fixture( + params=["docker_registry", "docker_registry_v2"], ids=["registry_v3", "registry_v2"] +) +def configure_registry_access_both_versions( + app_environment: EnvVarsDict, + monkeypatch: pytest.MonkeyPatch, + request: pytest.FixtureRequest, +) -> EnvVarsDict: + """Parametrized fixture that tests with both registry v3 and v2 - use only for specific tests that need both""" + registry_url = request.getfixturevalue(request.param) + return app_environment | setenvs_from_dict( + monkeypatch, + envs={ + "REGISTRY_URL": registry_url, + "REGISTRY_PATH": registry_url, + "REGISTRY_SSL": False, + "DIRECTOR_REGISTRY_CACHING": False, + }, + ) + + async def test_get_image_labels( - configure_registry_access: EnvVarsDict, + configure_registry_access_both_versions: EnvVarsDict, app: FastAPI, push_services, ): images = await push_services( - number_of_computational_services=1, number_of_interactive_services=1 + number_of_computational_services=1, + number_of_interactive_services=1, + override_registry_url=configure_registry_access_both_versions["REGISTRY_URL"], ) images_digests = set() for image in images: @@ -278,7 +300,7 @@ def configure_number_concurrency_calls( def test_list_services_performance( - skip_if_external_envfile_dict: None, + skip_if_no_external_envfile: None, configure_external_registry_access: EnvVarsDict, configure_number_concurrency_calls: EnvVarsDict, registry_settings: RegistrySettings, diff --git a/services/docker-api-proxy/requirements/_test.txt b/services/docker-api-proxy/requirements/_test.txt index a69d951d93c9..03053b86c412 100644 --- a/services/docker-api-proxy/requirements/_test.txt +++ b/services/docker-api-proxy/requirements/_test.txt @@ -12,7 +12,7 @@ aiofiles==24.1.0 # via -r requirements/../../../packages/service-library/requirements/_base.in aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -68,16 +68,10 @@ certifi==2025.1.31 # requests charset-normalizer==3.4.1 # via requests -click==8.1.8 +click==8.2.1 # via typer coverage==7.6.12 # via pytest-cov -deprecated==1.2.18 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions dnspython==2.7.0 # via email-validator docker==7.1.0 @@ -90,7 +84,7 @@ faker==36.1.1 # 
via -r requirements/_test.in fast-depends==2.4.12 # via faststream -fastapi==0.115.12 +fastapi==0.116.1 # via # -r requirements/_test.in # fastapi-lifespan-manager @@ -104,7 +98,7 @@ frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.68.0 +googleapis-common-protos==1.70.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -120,6 +114,10 @@ importlib-metadata==8.5.0 # via opentelemetry-api iniconfig==2.0.0 # via pytest +jsonref==1.1.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.23.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in @@ -134,59 +132,64 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.30.0 +opentelemetry-api==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.30.0 +opentelemetry-exporter-otlp==1.34.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.30.0 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.30.0 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.30.0 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.51b0 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.51b0 +opentelemetry-instrumentation-aio-pika==0.55b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-asyncpg==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-logging==0.51b0 +opentelemetry-instrumentation-logging==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.51b0 +opentelemetry-instrumentation-redis==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.51b0 +opentelemetry-instrumentation-requests==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.30.0 +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.30.0 +opentelemetry-sdk==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.51b0 
+opentelemetry-semantic-conventions==0.55b1 # via # opentelemetry-instrumentation + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.51b0 +opentelemetry-util-http==0.55b1 # via opentelemetry-instrumentation-requests orjson==3.10.15 # via @@ -217,12 +220,14 @@ packaging==24.2 pamqp==3.3.0 # via aiormq pluggy==1.5.0 - # via pytest + # via + # pytest + # pytest-cov propcache==0.3.0 # via # aiohttp # yarl -protobuf==5.29.3 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto @@ -230,7 +235,7 @@ psutil==7.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in pycryptodome==3.21.0 # via stream-zip -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -259,9 +264,9 @@ pydantic==2.10.6 # fastapi # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.2 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -290,20 +295,22 @@ pydantic-settings==2.7.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in pygments==2.19.1 - # via rich + # via + # pytest + # rich pyinstrument==5.0.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -pytest==8.3.5 +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio # pytest-cov # pytest-mock -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in python-dateutil==2.9.0.post0 # via arrow @@ -358,11 +365,11 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via # docker # opentelemetry-exporter-otlp-proto-http -rich==13.9.4 +rich==14.1.0 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -379,7 +386,7 @@ sniffio==1.3.1 # via # anyio # asgi-lifespan -starlette==0.46.0 +starlette==0.47.2 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -404,26 +411,34 @@ toolz==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.15.2 +typer==0.16.1 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in types-python-dateutil==2.9.0.20241206 # via arrow 
-typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # aiodebug # anyio # fastapi # faststream + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pydantic # pydantic-core # pydantic-extra-types + # starlette # typer + # typing-inspection +typing-inspection==0.4.1 + # via pydantic tzdata==2025.1 # via faker -urllib3==2.3.0 +urllib3==2.5.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -441,7 +456,6 @@ urllib3==2.3.0 # requests wrapt==1.17.2 # via - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-redis diff --git a/services/docker-api-proxy/requirements/_tools.txt b/services/docker-api-proxy/requirements/_tools.txt index 24be1a5cfb83..d00f2a07823e 100644 --- a/services/docker-api-proxy/requirements/_tools.txt +++ b/services/docker-api-proxy/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c requirements/_test.txt # black @@ -27,9 +27,9 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # black # mypy @@ -41,7 +41,9 @@ packaging==24.2 # black # build pathspec==0.12.1 - # via black + # via + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -66,11 +68,11 @@ pyyaml==6.0.2 # pre-commit ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.2 +setuptools==80.9.0 # via pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_test.txt # mypy diff --git a/services/docker-api-proxy/tests/integration/conftest.py b/services/docker-api-proxy/tests/integration/conftest.py index 10878c70d574..09d5ec3cb9a8 100644 --- a/services/docker-api-proxy/tests/integration/conftest.py +++ b/services/docker-api-proxy/tests/integration/conftest.py @@ -23,6 +23,7 @@ "pytest_simcore.docker_api_proxy", "pytest_simcore.docker_compose", "pytest_simcore.docker_swarm", + "pytest_simcore.logging", "pytest_simcore.repository_paths", "pytest_simcore.simcore_services", ] diff --git a/services/docker-compose-deploy.yml b/services/docker-compose-deploy.yml index e6c21da36dbf..2ad1ca974b94 100644 --- a/services/docker-compose-deploy.yml +++ b/services/docker-compose-deploy.yml @@ -17,8 +17,12 @@ services: image: ${DOCKER_REGISTRY:-itisfoundation}/director:${DOCKER_IMAGE_TAG:-latest} director-v2: image: ${DOCKER_REGISTRY:-itisfoundation}/director-v2:${DOCKER_IMAGE_TAG:-latest} + docker-api-proxy: + image: ${DOCKER_REGISTRY:-itisfoundation}/docker-api-proxy:${DOCKER_IMAGE_TAG:-latest} dynamic-sidecar: image: ${DOCKER_REGISTRY:-itisfoundation}/dynamic-sidecar:${DOCKER_IMAGE_TAG:-latest} + dynamic-scheduler: + image: ${DOCKER_REGISTRY:-itisfoundation}/dynamic-scheduler:${DOCKER_IMAGE_TAG:-latest} efs-guardian: image: ${DOCKER_REGISTRY:-itisfoundation}/efs-guardian:${DOCKER_IMAGE_TAG:-latest} invitations: @@ -29,10 +33,6 @@ services: image: ${DOCKER_REGISTRY:-itisfoundation}/notifications:${DOCKER_IMAGE_TAG:-latest} payments: image: 
${DOCKER_REGISTRY:-itisfoundation}/payments:${DOCKER_IMAGE_TAG:-latest} - dynamic-scheduler: - image: ${DOCKER_REGISTRY:-itisfoundation}/dynamic-scheduler:${DOCKER_IMAGE_TAG:-latest} - docker-api-proxy: - image: ${DOCKER_REGISTRY:-itisfoundation}/docker-api-proxy:${DOCKER_IMAGE_TAG:-latest} resource-usage-tracker: image: ${DOCKER_REGISTRY:-itisfoundation}/resource-usage-tracker:${DOCKER_IMAGE_TAG:-latest} service-integration: diff --git a/services/docker-compose-dev-vendors.yml b/services/docker-compose-dev-vendors.yml index 2c885c0ea95d..338bccfd4aa5 100644 --- a/services/docker-compose-dev-vendors.yml +++ b/services/docker-compose-dev-vendors.yml @@ -15,7 +15,7 @@ services: - traefik.enable=true - traefik.swarm.network=${SWARM_STACK_NAME}_default # auth: https://doc.traefik.io/traefik/middlewares/http/forwardauth - - traefik.http.middlewares.${SWARM_STACK_NAME}_manual-auth.forwardauth.address=http://${WEBSERVER_HOST}:${WEBSERVER_PORT}/v0/auth:check + - traefik.http.middlewares.${SWARM_STACK_NAME}_manual-auth.forwardauth.address=http://${WB_AUTH_WEBSERVER_HOST}:${WB_AUTH_WEBSERVER_PORT}/v0/auth:check - traefik.http.middlewares.${SWARM_STACK_NAME}_manual-auth.forwardauth.trustForwardHeader=true - traefik.http.middlewares.${SWARM_STACK_NAME}_manual-auth.forwardauth.authResponseHeaders=Set-Cookie,osparc-sc2 # routing diff --git a/services/docker-compose-ops-registry.yml b/services/docker-compose-ops-registry.yml index ffc4b5d206c2..4b0c087c6325 100644 --- a/services/docker-compose-ops-registry.yml +++ b/services/docker-compose-ops-registry.yml @@ -4,7 +4,7 @@ version: "3.7" services: registry: - image: registry:2 + image: registry:3 container_name: registry init: true environment: diff --git a/services/docker-compose-ops.yml b/services/docker-compose-ops.yml index 3db7af6aed0d..d6a336c9dacc 100644 --- a/services/docker-compose-ops.yml +++ b/services/docker-compose-ops.yml @@ -81,7 +81,7 @@ services: retries: 5 redis-commander: - image: rediscommander/redis-commander:latest + image: ghcr.io/joeferner/redis-commander:latest init: true environment: - >- @@ -92,13 +92,15 @@ services: scheduled_maintenance:${REDIS_HOST}:${REDIS_PORT}:3:${REDIS_PASSWORD}, user_notifications:${REDIS_HOST}:${REDIS_PORT}:4:${REDIS_PASSWORD}, announcements:${REDIS_HOST}:${REDIS_PORT}:5:${REDIS_PASSWORD}, - distributed_identifiers:${REDIS_HOST}:${REDIS_PORT}:6:${REDIS_PASSWORD}, + long_running_tasks:${REDIS_HOST}:${REDIS_PORT}:6:${REDIS_PASSWORD}, deferred_tasks:${REDIS_HOST}:${REDIS_PORT}:7:${REDIS_PASSWORD}, - dynamic_services:${REDIS_HOST}:${REDIS_PORT}:8:${REDIS_PASSWORD} - celery_tasks:${REDIS_HOST}:${REDIS_PORT}:9:${REDIS_PASSWORD} + dynamic_services:${REDIS_HOST}:${REDIS_PORT}:8:${REDIS_PASSWORD}, + celery_tasks:${REDIS_HOST}:${REDIS_PORT}:9:${REDIS_PASSWORD}, + documents:${REDIS_HOST}:${REDIS_PORT}:10:${REDIS_PASSWORD} # If you add/remove a db, do not forget to update the --databases entry in the docker-compose.yml ports: - "18081:8081" + user: redis networks: - simcore_default opentelemetry-collector: diff --git a/services/docker-compose.devel.yml b/services/docker-compose.devel.yml index 2b994b99ffda..28e5a8bfa95c 100644 --- a/services/docker-compose.devel.yml +++ b/services/docker-compose.devel.yml @@ -21,6 +21,16 @@ services: - ../packages:/devel/packages - ${HOST_UV_CACHE_DIR}:/home/scu/.cache/uv + api-worker: + environment: + <<: *common-environment + API_SERVER_PROFILING : ${API_SERVER_PROFILING} + API_SERVER_LOGLEVEL: DEBUG + volumes: + - ./api-server:/devel/services/api-server + - 
../packages:/devel/packages + - ${HOST_UV_CACHE_DIR}:/home/scu/.cache/uv + autoscaling: environment: <<: *common-environment @@ -150,9 +160,13 @@ services: WEBSERVER_LOGLEVEL: DEBUG WEBSERVER_PROFILING: ${WEBSERVER_PROFILING} WEBSERVER_REMOTE_DEBUGGING_PORT: 3000 - WEBSERVER_FUNCTIONS: ${WEBSERVER_FUNCTIONS} + wb-auth: + volumes: *webserver_volumes_devel + environment: + <<: *webserver_environment_devel + wb-api-server: volumes: *webserver_volumes_devel environment: diff --git a/services/docker-compose.local.yml b/services/docker-compose.local.yml index 5be8ddc49171..f1b1514ba557 100644 --- a/services/docker-compose.local.yml +++ b/services/docker-compose.local.yml @@ -149,6 +149,15 @@ services: ports: - "8080" - "3022:3000" + + api-worker: + environment: + <<: *common_environment + API_SERVER_REMOTE_DEBUG_PORT : 3000 + ports: + - "8080" + - "3025:3000" + webserver: environment: &webserver_environment_local <<: *common_environment @@ -158,13 +167,21 @@ services: - "3001:3000" deploy: labels: - - traefik.http.services.${SWARM_STACK_NAME}_webserver.loadbalancer.sticky.cookie.secure=false + # locally webserver is accessible through http:// + - traefik.http.services.${SWARM_STACK_NAME}_webserver_sticky.loadbalancer.sticky.cookie.secure=false - traefik.http.routers.${SWARM_STACK_NAME}_webserver_local.service=${SWARM_STACK_NAME}_webserver - traefik.http.routers.${SWARM_STACK_NAME}_webserver_local.entrypoints=http - traefik.http.routers.${SWARM_STACK_NAME}_webserver_local.rule=PathPrefix(`/dev/`) - traefik.http.routers.${SWARM_STACK_NAME}_webserver_local.priority=9 - traefik.http.routers.${SWARM_STACK_NAME}_webserver_local.middlewares=${SWARM_STACK_NAME}_gzip@swarm, ${SWARM_STACK_NAME_NO_HYPHEN}_sslheader@swarm, ${SWARM_STACK_NAME}_webserver_retry + wb-auth: + environment: + <<: *webserver_environment_local + ports: + - "8080" + - "3024:3000" + wb-api-server: environment: <<: *webserver_environment_local diff --git a/services/docker-compose.yml b/services/docker-compose.yml index b2a0e27da0a2..b9e799e4938b 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -1,22 +1,52 @@ x-dask-tls-secrets: &dask_tls_secrets - source: dask_tls_key target: ${DASK_TLS_KEY} - # mode: 444 # not supported by docker stack compose as of 26.0.0 + # mode: 444 # not supported by docker stack compose as of 26.0.0 - source: dask_tls_cert target: ${DASK_TLS_CERT} - # mode: 444 # not supported by docker stack compose as of 26.0.0 +# mode: 444 # not supported by docker stack compose as of 26.0.0 + +x-tracing-open-telemetry: &tracing_open_telemetry_environs + TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} + TRACING_OPENTELEMETRY_COLLECTOR_BATCH_SIZE: ${TRACING_OPENTELEMETRY_COLLECTOR_BATCH_SIZE} + TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} + TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE: ${TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE} + +x-webserver-diagnostics: &webserver_diagnostics_environs + DIAGNOSTICS_HEALTHCHECK_ENABLED: ${DIAGNOSTICS_HEALTHCHECK_ENABLED} + DIAGNOSTICS_MAX_AVG_LATENCY: ${DIAGNOSTICS_MAX_AVG_LATENCY} + DIAGNOSTICS_MAX_TASK_DELAY: ${DIAGNOSTICS_MAX_TASK_DELAY} + DIAGNOSTICS_SLOW_DURATION_SECS: ${DIAGNOSTICS_SLOW_DURATION_SECS} + +x-postgres-settings: &postgres_settings + POSTGRES_DB: ${POSTGRES_DB} + POSTGRES_HOST: ${POSTGRES_HOST} + POSTGRES_MAXSIZE: ${POSTGRES_MAXSIZE} + POSTGRES_MINSIZE: ${POSTGRES_MINSIZE} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} + POSTGRES_MAX_POOLSIZE: 
${POSTGRES_MAX_POOLSIZE} + POSTGRES_MAX_OVERFLOW: ${POSTGRES_MAX_OVERFLOW} + POSTGRES_PORT: ${POSTGRES_PORT} + POSTGRES_USER: ${POSTGRES_USER} services: api-server: image: ${DOCKER_REGISTRY:-itisfoundation}/api-server:${DOCKER_IMAGE_TAG:-latest} init: true hostname: "{{.Node.Hostname}}-{{.Task.Slot}}" - environment: + environment: &api_server_environment + <<: + - *tracing_open_telemetry_environs + - *postgres_settings + API_SERVER_DEV_FEATURES_ENABLED: ${API_SERVER_DEV_FEATURES_ENABLED} - API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} API_SERVER_LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} + API_SERVER_LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} API_SERVER_LOGLEVEL: ${API_SERVER_LOGLEVEL} API_SERVER_PROFILING: ${API_SERVER_PROFILING} + API_SERVER_TRACING: ${API_SERVER_TRACING} + API_SERVER_WEBSERVER_RPC_NAMESPACE: ${WB_API_WEBSERVER_HOST} + API_SERVER_WORKER_MODE: "false" CATALOG_HOST: ${CATALOG_HOST} CATALOG_PORT: ${CATALOG_PORT} @@ -24,27 +54,27 @@ services: DIRECTOR_V2_HOST: ${DIRECTOR_V2_HOST} DIRECTOR_V2_PORT: ${DIRECTOR_V2_PORT} - POSTGRES_DB: ${POSTGRES_DB} - POSTGRES_HOST: ${POSTGRES_HOST} - POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} - POSTGRES_PORT: ${POSTGRES_PORT} - POSTGRES_USER: ${POSTGRES_USER} - RABBIT_HOST: ${RABBIT_HOST} RABBIT_PASSWORD: ${RABBIT_PASSWORD} RABBIT_PORT: ${RABBIT_PORT} RABBIT_SECURE: ${RABBIT_SECURE} RABBIT_USER: ${RABBIT_USER} + REDIS_HOST: ${REDIS_HOST} + REDIS_PORT: ${REDIS_PORT} + REDIS_SECURE: ${REDIS_SECURE} + REDIS_USER: ${REDIS_USER} + REDIS_PASSWORD: ${REDIS_PASSWORD} + STORAGE_HOST: ${STORAGE_HOST} STORAGE_PORT: ${STORAGE_PORT} + TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} + TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} + WEBSERVER_HOST: ${WB_API_WEBSERVER_HOST} WEBSERVER_PORT: ${WB_API_WEBSERVER_PORT} WEBSERVER_SESSION_SECRET_KEY: ${WEBSERVER_SESSION_SECRET_KEY} - API_SERVER_TRACING: ${API_SERVER_TRACING} - TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} - TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} deploy: labels: @@ -57,13 +87,29 @@ services: - traefik.http.services.${SWARM_STACK_NAME}_api-server.loadbalancer.healthcheck.path=/ - traefik.http.services.${SWARM_STACK_NAME}_api-server.loadbalancer.healthcheck.interval=2000ms - traefik.http.services.${SWARM_STACK_NAME}_api-server.loadbalancer.healthcheck.timeout=1000ms + # NOTE: keep in sync with fallback router (rule and entrypoint) - traefik.http.routers.${SWARM_STACK_NAME}_api-server.rule=(Path(`/`) || Path(`/v0`) || PathPrefix(`/v0/`) || Path(`/api/v0/openapi.json`)) - traefik.http.routers.${SWARM_STACK_NAME}_api-server.entrypoints=simcore_api - traefik.http.routers.${SWARM_STACK_NAME}_api-server.priority=3 - traefik.http.routers.${SWARM_STACK_NAME}_api-server.middlewares=${SWARM_STACK_NAME}_gzip@swarm,ratelimit-${SWARM_STACK_NAME}_api-server,inflightreq-${SWARM_STACK_NAME}_api-server - networks: + networks: &api_server_networks - default + + api-worker: + image: ${DOCKER_REGISTRY:-itisfoundation}/api-server:${DOCKER_IMAGE_TAG:-latest} + init: true + hostname: "api-worker-{{.Node.Hostname}}-{{.Task.Slot}}" + environment: + <<: *api_server_environment + API_SERVER_TRACING: "null" + API_SERVER_WORKER_NAME: "api-worker-{{.Node.Hostname}}-{{.Task.Slot}}-{{.Task.ID}}" + API_SERVER_WORKER_MODE: "true" + CELERY_CONCURRENCY: ${API_SERVER_CELERY_CONCURRENCY} + CELERY_QUEUES: "api_worker_queue" + networks: 
*api_server_networks + + autoscaling: image: ${DOCKER_REGISTRY:-itisfoundation}/autoscaling:${DOCKER_IMAGE_TAG:-latest} init: true @@ -71,6 +117,7 @@ services: networks: - autoscaling_subnet environment: + <<: *tracing_open_telemetry_environs AUTOSCALING_LOGLEVEL: ${AUTOSCALING_LOGLEVEL} AUTOSCALING_POLL_INTERVAL: ${AUTOSCALING_POLL_INTERVAL} AUTOSCALING_DRAIN_NODES_WITH_LABELS: ${AUTOSCALING_DRAIN_NODES_WITH_LABELS} @@ -93,7 +140,7 @@ services: EC2_INSTANCES_MAX_START_TIME: ${EC2_INSTANCES_MAX_START_TIME} EC2_INSTANCES_NAME_PREFIX: ${EC2_INSTANCES_NAME_PREFIX} EC2_INSTANCES_SECURITY_GROUP_IDS: ${EC2_INSTANCES_SECURITY_GROUP_IDS} - EC2_INSTANCES_SUBNET_ID: ${EC2_INSTANCES_SUBNET_ID} + EC2_INSTANCES_SUBNET_IDS: ${EC2_INSTANCES_SUBNET_IDS} EC2_INSTANCES_KEY_NAME: ${EC2_INSTANCES_KEY_NAME} EC2_INSTANCES_TIME_BEFORE_DRAINING: ${EC2_INSTANCES_TIME_BEFORE_DRAINING} EC2_INSTANCES_TIME_BEFORE_TERMINATION: ${EC2_INSTANCES_TIME_BEFORE_TERMINATION} @@ -112,7 +159,7 @@ services: SSM_REGION_NAME: ${SSM_REGION_NAME} LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} + LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} RABBIT_HOST: ${RABBIT_HOST} RABBIT_PASSWORD: ${RABBIT_PASSWORD} RABBIT_PORT: ${RABBIT_PORT} @@ -129,8 +176,6 @@ services: REGISTRY_SSL: ${REGISTRY_SSL} REGISTRY_AUTH: ${REGISTRY_AUTH} AUTOSCALING_TRACING: ${AUTOSCALING_TRACING} - TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} - TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} volumes: - "/var/run/docker.sock:/var/run/docker.sock" deploy: @@ -150,6 +195,9 @@ services: init: true hostname: "cat-{{.Node.Hostname}}-{{.Task.Slot}}" environment: + <<: + - *postgres_settings + - *tracing_open_telemetry_environs CATALOG_BACKGROUND_TASK_REST_TIME: ${CATALOG_BACKGROUND_TASK_REST_TIME} CATALOG_DEV_FEATURES_ENABLED: ${CATALOG_DEV_FEATURES_ENABLED} CATALOG_LOGLEVEL: ${CATALOG_LOGLEVEL} @@ -161,20 +209,14 @@ services: DIRECTOR_HOST: ${DIRECTOR_HOST:-director} DIRECTOR_PORT: ${DIRECTOR_PORT:-8080} LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} - POSTGRES_DB: ${POSTGRES_DB} - POSTGRES_HOST: ${POSTGRES_HOST} - POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} - POSTGRES_PORT: ${POSTGRES_PORT} - POSTGRES_USER: ${POSTGRES_USER} + LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} + RABBIT_HOST: ${RABBIT_HOST} RABBIT_PASSWORD: ${RABBIT_PASSWORD} RABBIT_PORT: ${RABBIT_PORT} RABBIT_SECURE: ${RABBIT_SECURE} RABBIT_USER: ${RABBIT_USER} CATALOG_TRACING: ${CATALOG_TRACING} - TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} - TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} networks: - default @@ -185,9 +227,12 @@ services: networks: - default environment: + <<: *tracing_open_telemetry_environs CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG: ${CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG} CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH: ${CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH} + CLUSTERS_KEEPER_DASK_NPROCS: ${CLUSTERS_KEEPER_DASK_NPROCS} CLUSTERS_KEEPER_DASK_NTHREADS: ${CLUSTERS_KEEPER_DASK_NTHREADS} + CLUSTERS_KEEPER_DASK_NTHREADS_MULTIPLIER: ${CLUSTERS_KEEPER_DASK_NTHREADS_MULTIPLIER} CLUSTERS_KEEPER_DASK_WORKER_SATURATION: ${CLUSTERS_KEEPER_DASK_WORKER_SATURATION} CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION: ${CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION} 
CLUSTERS_KEEPER_TASK_INTERVAL: ${CLUSTERS_KEEPER_TASK_INTERVAL} @@ -204,13 +249,13 @@ services: CLUSTERS_KEEPER_SSM_SECRET_ACCESS_KEY: ${CLUSTERS_KEEPER_SSM_SECRET_ACCESS_KEY} CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX: ${CLUSTERS_KEEPER_EC2_INSTANCES_PREFIX} LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} + LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES: ${CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES} PRIMARY_EC2_INSTANCES_ALLOWED_TYPES: ${PRIMARY_EC2_INSTANCES_ALLOWED_TYPES} PRIMARY_EC2_INSTANCES_KEY_NAME: ${PRIMARY_EC2_INSTANCES_KEY_NAME} PRIMARY_EC2_INSTANCES_MAX_INSTANCES: ${PRIMARY_EC2_INSTANCES_MAX_INSTANCES} PRIMARY_EC2_INSTANCES_SECURITY_GROUP_IDS: ${PRIMARY_EC2_INSTANCES_SECURITY_GROUP_IDS} - PRIMARY_EC2_INSTANCES_SUBNET_ID: ${PRIMARY_EC2_INSTANCES_SUBNET_ID} + PRIMARY_EC2_INSTANCES_SUBNET_IDS: ${PRIMARY_EC2_INSTANCES_SUBNET_IDS} PRIMARY_EC2_INSTANCES_CUSTOM_TAGS: ${PRIMARY_EC2_INSTANCES_CUSTOM_TAGS} PRIMARY_EC2_INSTANCES_ATTACHED_IAM_PROFILE: ${PRIMARY_EC2_INSTANCES_ATTACHED_IAM_PROFILE} PRIMARY_EC2_INSTANCES_SSM_TLS_DASK_CA: ${PRIMARY_EC2_INSTANCES_SSM_TLS_DASK_CA} @@ -240,11 +285,9 @@ services: WORKERS_EC2_INSTANCES_MAX_INSTANCES: ${WORKERS_EC2_INSTANCES_MAX_INSTANCES} WORKERS_EC2_INSTANCES_MAX_START_TIME: ${WORKERS_EC2_INSTANCES_MAX_START_TIME} WORKERS_EC2_INSTANCES_SECURITY_GROUP_IDS: ${WORKERS_EC2_INSTANCES_SECURITY_GROUP_IDS} - WORKERS_EC2_INSTANCES_SUBNET_ID: ${WORKERS_EC2_INSTANCES_SUBNET_ID} + WORKERS_EC2_INSTANCES_SUBNET_IDS: ${WORKERS_EC2_INSTANCES_SUBNET_IDS} WORKERS_EC2_INSTANCES_CUSTOM_TAGS: ${WORKERS_EC2_INSTANCES_CUSTOM_TAGS} CLUSTERS_KEEPER_TRACING: ${CLUSTERS_KEEPER_TRACING} - TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} - TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} secrets: *dask_tls_secrets director: @@ -252,6 +295,9 @@ services: init: true hostname: "{{.Node.Hostname}}-{{.Task.Slot}}" environment: + <<: + - *postgres_settings + - *tracing_open_telemetry_environs DIRECTOR_DEFAULT_MAX_MEMORY: ${DIRECTOR_DEFAULT_MAX_MEMORY} DIRECTOR_DEFAULT_MAX_NANO_CPUS: ${DIRECTOR_DEFAULT_MAX_NANO_CPUS} DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS: ${DIRECTOR_GENERIC_RESOURCE_PLACEMENT_CONSTRAINTS_SUBSTITUTIONS} @@ -264,12 +310,6 @@ services: DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS: ${DIRECTOR_SERVICES_CUSTOM_CONSTRAINTS} DIRECTOR_TRACING: ${DIRECTOR_TRACING} - POSTGRES_DB: ${POSTGRES_DB} - POSTGRES_HOST: ${POSTGRES_HOST} - POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} - POSTGRES_PORT: ${POSTGRES_PORT} - POSTGRES_USER: ${POSTGRES_USER} - REGISTRY_AUTH: ${REGISTRY_AUTH} REGISTRY_PATH: ${REGISTRY_PATH} REGISTRY_PW: ${REGISTRY_PW} @@ -281,10 +321,8 @@ services: STORAGE_ENDPOINT: ${STORAGE_ENDPOINT} SWARM_STACK_NAME: ${SWARM_STACK_NAME} - TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} - TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} - TRAEFIK_SIMCORE_ZONE: ${TRAEFIK_SIMCORE_ZONE} + volumes: - "/var/run/docker.sock:/var/run/docker.sock" deploy: @@ -300,6 +338,9 @@ services: init: true hostname: "{{.Node.Hostname}}-{{.Task.Slot}}" environment: + <<: + - *postgres_settings + - *tracing_open_telemetry_environs AWS_S3_CLI_S3: ${AWS_S3_CLI_S3} CATALOG_HOST: ${CATALOG_HOST} @@ -321,6 +362,8 @@ services: DIRECTOR_V2_PROFILING: ${DIRECTOR_V2_PROFILING} DIRECTOR_V2_DYNAMIC_SIDECAR_SLEEP_AFTER_CONTAINER_REMOVAL: 
${DIRECTOR_V2_DYNAMIC_SIDECAR_SLEEP_AFTER_CONTAINER_REMOVAL} + DIRECTOR_V2_DYNAMIC_SCHEDULER_ENABLED: ${DIRECTOR_V2_DYNAMIC_SCHEDULER_ENABLED} + DYNAMIC_SIDECAR_ENDPOINT_SPECS_MODE_DNSRR_ENABLED: ${DYNAMIC_SIDECAR_ENDPOINT_SPECS_MODE_DNSRR_ENABLED} DYNAMIC_SIDECAR_ENABLE_VOLUME_LIMITS: ${DYNAMIC_SIDECAR_ENABLE_VOLUME_LIMITS} DYNAMIC_SIDECAR_IMAGE: ${DYNAMIC_SIDECAR_IMAGE} @@ -330,16 +373,10 @@ services: DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT: ${DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT} LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} + LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} DIRECTOR_V2_LOGLEVEL: ${DIRECTOR_V2_LOGLEVEL} MONITORING_ENABLED: ${MONITORING_ENABLED} - POSTGRES_DB: ${POSTGRES_DB} - POSTGRES_HOST: ${POSTGRES_HOST} - POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} - POSTGRES_PORT: ${POSTGRES_PORT} - POSTGRES_USER: ${POSTGRES_USER} - R_CLONE_OPTION_BUFFER_SIZE: ${R_CLONE_OPTION_BUFFER_SIZE} R_CLONE_OPTION_RETRIES: ${R_CLONE_OPTION_RETRIES} R_CLONE_OPTION_TRANSFERS: ${R_CLONE_OPTION_TRANSFERS} @@ -385,12 +422,11 @@ services: SIMCORE_SERVICES_NETWORK_NAME: ${SIMCORE_SERVICES_NETWORK_NAME} SWARM_STACK_NAME: ${SWARM_STACK_NAME} TRAEFIK_SIMCORE_ZONE: ${TRAEFIK_SIMCORE_ZONE} - DIRECTOR_V2_TRACING: ${DIRECTOR_V2_TRACING} - TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} - TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} + DIRECTOR_V2_WEBSERVER_RPC_NAMESPACE: ${WEBSERVER_HOST} WEBSERVER_HOST: ${WEBSERVER_HOST} WEBSERVER_PORT: ${WEBSERVER_PORT} + volumes: - "/var/run/docker.sock:/var/run/docker.sock" deploy: @@ -410,8 +446,11 @@ services: networks: - default environment: + <<: + - *postgres_settings + - *tracing_open_telemetry_environs LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} + LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} RABBIT_HOST: ${RABBIT_HOST} RABBIT_PASSWORD: ${RABBIT_PASSWORD} RABBIT_PORT: ${RABBIT_PORT} @@ -422,11 +461,6 @@ services: REDIS_PORT: ${REDIS_PORT} REDIS_SECURE: ${REDIS_SECURE} REDIS_USER: ${REDIS_USER} - POSTGRES_DB: ${POSTGRES_DB} - POSTGRES_HOST: ${POSTGRES_HOST} - POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} - POSTGRES_PORT: ${POSTGRES_PORT} - POSTGRES_USER: ${POSTGRES_USER} SC_USER_ID: ${SC_USER_ID} SC_USER_NAME: ${SC_USER_NAME} EFS_USER_ID: ${EFS_USER_ID} @@ -438,8 +472,7 @@ services: EFS_MOUNTED_PATH: ${EFS_MOUNTED_PATH} EFS_PROJECT_SPECIFIC_DATA_DIRECTORY: ${EFS_PROJECT_SPECIFIC_DATA_DIRECTORY} EFS_GUARDIAN_TRACING: ${EFS_GUARDIAN_TRACING} - TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} - TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} + invitations: image: ${DOCKER_REGISTRY:-itisfoundation}/invitations:${DOCKER_IMAGE_TAG:-latest} init: true @@ -447,18 +480,18 @@ services: networks: - default environment: + <<: *tracing_open_telemetry_environs INVITATIONS_DEFAULT_PRODUCT: ${INVITATIONS_DEFAULT_PRODUCT} INVITATIONS_LOGLEVEL: ${INVITATIONS_LOGLEVEL} INVITATIONS_OSPARC_URL: ${INVITATIONS_OSPARC_URL} INVITATIONS_PASSWORD: ${INVITATIONS_PASSWORD} INVITATIONS_SECRET_KEY: ${INVITATIONS_SECRET_KEY} INVITATIONS_SWAGGER_API_DOC_ENABLED: ${INVITATIONS_SWAGGER_API_DOC_ENABLED} + INVITATIONS_TRACING: ${INVITATIONS_TRACING} INVITATIONS_USERNAME: ${INVITATIONS_USERNAME} LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} - INVITATIONS_TRACING: ${INVITATIONS_TRACING} - 
TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} - TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} + LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} + payments: image: ${DOCKER_REGISTRY:-itisfoundation}/payments:${DOCKER_IMAGE_TAG:-latest} init: true @@ -466,8 +499,12 @@ services: networks: - default environment: + <<: + - *postgres_settings + - *tracing_open_telemetry_environs + + LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} PAYMENTS_ACCESS_TOKEN_EXPIRE_MINUTES: ${PAYMENTS_ACCESS_TOKEN_EXPIRE_MINUTES} PAYMENTS_ACCESS_TOKEN_SECRET_KEY: ${PAYMENTS_ACCESS_TOKEN_SECRET_KEY} PAYMENTS_AUTORECHARGE_DEFAULT_MONTHLY_LIMIT: ${PAYMENTS_AUTORECHARGE_DEFAULT_MONTHLY_LIMIT} @@ -475,19 +512,17 @@ services: PAYMENTS_AUTORECHARGE_ENABLED: ${PAYMENTS_AUTORECHARGE_ENABLED} PAYMENTS_AUTORECHARGE_MIN_BALANCE_IN_CREDITS: ${PAYMENTS_AUTORECHARGE_MIN_BALANCE_IN_CREDITS} PAYMENTS_BCC_EMAIL: ${PAYMENTS_BCC_EMAIL} + PAYMENTS_EMAIL: ${PAYMENTS_EMAIL} PAYMENTS_GATEWAY_API_SECRET: ${PAYMENTS_GATEWAY_API_SECRET} PAYMENTS_GATEWAY_URL: ${PAYMENTS_GATEWAY_URL} PAYMENTS_LOGLEVEL: ${PAYMENTS_LOGLEVEL} PAYMENTS_PASSWORD: ${PAYMENTS_PASSWORD} - PAYMENTS_STRIPE_URL: ${PAYMENTS_STRIPE_URL} PAYMENTS_STRIPE_API_SECRET: ${PAYMENTS_STRIPE_API_SECRET} + PAYMENTS_STRIPE_URL: ${PAYMENTS_STRIPE_URL} PAYMENTS_SWAGGER_API_DOC_ENABLED: ${PAYMENTS_SWAGGER_API_DOC_ENABLED} + PAYMENTS_TRACING: ${PAYMENTS_TRACING} PAYMENTS_USERNAME: ${PAYMENTS_USERNAME} - POSTGRES_DB: ${POSTGRES_DB} - POSTGRES_HOST: ${POSTGRES_HOST} - POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} - POSTGRES_PORT: ${POSTGRES_PORT} - POSTGRES_USER: ${POSTGRES_USER} + PAYMENTS_WEBSERVER_RPC_NAMESPACE: ${WEBSERVER_HOST} RABBIT_HOST: ${RABBIT_HOST} RABBIT_PASSWORD: ${RABBIT_PASSWORD} RABBIT_PORT: ${RABBIT_PORT} @@ -495,15 +530,11 @@ services: RABBIT_USER: ${RABBIT_USER} RESOURCE_USAGE_TRACKER_HOST: ${RESOURCE_USAGE_TRACKER_HOST} RESOURCE_USAGE_TRACKER_PORT: ${RESOURCE_USAGE_TRACKER_EXTERNAL_PORT} - PAYMENTS_EMAIL: ${PAYMENTS_EMAIL} SMTP_HOST: ${SMTP_HOST} SMTP_PASSWORD: ${SMTP_PASSWORD} SMTP_PORT: ${SMTP_PORT} SMTP_PROTOCOL: ${SMTP_PROTOCOL} SMTP_USERNAME: ${SMTP_USERNAME} - PAYMENTS_TRACING: ${PAYMENTS_TRACING} - TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} - TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} resource-usage-tracker: image: ${DOCKER_REGISTRY:-itisfoundation}/resource-usage-tracker:${DOCKER_IMAGE_TAG:-latest} @@ -512,14 +543,13 @@ services: networks: - default environment: + <<: + - *postgres_settings + - *tracing_open_telemetry_environs + LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} - POSTGRES_DB: ${POSTGRES_DB} - POSTGRES_ENDPOINT: ${POSTGRES_ENDPOINT} - POSTGRES_HOST: ${POSTGRES_HOST} - POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} - POSTGRES_PORT: ${POSTGRES_PORT} - POSTGRES_USER: ${POSTGRES_USER} + LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} + PROMETHEUS_URL: ${RESOURCE_USAGE_TRACKER_PROMETHEUS_URL} PROMETHEUS_USERNAME: ${RESOURCE_USAGE_TRACKER_PROMETHEUS_USERNAME} PROMETHEUS_PASSWORD: ${RESOURCE_USAGE_TRACKER_PROMETHEUS_PASSWORD} @@ -539,10 +569,9 @@ services: RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_COUNTER_FAIL: ${RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_COUNTER_FAIL} RESOURCE_USAGE_TRACKER_S3: ${RESOURCE_USAGE_TRACKER_S3} RESOURCE_USAGE_TRACKER_TRACING: 
${RESOURCE_USAGE_TRACKER_TRACING} - TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} - TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} RESOURCE_USAGE_TRACKER_PORT: ${RESOURCE_USAGE_TRACKER_PORT} + dynamic-schdlr: image: ${DOCKER_REGISTRY:-itisfoundation}/dynamic-scheduler:${DOCKER_IMAGE_TAG:-latest} init: true @@ -551,6 +580,10 @@ services: - default - docker-api-network environment: + <<: + - *postgres_settings + - *tracing_open_telemetry_environs + CATALOG_HOST: ${CATALOG_HOST} CATALOG_PORT: ${CATALOG_PORT} DIRECTOR_V2_HOST: ${DIRECTOR_V2_HOST} @@ -570,15 +603,9 @@ services: DYNAMIC_SCHEDULER_USE_INTERNAL_SCHEDULER: ${DYNAMIC_SCHEDULER_USE_INTERNAL_SCHEDULER} DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT: ${DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT} - LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} + LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - POSTGRES_DB: ${POSTGRES_DB} - POSTGRES_HOST: ${POSTGRES_HOST} - POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} - POSTGRES_PORT: ${POSTGRES_PORT} - POSTGRES_USER: ${POSTGRES_USER} - RABBIT_HOST: ${RABBIT_HOST} RABBIT_PASSWORD: ${RABBIT_PASSWORD} RABBIT_PORT: ${RABBIT_PORT} @@ -591,14 +618,13 @@ services: REDIS_SECURE: ${REDIS_SECURE} REDIS_USER: ${REDIS_USER} - TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} - TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} + docker-api-proxy: image: ${DOCKER_REGISTRY:-itisfoundation}/docker-api-proxy:${DOCKER_IMAGE_TAG:-latest} init: true environment: DOCKER_API_PROXY_PASSWORD: ${DOCKER_API_PROXY_PASSWORD} - DOCKER_API_PROXY_USER : ${DOCKER_API_PROXY_USER} + DOCKER_API_PROXY_USER: ${DOCKER_API_PROXY_USER} deploy: placement: constraints: @@ -607,7 +633,7 @@ services: volumes: - /var/run/docker.sock:/var/run/docker.sock networks: - - docker-api-network + - docker-api-network static-webserver: image: ${DOCKER_REGISTRY:-itisfoundation}/static-webserver:${DOCKER_IMAGE_TAG:-latest} @@ -628,6 +654,7 @@ services: - traefik.http.services.${SWARM_STACK_NAME}_static_webserver.loadbalancer.healthcheck.interval=2000ms - traefik.http.services.${SWARM_STACK_NAME}_static_webserver.loadbalancer.healthcheck.timeout=1000ms - traefik.http.middlewares.${SWARM_STACK_NAME}_static_webserver_retry.retry.attempts=2 + # NOTE: keep in sync with fallback router (rule and entrypoint) - traefik.http.routers.${SWARM_STACK_NAME}_static_webserver.rule=(Path(`/osparc`) || Path(`/s4l`) || Path(`/s4llite`) || Path(`/s4lacad`) || Path(`/s4lengine`) || Path(`/s4ldesktop`) || Path(`/s4ldesktopacad`) || Path(`/tis`) || Path(`/tiplite`) || Path(`/transpiled`) || Path(`/resource`) || PathPrefix(`/osparc/`) || PathPrefix(`/s4l/`) || PathPrefix(`/s4llite/`) || PathPrefix(`/s4lacad/`) || PathPrefix(`/s4lengine/`) || PathPrefix(`/s4ldesktop/`) || PathPrefix(`/s4ldesktopacad/`) || PathPrefix(`/tis/`) || PathPrefix(`/tiplite/`) || PathPrefix(`/transpiled/`) || PathPrefix(`/resource/`)) - traefik.http.routers.${SWARM_STACK_NAME}_static_webserver.service=${SWARM_STACK_NAME}_static_webserver - traefik.http.routers.${SWARM_STACK_NAME}_static_webserver.entrypoints=http @@ -666,17 +693,26 @@ services: init: true hostname: "wb-{{.Node.Hostname}}-{{.Task.Slot}}" # the hostname is used in conjonction with other services and must be unique see https://github.com/ITISFoundation/osparc-simcore/pull/5931 environment: &webserver_environment + <<: + - *postgres_settings + - 
*tracing_open_telemetry_environs + - *webserver_diagnostics_environs + AIODEBUG_SLOW_DURATION_SECS: ${AIODEBUG_SLOW_DURATION_SECS} SWARM_STACK_NAME: ${SWARM_STACK_NAME} WEBSERVER_DEV_FEATURES_ENABLED: ${WEBSERVER_DEV_FEATURES_ENABLED} + WEBSERVER_REALTIME_COLLABORATION: ${WEBSERVER_REALTIME_COLLABORATION} WEBSERVER_LOGLEVEL: ${WEBSERVER_LOGLEVEL} WEBSERVER_PROFILING: ${WEBSERVER_PROFILING} WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - WEBSERVER_LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} + WEBSERVER_LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} + + # NOTE: keep in sync with the prefix form the hostname + LONG_RUNNING_TASKS_NAMESPACE_SUFFIX: wb # WEBSERVER_SERVER_HOST @@ -699,20 +735,8 @@ services: # WEBSERVER_CREDIT_COMPUTATION WEBSERVER_CREDIT_COMPUTATION_ENABLED: ${WEBSERVER_CREDIT_COMPUTATION_ENABLED} - # WEBSERVER_DB - POSTGRES_DB: ${POSTGRES_DB} - POSTGRES_ENDPOINT: ${POSTGRES_ENDPOINT} - POSTGRES_HOST: ${POSTGRES_HOST} - POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} - POSTGRES_PORT: ${POSTGRES_PORT} - POSTGRES_USER: ${POSTGRES_USER} - # WEBSERVER_DIAGNOSTICS WEBSERVER_DIAGNOSTICS: ${WEBSERVER_DIAGNOSTICS} - DIAGNOSTICS_HEALTHCHECK_ENABLED: ${DIAGNOSTICS_HEALTHCHECK_ENABLED} - DIAGNOSTICS_MAX_AVG_LATENCY: ${DIAGNOSTICS_MAX_AVG_LATENCY} - DIAGNOSTICS_MAX_TASK_DELAY: ${DIAGNOSTICS_MAX_TASK_DELAY} - DIAGNOSTICS_SLOW_DURATION_SECS: ${DIAGNOSTICS_SLOW_DURATION_SECS} # WEBSERVER_DIRECTOR_V2 DIRECTOR_V2_HOST: ${DIRECTOR_V2_HOST} @@ -741,14 +765,14 @@ services: INVITATIONS_USERNAME: ${INVITATIONS_USERNAME} WEBSERVER_LICENSES: ${WEBSERVER_LICENSES} - LICENSES_ITIS_VIP_SYNCER_ENABLED : ${LICENSES_ITIS_VIP_SYNCER_ENABLED} + WEBSERVER_FOGBUGZ: ${WEBSERVER_FOGBUGZ} + LICENSES_ITIS_VIP_SYNCER_ENABLED: ${LICENSES_ITIS_VIP_SYNCER_ENABLED} LICENSES_ITIS_VIP_SYNCER_PERIODICITY: ${LICENSES_ITIS_VIP_SYNCER_PERIODICITY} LICENSES_ITIS_VIP_API_URL: ${LICENSES_ITIS_VIP_API_URL} LICENSES_ITIS_VIP_CATEGORIES: ${LICENSES_ITIS_VIP_CATEGORIES} LICENSES_SPEAG_PHANTOMS_API_URL: ${LICENSES_SPEAG_PHANTOMS_API_URL} LICENSES_SPEAG_PHANTOMS_CATEGORIES: ${LICENSES_SPEAG_PHANTOMS_CATEGORIES} - WEBSERVER_LOGIN: ${WEBSERVER_LOGIN} LOGIN_ACCOUNT_DELETION_RETENTION_DAYS: ${LOGIN_ACCOUNT_DELETION_RETENTION_DAYS} LOGIN_REGISTRATION_CONFIRMATION_REQUIRED: ${LOGIN_REGISTRATION_CONFIRMATION_REQUIRED} @@ -815,10 +839,6 @@ services: STUDIES_DEFAULT_SERVICE_THUMBNAIL: ${STUDIES_DEFAULT_SERVICE_THUMBNAIL} WEBSERVER_TRACING: ${WEBSERVER_TRACING} - TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} - TRACING_OPENTELEMETRY_COLLECTOR_BATCH_SIZE: ${TRACING_OPENTELEMETRY_COLLECTOR_BATCH_SIZE} - TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} - TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE: ${TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE} # WEBSERVER_PROJECTS WEBSERVER_PROJECTS: ${WEBSERVER_PROJECTS} @@ -826,7 +846,6 @@ services: PROJECTS_MAX_COPY_SIZE_BYTES: ${PROJECTS_MAX_COPY_SIZE_BYTES} PROJECTS_MAX_NUM_RUNNING_DYNAMIC_NODES: ${PROJECTS_MAX_NUM_RUNNING_DYNAMIC_NODES} - # WEBSERVER_RABBITMQ RABBIT_HOST: ${RABBIT_HOST} RABBIT_PASSWORD: ${RABBIT_PASSWORD} @@ -837,7 +856,6 @@ services: # WEBSERVER_TRASH TRASH_RETENTION_DAYS: ${TRASH_RETENTION_DAYS} - # ARBITRARY ENV VARS # see [https://docs.gunicorn.org/en/stable/settings.html#timeout], @@ -846,7 +864,6 @@ services: WEBSERVER_DB_LISTENER: ${WEBSERVER_DB_LISTENER} WEBSERVER_ANNOUNCEMENTS: ${WEBSERVER_ANNOUNCEMENTS} WEBSERVER_NOTIFICATIONS: ${WEBSERVER_NOTIFICATIONS} - WEBSERVER_CLUSTERS: 
${WEBSERVER_CLUSTERS} WEBSERVER_FUNCTIONS: ${WEBSERVER_FUNCTIONS} # neede for front-end WEBSERVER_GROUPS: ${WEBSERVER_GROUPS} WEBSERVER_PRODUCTS: ${WEBSERVER_PRODUCTS} @@ -857,6 +874,9 @@ services: WEBSERVER_FOLDERS: ${WEBSERVER_FOLDERS} deploy: + # NOTE: having 2 replicas is necessary to detect early on if in-process tasks are mistakenly added to the webserver + # in case this cannot be done otherwise, the sticky rule below will need to be adapted + replicas: 2 labels: - io.simcore.zone=${TRAEFIK_SIMCORE_ZONE} # gzip compression @@ -868,18 +888,38 @@ services: - traefik.http.services.${SWARM_STACK_NAME}_webserver.loadbalancer.healthcheck.path=/v0/ - traefik.http.services.${SWARM_STACK_NAME}_webserver.loadbalancer.healthcheck.interval=2000ms - traefik.http.services.${SWARM_STACK_NAME}_webserver.loadbalancer.healthcheck.timeout=1000ms - # NOTE: stickyness must remain until the long running tasks in the webserver are removed - # and also https://github.com/ITISFoundation/osparc-simcore/pull/4180 is resolved. - - traefik.http.services.${SWARM_STACK_NAME}_webserver.loadbalancer.sticky.cookie=true - - traefik.http.services.${SWARM_STACK_NAME}_webserver.loadbalancer.sticky.cookie.samesite=lax - - traefik.http.services.${SWARM_STACK_NAME}_webserver.loadbalancer.sticky.cookie.httponly=true - - traefik.http.services.${SWARM_STACK_NAME}_webserver.loadbalancer.sticky.cookie.secure=true + # NOTE: stickyness must remain only for specific endpoints, see https://github.com/ITISFoundation/osparc-simcore/pull/4180 - traefik.http.middlewares.${SWARM_STACK_NAME}_webserver_retry.retry.attempts=2 - traefik.http.routers.${SWARM_STACK_NAME}_webserver.service=${SWARM_STACK_NAME}_webserver + # NOTE: keep in sync with fallback router (rule and entrypoint) - traefik.http.routers.${SWARM_STACK_NAME}_webserver.rule=(Path(`/`) || Path(`/v0`) || Path(`/socket.io/`) || Path(`/static-frontend-data.json`) || PathRegexp(`^/study/(?P\b[0-9a-f]{8}\b-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-\b[0-9a-f]{12}\b)`) || Path(`/view`) || Path(`/#/view`) || Path(`/#/error`) || PathPrefix(`/v0/`)) - traefik.http.routers.${SWARM_STACK_NAME}_webserver.entrypoints=http - traefik.http.routers.${SWARM_STACK_NAME}_webserver.priority=6 - traefik.http.routers.${SWARM_STACK_NAME}_webserver.middlewares=${SWARM_STACK_NAME}_gzip@swarm, ${SWARM_STACK_NAME_NO_HYPHEN}_sslheader@swarm, ${SWARM_STACK_NAME}_webserver_retry + # Create a dedicated sticky service for specific endpoints + - traefik.http.services.${SWARM_STACK_NAME}_webserver_sticky.loadbalancer.server.port=8080 + - traefik.http.services.${SWARM_STACK_NAME}_webserver_sticky.loadbalancer.healthcheck.path=/v0/ + - traefik.http.services.${SWARM_STACK_NAME}_webserver_sticky.loadbalancer.healthcheck.interval=2000ms + - traefik.http.services.${SWARM_STACK_NAME}_webserver_sticky.loadbalancer.healthcheck.timeout=1000ms + - traefik.http.services.${SWARM_STACK_NAME}_webserver_sticky.loadbalancer.sticky.cookie=true + - traefik.http.services.${SWARM_STACK_NAME}_webserver_sticky.loadbalancer.sticky.cookie.secure=true + - traefik.http.services.${SWARM_STACK_NAME}_webserver_sticky.loadbalancer.sticky.cookie.httpOnly=true + - traefik.http.services.${SWARM_STACK_NAME}_webserver_sticky.loadbalancer.sticky.cookie.sameSite=lax + # Single consolidated router for all sticky endpoints + - traefik.http.routers.${SWARM_STACK_NAME}_webserver_sticky.rule=Path(`/v0/projects`) || + Path(`/v0/projects:clone`) || + PathRegexp(`^/v0/projects/[0-9a-fA-F-]+/nodes/[0-9a-fA-F-]+:stop`) || + 
PathRegexp(`^/v0/projects/[0-9a-fA-F-]+/nodes/[0-9a-fA-F-]+:open`) || + PathRegexp(`^/v0/projects/[0-9a-fA-F-]+/nodes/[0-9a-fA-F-]+:close`) || + PathRegexp(`^/v0/storage/locations/[0-9]+/paths/.+:size`) || + PathRegexp(`^/v0/storage/locations/[0-9]+/-/paths:batchDelete`) || + PathRegexp(`^/v0/storage/locations/[0-9]+/export-data`) || + PathRegexp(`^/v0/tasks-legacy/.+`) + # NOTE: the sticky router must have a higher priority than the webserver router but below dy-proxies + - traefik.http.routers.${SWARM_STACK_NAME}_webserver_sticky.priority=8 + - traefik.http.routers.${SWARM_STACK_NAME}_webserver_sticky.entrypoints=http + - traefik.http.routers.${SWARM_STACK_NAME}_webserver_sticky.service=${SWARM_STACK_NAME}_webserver_sticky + - traefik.http.routers.${SWARM_STACK_NAME}_webserver_sticky.middlewares=${SWARM_STACK_NAME}_gzip@swarm, ${SWARM_STACK_NAME_NO_HYPHEN}_sslheader@swarm, ${SWARM_STACK_NAME}_webserver_retry networks: &webserver_networks - default - interactive_services_subnet @@ -895,6 +935,8 @@ services: WEBSERVER_STATICWEB: "null" WEBSERVER_FUNCTIONS: ${WEBSERVER_FUNCTIONS} # needed for api-server + # NOTE: keep in sync with the prefix form the hostname + LONG_RUNNING_TASKS_NAMESPACE_SUFFIX: api networks: *webserver_networks @@ -903,19 +945,16 @@ services: init: true hostname: "db-{{.Node.Hostname}}-{{.Task.Slot}}" # the hostname is used in conjonction with other services and must be unique see https://github.com/ITISFoundation/osparc-simcore/pull/5931 environment: + <<: + - *postgres_settings WEBSERVER_LOGLEVEL: ${WB_DB_EL_LOGLEVEL} + # NOTE: keep in sync with the prefix form the hostname + LONG_RUNNING_TASKS_NAMESPACE_SUFFIX: db + WEBSERVER_HOST: ${WEBSERVER_HOST} WEBSERVER_PORT: ${WEBSERVER_PORT} - # WEBSERVER_DB - POSTGRES_DB: ${POSTGRES_DB} - POSTGRES_ENDPOINT: ${POSTGRES_ENDPOINT} - POSTGRES_HOST: ${POSTGRES_HOST} - POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} - POSTGRES_PORT: ${POSTGRES_PORT} - POSTGRES_USER: ${POSTGRES_USER} - DIRECTOR_V2_HOST: ${DIRECTOR_V2_HOST} DIRECTOR_V2_PORT: ${DIRECTOR_V2_PORT} @@ -927,13 +966,12 @@ services: GUNICORN_CMD_ARGS: ${WEBSERVER_GUNICORN_CMD_ARGS} LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} + LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} SWARM_STACK_NAME: ${SWARM_STACK_NAME} SESSION_SECRET_KEY: ${WEBSERVER_SESSION_SECRET_KEY} WEBSERVER_ACTIVITY: ${WB_DB_EL_ACTIVITY} WEBSERVER_ANNOUNCEMENTS: ${WB_DB_EL_ANNOUNCEMENTS} WEBSERVER_CATALOG: ${WB_DB_EL_CATALOG} - WEBSERVER_CLUSTERS: ${WB_DB_EL_CLUSTERS} WEBSERVER_DB_LISTENER: ${WB_DB_EL_DB_LISTENER} WEBSERVER_DIAGNOSTICS: ${WB_DB_EL_DIAGNOSTICS} WEBSERVER_EMAIL: ${WB_DB_EL_EMAIL} @@ -944,7 +982,8 @@ services: WEBSERVER_GARBAGE_COLLECTOR: ${WB_DB_EL_GARBAGE_COLLECTOR} WEBSERVER_GROUPS: ${WB_DB_EL_GROUPS} WEBSERVER_INVITATIONS: ${WB_DB_EL_INVITATIONS} - WEBSERVER_LICENSES: null + WEBSERVER_LICENSES: "null" + WEBSERVER_FOGBUGZ: "null" WEBSERVER_LOGIN: ${WB_DB_EL_LOGIN} WEBSERVER_PAYMENTS: ${WB_DB_EL_PAYMENTS} WEBSERVER_NOTIFICATIONS: ${WB_DB_EL_NOTIFICATIONS} @@ -990,6 +1029,9 @@ services: init: true hostname: "gc-{{.Node.Hostname}}-{{.Task.Slot}}" # the hostname is used in conjonction with other services and must be unique see https://github.com/ITISFoundation/osparc-simcore/pull/5931 environment: + <<: + - *postgres_settings + - *tracing_open_telemetry_environs # WEBSERVER_DIRECTOR_V2 DIRECTOR_V2_HOST: ${DIRECTOR_V2_HOST} @@ -997,16 +1039,14 @@ services: GUNICORN_CMD_ARGS: ${WEBSERVER_GUNICORN_CMD_ARGS} - LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} 
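# NOTE: the hunks above repeatedly replace per-service POSTGRES_* and TRACING_OPENTELEMETRY_COLLECTOR_*
# variables with YAML merge keys that reference the shared aliases *postgres_settings,
# *tracing_open_telemetry_environs and *webserver_diagnostics_environs. The anchor definitions themselves
# are not visible in this part of the diff; the sketch below (not part of the patch) only illustrates the
# merge-key pattern, using assumed extension-field names and a hypothetical service.
x-postgres-settings: &postgres_settings
  POSTGRES_DB: ${POSTGRES_DB}
  POSTGRES_HOST: ${POSTGRES_HOST}
  POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
  POSTGRES_PORT: ${POSTGRES_PORT}
  POSTGRES_USER: ${POSTGRES_USER}
x-tracing-open-telemetry: &tracing_open_telemetry_environs
  TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT}
  TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT}
services:
  example-service: # hypothetical service, for illustration only
    environment:
      <<: # merges both mappings into this service's environment
        - *postgres_settings
        - *tracing_open_telemetry_environs
      LOG_LEVEL: INFO # service-specific keys sit alongside the merged ones
# NOTE: keys written out explicitly under "environment:" take precedence over merged values, which is how
# individual services in this file override single settings after the merge.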
+ LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - # WEBSERVER_DB - POSTGRES_DB: ${POSTGRES_DB} - POSTGRES_ENDPOINT: ${POSTGRES_ENDPOINT} - POSTGRES_HOST: ${POSTGRES_HOST} - POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} - POSTGRES_PORT: ${POSTGRES_PORT} - POSTGRES_USER: ${POSTGRES_USER} + # NOTE: keep in sync with the prefix form the hostname + LONG_RUNNING_TASKS_NAMESPACE_SUFFIX: gc + + + # WEBSERVER_RABBITMQ RABBIT_HOST: ${RABBIT_HOST} @@ -1046,7 +1086,6 @@ services: WEBSERVER_ACTIVITY: ${WB_GC_ACTIVITY} WEBSERVER_ANNOUNCEMENTS: ${WB_GC_ANNOUNCEMENTS} WEBSERVER_CATALOG: ${WB_GC_CATALOG} - WEBSERVER_CLUSTERS: ${WB_GC_CLUSTERS} WEBSERVER_DB_LISTENER: ${WB_GC_DB_LISTENER} WEBSERVER_DIAGNOSTICS: ${WB_GC_DIAGNOSTICS} WEBSERVER_EMAIL: ${WB_GC_EMAIL} @@ -1058,7 +1097,8 @@ services: WEBSERVER_GROUPS: ${WB_GC_GROUPS} WEBSERVER_HOST: ${WEBSERVER_HOST} WEBSERVER_INVITATIONS: ${WB_GC_INVITATIONS} - WEBSERVER_LICENSES: null + WEBSERVER_LICENSES: "null" + WEBSERVER_FOGBUGZ: "null" WEBSERVER_LOGIN: ${WB_GC_LOGIN} WEBSERVER_LOGLEVEL: ${WB_GC_LOGLEVEL} WEBSERVER_NOTIFICATIONS: ${WB_GC_NOTIFICATIONS} @@ -1076,11 +1116,85 @@ services: WEBSERVER_USERS: ${WB_GC_USERS} WEBSERVER_WALLETS: ${WB_GC_WALLETS} - networks: - default - interactive_services_subnet + wb-auth: + image: ${DOCKER_REGISTRY:-itisfoundation}/webserver:${DOCKER_IMAGE_TAG:-latest} + init: true + hostname: "auth-{{.Node.Hostname}}-{{.Task.Slot}}" # the hostname is used in conjonction with other services and must be unique see https://github.com/ITISFoundation/osparc-simcore/pull/5931 + environment: + <<: + - *postgres_settings + - *tracing_open_telemetry_environs + - *webserver_diagnostics_environs + + APP_NAME: "simcore_service_wb_auth" + WEBSERVER_APP_FACTORY_NAME: WEBSERVER_AUTHZ_APP_FACTORY + WEBSERVER_LOGLEVEL: ${WB_AUTH_LOGLEVEL} + + # NOTE: keep in sync with the prefix form the hostname + LONG_RUNNING_TASKS_NAMESPACE_SUFFIX: auth + + GUNICORN_CMD_ARGS: ${WEBSERVER_GUNICORN_CMD_ARGS} + + + # WEBSERVER_DIAGNOSTICS + WEBSERVER_DIAGNOSTICS: ${WB_AUTH_DIAGNOSTICS} + + # WEBSERVER_REST + REST_SWAGGER_API_DOC_ENABLED: 0 + + # WEBSERVER_SERVER_HOST + WEBSERVER_HOST: ${WB_AUTH_WEBSERVER_HOST} + WEBSERVER_PORT: ${WB_AUTH_WEBSERVER_PORT} + + # WEBSERVER_SESSION Enabled + SESSION_SECRET_KEY: ${WEBSERVER_SESSION_SECRET_KEY} + SESSION_COOKIE_MAX_AGE: ${SESSION_COOKIE_MAX_AGE} + SESSION_COOKIE_SAMESITE: ${SESSION_COOKIE_SAMESITE} + SESSION_COOKIE_SECURE: ${SESSION_COOKIE_SECURE} + SESSION_COOKIE_HTTPONLY: ${SESSION_COOKIE_HTTPONLY} + + + WEBSERVER_ACTIVITY: "null" + WEBSERVER_ANNOUNCEMENTS: 0 + WEBSERVER_CATALOG: "null" + WEBSERVER_DB_LISTENER: 0 + WEBSERVER_DIRECTOR_V2: "null" + WEBSERVER_EMAIL: "null" + WEBSERVER_EXPORTER: "null" + WEBSERVER_FOLDERS: 0 + WEBSERVER_FRONTEND: "null" + WEBSERVER_FUNCTIONS: 0 + WEBSERVER_GARBAGE_COLLECTOR: "null" + WEBSERVER_GROUPS: 0 + WEBSERVER_INVITATIONS: "null" + WEBSERVER_LICENSES: "null" + WEBSERVER_FOGBUGZ: "null" + WEBSERVER_LOGIN: "null" + WEBSERVER_NOTIFICATIONS: 0 + WEBSERVER_PAYMENTS: "null" + WEBSERVER_PROFILING: ${WB_AUTH_PROFILING} + WEBSERVER_PRODUCTS: 1 + WEBSERVER_PROJECTS: "null" + WEBSERVER_PUBLICATIONS: 0 + WEBSERVER_RABBITMQ: "null" + WEBSERVER_REALTIME_COLLABORATION: "null" + WEBSERVER_REDIS: "null" + WEBSERVER_RESOURCE_USAGE_TRACKER: "null" + WEBSERVER_SCICRUNCH: "null" + WEBSERVER_SOCKETIO: 0 + WEBSERVER_STATICWEB: "null" + WEBSERVER_STORAGE: "null" + WEBSERVER_STUDIES_DISPATCHER: "null" + WEBSERVER_TAGS: 0 + WEBSERVER_TRACING: 
${WB_AUTH_TRACING} + WEBSERVER_USERS: "null" + networks: + - default + agent: image: ${DOCKER_REGISTRY:-itisfoundation}/agent:${DOCKER_IMAGE_TAG:-latest} init: true @@ -1097,7 +1211,7 @@ services: environment: AGENT_LOGLEVEL: ${AGENT_LOGLEVEL} LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} + LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} AGENT_VOLUMES_CLEANUP_S3_ENDPOINT: ${AGENT_VOLUMES_CLEANUP_S3_ENDPOINT} AGENT_VOLUMES_CLEANUP_S3_REGION: ${AGENT_VOLUMES_CLEANUP_S3_REGION} AGENT_VOLUMES_CLEANUP_S3_ACCESS_KEY: ${AGENT_VOLUMES_CLEANUP_S3_ACCESS_KEY} @@ -1112,8 +1226,7 @@ services: RABBIT_SECURE: ${RABBIT_SECURE} AGENT_TRACING: ${AGENT_TRACING} - TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} - TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} + <<: *tracing_open_telemetry_environs notifications: image: ${DOCKER_REGISTRY:-itisfoundation}/notifications:${DOCKER_IMAGE_TAG:-latest} @@ -1121,29 +1234,21 @@ services: hostname: "{{.Node.Hostname}}-{{.Task.Slot}}" environment: - LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} + <<: + - *postgres_settings + - *tracing_open_telemetry_environs + LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} NOTIFICATIONS_LOGLEVEL: ${NOTIFICATIONS_LOGLEVEL} NOTIFICATIONS_TRACING: ${NOTIFICATIONS_TRACING} - POSTGRES_DB: ${POSTGRES_DB} - POSTGRES_ENDPOINT: ${POSTGRES_ENDPOINT} - POSTGRES_HOST: ${POSTGRES_HOST} - POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} - POSTGRES_PORT: ${POSTGRES_PORT} - POSTGRES_USER: ${POSTGRES_USER} - RABBIT_HOST: ${RABBIT_HOST} RABBIT_PASSWORD: ${RABBIT_PASSWORD} RABBIT_PORT: ${RABBIT_PORT} RABBIT_SECURE: ${RABBIT_SECURE} RABBIT_USER: ${RABBIT_USER} - TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} - TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} - - dask-sidecar: image: ${DOCKER_REGISTRY:-itisfoundation}/dask-sidecar:${DOCKER_IMAGE_TAG:-latest} init: true @@ -1164,7 +1269,7 @@ services: DASK_TLS_CERT: ${DASK_TLS_CERT} DASK_SCHEDULER_HOST: ${DASK_SCHEDULER_HOST:-dask-scheduler} DASK_LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - DASK_LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} + DASK_LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} DASK_SIDECAR_LOGLEVEL: ${DASK_SIDECAR_LOGLEVEL} SIDECAR_COMP_SERVICES_SHARED_VOLUME_NAME: ${SWARM_STACK_NAME}_computational_shared_data SIDECAR_COMP_SERVICES_SHARED_FOLDER: ${SIDECAR_COMP_SERVICES_SHARED_FOLDER:-/home/scu/computational_shared_data} @@ -1196,26 +1301,22 @@ services: networks: - storage_subnet environment: - DATCORE_ADAPTER_LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} + DATCORE_ADAPTER_LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} DATCORE_ADAPTER_LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} DATCORE_ADAPTER_TRACING: ${DATCORE_ADAPTER_TRACING} - TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} - TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} + <<: *tracing_open_telemetry_environs storage: image: ${DOCKER_REGISTRY:-itisfoundation}/storage:${DOCKER_IMAGE_TAG:-latest} init: true hostname: "sto-{{.Node.Hostname}}-{{.Task.Slot}}" environment: &storage_environment + <<: + - *postgres_settings + - *tracing_open_telemetry_environs DATCORE_ADAPTER_HOST: ${DATCORE_ADAPTER_HOST:-datcore-adapter} LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} - LOG_FILTER_MAPPING : 
${LOG_FILTER_MAPPING} - POSTGRES_DB: ${POSTGRES_DB} - POSTGRES_ENDPOINT: ${POSTGRES_ENDPOINT} - POSTGRES_HOST: ${POSTGRES_HOST} - POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} - POSTGRES_PORT: ${POSTGRES_PORT} - POSTGRES_USER: ${POSTGRES_USER} + LOG_FILTER_MAPPING: ${LOG_FILTER_MAPPING} RABBIT_HOST: ${RABBIT_HOST} RABBIT_PASSWORD: ${RABBIT_PASSWORD} RABBIT_PORT: ${RABBIT_PORT} @@ -1236,8 +1337,7 @@ services: STORAGE_MONITORING_ENABLED: 1 STORAGE_PROFILING: ${STORAGE_PROFILING} STORAGE_PORT: ${STORAGE_PORT} - TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} - TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} + STORAGE_TRACING: ${STORAGE_TRACING} networks: &storage_networks - default - interactive_services_subnet @@ -1249,6 +1349,8 @@ services: hostname: "sto-worker-{{.Node.Hostname}}-{{.Task.Slot}}" environment: <<: *storage_environment + STORAGE_TRACING: "null" + STORAGE_WORKER_NAME: "sto-worker-{{.Node.Hostname}}-{{.Task.Slot}}-{{.Task.ID}}" STORAGE_WORKER_MODE: "true" CELERY_CONCURRENCY: 100 networks: *storage_networks @@ -1259,13 +1361,15 @@ services: hostname: "sto-worker-cpu-bound-{{.Node.Hostname}}-{{.Task.Slot}}" environment: <<: *storage_environment + STORAGE_TRACING: "null" + STORAGE_WORKER_NAME: "sto-worker-cpu-bound-{{.Node.Hostname}}-{{.Task.Slot}}-{{.Task.ID}}" STORAGE_WORKER_MODE: "true" CELERY_CONCURRENCY: 1 CELERY_QUEUES: "cpu_bound" networks: *storage_networks rabbit: - image: itisfoundation/rabbitmq:3.13.7-management + image: itisfoundation/rabbitmq:4.1.2-management init: true hostname: "{{.Node.Hostname}}-{{.Task.Slot}}" environment: @@ -1291,12 +1395,7 @@ services: init: true hostname: "{{.Node.Hostname}}-{{.Task.Slot}}" environment: - POSTGRES_DB: ${POSTGRES_DB} - POSTGRES_ENDPOINT: ${POSTGRES_ENDPOINT} - POSTGRES_HOST: ${POSTGRES_HOST} - POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} - POSTGRES_PORT: ${POSTGRES_PORT} - POSTGRES_USER: ${POSTGRES_USER} + <<: *postgres_settings networks: - default # actually needed for the postgres service only @@ -1331,7 +1430,21 @@ services: # - net.ipv4.tcp_keepalive_intvl=600 # - net.ipv4.tcp_keepalive_probes=9 # - net.ipv4.tcp_keepalive_time=600 - command: [ "postgres", "-c", "tcp_keepalives_idle=600", "-c", "tcp_keepalives_interval=600", "-c", "tcp_keepalives_count=5", "-c", "max_connections=413", "-c", "shared_buffers=256MB" ] + # + command: + [ + "postgres", + "-c", "tcp_keepalives_idle=600", + "-c", "tcp_keepalives_interval=600", + "-c", "tcp_keepalives_count=5", + "-c", "max_connections=413", + "-c", "shared_buffers=256MB", + # statement_timeout is set to 120 seconds (120_000 in ms), so that long running queries + # are killed after 2 minutes. Since simcore services have timeout of 1 minute, so longer + # queries will not be used. Setting >1 minutes to be safe + # https://github.com/ITISFoundation/osparc-simcore/issues/7682#issuecomment-2923048445 + "-c", "statement_timeout=120000" + ] redis: image: "redis:6.2.6@sha256:4bed291aa5efb9f0d77b76ff7d4ab71eee410962965d052552db1fb80576431d" @@ -1342,22 +1455,11 @@ services: # also aof (append only) is also enabled such that we get full durability at the expense # of backup size. The backup is written into /data. 
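# NOTE: in the storage hunks above, sto-worker and sto-worker-cpu-bound reuse the storage service's
# environment through the *storage_environment alias and only override worker-specific keys. A condensed
# sketch of that override pattern (keys and values taken from the hunks above, all other settings omitted;
# not part of the patch):
services:
  storage:
    environment: &storage_environment # the full storage environment is defined once and anchored here
      STORAGE_TRACING: ${STORAGE_TRACING}
      # ...remaining storage settings...
  sto-worker:
    environment:
      <<: *storage_environment # start from the storage environment
      STORAGE_TRACING: "null" # explicit keys override the merged values
      STORAGE_WORKER_MODE: "true"
      CELERY_CONCURRENCY: 100
  sto-worker-cpu-bound:
    environment:
      <<: *storage_environment
      STORAGE_TRACING: "null"
      STORAGE_WORKER_MODE: "true"
      CELERY_CONCURRENCY: 1
      CELERY_QUEUES: "cpu_bound" # cpu-bound tasks are routed to a dedicated low-concurrency worker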
# https://redis.io/topics/persistence
- [
- "redis-server",
- "--save",
- "60 1",
- "--loglevel",
- "verbose",
- "--databases",
- "10",
- "--appendonly",
- "yes",
- "--requirepass",
- "${REDIS_PASSWORD}"
- ]
+ [ "redis-server", "--save", "60 1", "--loglevel", "verbose", "--databases", "11", "--appendonly", "yes", "--requirepass", "${REDIS_PASSWORD}" ]
networks:
- default
- autoscaling_subnet
+ - interactive_services_subnet
volumes:
- redis-data:/data
healthcheck:
@@ -1367,7 +1469,7 @@
retries: 50

traefik:
- image: "traefik:v3.4.0@sha256:4cf907247939b5d20bf4eff73abd21cb413c339600dde76dbc94a874b2578a27"
+ image: "traefik:v3.5.2@sha256:07ff0c6c2114233b82e1de8e9f4fee9974470cd8d42c22e4e158538d950e19ae"
init: true
hostname: "{{.Node.Hostname}}-{{.Task.Slot}}"
command:
@@ -1385,10 +1487,12 @@
- "--metrics.prometheus.entryPoint=metrics"
- "--entryPoints.http.address=:80"
- "--entryPoints.http.forwardedHeaders.insecure"
- - "--entryPoints.http.transport.respondingTimeouts.readTimeout=21600s" #6h, for https://github.com/traefik/traefik/issues/10805
- "--entryPoints.simcore_api.address=:10081"
+ - "--entryPoints.http.transport.respondingTimeouts.readTimeout=21600s" #6h, for https://github.com/traefik/traefik/issues/10805 large file uploads
+ - "--entryPoints.http.transport.respondingTimeouts.writeTimeout=21600s" #6h, for https://github.com/traefik/traefik/issues/10805 large file downloads
- "--entryPoints.simcore_api.address=:10081"
- "--entryPoints.simcore_api.forwardedHeaders.insecure"
- - "--entryPoints.simcore_api.transport.respondingTimeouts.readTimeout=21600s" #6h, for https://github.com/traefik/traefik/issues/10805
+ - "--entryPoints.simcore_api.transport.respondingTimeouts.readTimeout=21600s" #6h, for https://github.com/traefik/traefik/issues/10805 large file uploads
+ - "--entryPoints.simcore_api.transport.respondingTimeouts.writeTimeout=21600s" #6h, for https://github.com/traefik/traefik/issues/10805 large file downloads
- "--entryPoints.traefik_monitor.address=:8080"
- "--entryPoints.traefik_monitor.forwardedHeaders.insecure"
- "--providers.swarm.endpoint=unix:///var/run/docker.sock"
@@ -1401,6 +1505,14 @@
- "--tracing.addinternals"
- "--tracing.otlp=true"
- "--tracing.otlp.http=true"
+ healthcheck:
+ # NOTE: this healthcheck to check if traefik is up and running must be run on the ping entrypoint defined in command!
+ test: traefik healthcheck --ping --ping.entryPoint=ping --entryPoints.ping.address=:9082 + interval: 10s + timeout: 5s + retries: 5 + start_period: 10s + start_interval: 1s volumes: # So that Traefik can listen to the Docker events - /var/run/docker.sock:/var/run/docker.sock @@ -1426,6 +1538,62 @@ services: - default - interactive_services_subnet # for legacy dynamic services + # use to define fallback routes for simcore services + # if docker healthcheck fails, container's traefik configuration is removed + # leading to 404 https://github.com/traefik/traefik/issues/7842 + # + # use fallback routes to return proper 503 (instead of 404) + # this service must be running at all times + traefik-config-placeholder: + image: busybox:1.35.0 + command: sleep infinity + networks: + - default + deploy: + labels: + # route to internal traefik + - traefik.enable=true + - io.simcore.zone=${TRAEFIK_SIMCORE_ZONE} + + ### Fallback for api-server + - traefik.http.routers.${SWARM_STACK_NAME}_api-server_fallback.rule=(Path(`/`) || Path(`/v0`) || PathPrefix(`/v0/`) || Path(`/api/v0/openapi.json`)) + - traefik.http.routers.${SWARM_STACK_NAME}_api-server_fallback.service=${SWARM_STACK_NAME}_api-server_fallback + - traefik.http.routers.${SWARM_STACK_NAME}_api-server_fallback.entrypoints=simcore_api + - traefik.http.routers.${SWARM_STACK_NAME}_api-server_fallback.priority=1 + # always fail and return 503 via unhealthy loadbalancer healthcheck + - traefik.http.services.${SWARM_STACK_NAME}_api-server_fallback.loadbalancer.server.port=0 # port is required (otherwise traefik service is not created) + - traefik.http.services.${SWARM_STACK_NAME}_api-server_fallback.loadbalancer.healthcheck.path=/some/invalid/path/to/generate/a/503 + - traefik.http.services.${SWARM_STACK_NAME}_api-server_fallback.loadbalancer.healthcheck.interval=10s + - traefik.http.services.${SWARM_STACK_NAME}_api-server_fallback.loadbalancer.healthcheck.timeout=1ms + + ### Fallback for webserver + - traefik.http.routers.${SWARM_STACK_NAME}_webserver_fallback.service=${SWARM_STACK_NAME}_webserver_fallback + - traefik.http.routers.${SWARM_STACK_NAME}_webserver_fallback.rule=(Path(`/`) || Path(`/v0`) || Path(`/socket.io/`) || Path(`/static-frontend-data.json`) || PathRegexp(`^/study/(?P\b[0-9a-f]{8}\b-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-\b[0-9a-f]{12}\b)`) || Path(`/view`) || Path(`/#/view`) || Path(`/#/error`) || PathPrefix(`/v0/`)) + - traefik.http.routers.${SWARM_STACK_NAME}_webserver_fallback.entrypoints=http + - traefik.http.routers.${SWARM_STACK_NAME}_webserver_fallback.priority=1 + # always fail and return 503 via unhealthy loadbalancer healthcheck + - traefik.http.services.${SWARM_STACK_NAME}_webserver_fallback.loadbalancer.server.port=0 + - traefik.http.services.${SWARM_STACK_NAME}_webserver_fallback.loadbalancer.healthcheck.path=/v0/ + - traefik.http.services.${SWARM_STACK_NAME}_webserver_fallback.loadbalancer.healthcheck.interval=10s + - traefik.http.services.${SWARM_STACK_NAME}_webserver_fallback.loadbalancer.healthcheck.timeout=1ms + + ### Fallback for static-webserver + - traefik.http.routers.${SWARM_STACK_NAME}_static_webserver_fallback.rule=(Path(`/osparc`) || Path(`/s4l`) || Path(`/s4llite`) || Path(`/s4lacad`) || Path(`/s4lengine`) || Path(`/s4ldesktop`) || Path(`/s4ldesktopacad`) || Path(`/tis`) || Path(`/tiplite`) || Path(`/transpiled`) || Path(`/resource`) || PathPrefix(`/osparc/`) || PathPrefix(`/s4l/`) || PathPrefix(`/s4llite/`) || PathPrefix(`/s4lacad/`) || PathPrefix(`/s4lengine/`) || PathPrefix(`/s4ldesktop/`) || 
PathPrefix(`/s4ldesktopacad/`) || PathPrefix(`/tis/`) || PathPrefix(`/tiplite/`) || PathPrefix(`/transpiled/`) || PathPrefix(`/resource/`)) + - traefik.http.routers.${SWARM_STACK_NAME}_static_webserver_fallback.service=${SWARM_STACK_NAME}_static_webserver_fallback + - traefik.http.routers.${SWARM_STACK_NAME}_static_webserver_fallback.entrypoints=http + - traefik.http.routers.${SWARM_STACK_NAME}_static_webserver_fallback.priority=1 + # always fail and return 503 via unhealthy loadbalancer healthcheck + - traefik.http.services.${SWARM_STACK_NAME}_static_webserver_fallback.loadbalancer.server.port=0 + - traefik.http.services.${SWARM_STACK_NAME}_static_webserver_fallback.loadbalancer.healthcheck.path=/some/invalid/path/to/generate/a/503 + - traefik.http.services.${SWARM_STACK_NAME}_static_webserver_fallback.loadbalancer.healthcheck.interval=10s + - traefik.http.services.${SWARM_STACK_NAME}_static_webserver_fallback.loadbalancer.healthcheck.timeout=1ms + healthcheck: + test: command -v sleep + interval: 10s + timeout: 1s + start_period: 1s + retries: 3 + volumes: postgres_data: name: ${SWARM_STACK_NAME}_postgres_data diff --git a/services/dynamic-scheduler/Dockerfile b/services/dynamic-scheduler/Dockerfile index 1fba0ab21ccc..380ac0eaba66 100644 --- a/services/dynamic-scheduler/Dockerfile +++ b/services/dynamic-scheduler/Dockerfile @@ -2,7 +2,7 @@ # Define arguments in the global scope ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build FROM python:${PYTHON_VERSION}-slim-bookworm AS base-arm64 @@ -31,6 +31,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=private \ set -eux && \ apt-get update && \ apt-get install -y --no-install-recommends \ + fd-find \ gosu \ && apt-get clean -y \ && rm -rf /var/lib/apt/lists/* \ @@ -87,10 +88,7 @@ RUN uv venv "${VIRTUAL_ENV}" -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools + WORKDIR /build @@ -107,6 +105,9 @@ WORKDIR /build FROM build AS prod-only-deps ENV SC_BUILD_TARGET=prod-only-deps +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy WORKDIR /build/services/dynamic-scheduler @@ -132,8 +133,6 @@ ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production ENV PYTHONOPTIMIZE=TRUE -# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode -ENV UV_COMPILE_BYTECODE=1 WORKDIR /home/scu diff --git a/services/dynamic-scheduler/Makefile b/services/dynamic-scheduler/Makefile index f46337a76671..4d98e392aa3a 100644 --- a/services/dynamic-scheduler/Makefile +++ b/services/dynamic-scheduler/Makefile @@ -15,4 +15,4 @@ openapi.json: .env-ignore ## produces openapi.json @set -o allexport; \ source $<; \ set +o allexport; \ - python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(the_app.openapi(), indent=2) )" > $@ + python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app_factory().openapi(), indent=2) )" > $@ diff --git a/services/dynamic-scheduler/docker/boot.sh b/services/dynamic-scheduler/docker/boot.sh index dae7ea09e1b2..382bfd14d015 100755 --- a/services/dynamic-scheduler/docker/boot.sh +++ b/services/dynamic-scheduler/docker/boot.sh @@ -24,7 +24,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/dynamic-scheduler - uv pip --quiet sync requirements/dev.txt + uv pip --quiet sync --link-mode=copy requirements/dev.txt cd - echo "$INFO" "PIP :" uv 
pip list @@ -33,7 +33,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy if command -v uv >/dev/null 2>&1; then - uv pip install debugpy + uv pip install --link-mode=copy debugpy else pip install debugpy fi @@ -48,19 +48,22 @@ SERVER_LOG_LEVEL=$(echo "${APP_LOG_LEVEL}" | tr '[:upper:]' '[:lower:]') echo "$INFO" "Log-level app/server: $APP_LOG_LEVEL/$SERVER_LOG_LEVEL" if [ "${SC_BOOT_MODE}" = "debug" ]; then - reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path "*/src/*" ! -path "*.*" -exec echo '--reload-dir {} \' \;) + reload_dir_packages=$(fdfind src /devel/packages --exec echo '--reload-dir {} ' | tr '\n' ' ') exec sh -c " cd services/dynamic-scheduler/src/simcore_service_dynamic_scheduler && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DYNAMIC_SCHEDULER_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DYNAMIC_SCHEDULER_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ - $reload_dir_packages + $reload_dir_packages \ --reload-dir . \ --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_dynamic_scheduler.main:the_app \ + exec uvicorn \ + --factory simcore_service_dynamic_scheduler.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/dynamic-scheduler/docker/entrypoint.sh b/services/dynamic-scheduler/docker/entrypoint.sh index 25153a6b2a2a..357d8b604d98 100755 --- a/services/dynamic-scheduler/docker/entrypoint.sh +++ b/services/dynamic-scheduler/docker/entrypoint.sh @@ -19,6 +19,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE @@ -56,10 +57,9 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; - # change user property of files already around + fdfind --owner ":$SC_USER_ID" --exclude proc --exec-batch chgrp --no-dereference "$CONT_GROUPNAME" . '/' echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fdfind --owner "$SC_USER_ID:" --exclude proc --exec-batch chown --no-dereference "$SC_USER_NAME" . 
'/' fi fi diff --git a/services/dynamic-scheduler/openapi.json b/services/dynamic-scheduler/openapi.json index 9f6867c68728..8f234eaad54b 100644 --- a/services/dynamic-scheduler/openapi.json +++ b/services/dynamic-scheduler/openapi.json @@ -224,6 +224,12 @@ ], "title": "Service Message", "description": "additional information related to service state" + }, + "is_collaborative": { + "type": "boolean", + "title": "Is Collaborative", + "description": "True if service allows collaboration (multi-tenant access)", + "default": false } }, "type": "object", diff --git a/services/dynamic-scheduler/requirements/_base.in b/services/dynamic-scheduler/requirements/_base.in index 93a56922f0b2..db00ff78645f 100644 --- a/services/dynamic-scheduler/requirements/_base.in +++ b/services/dynamic-scheduler/requirements/_base.in @@ -18,5 +18,5 @@ nicegui packaging python-socketio -typer[all] +typer u-msgpack-python diff --git a/services/dynamic-scheduler/requirements/_base.txt b/services/dynamic-scheduler/requirements/_base.txt index 9dc7a93fb0ea..efc8a40b39d5 100644 --- a/services/dynamic-scheduler/requirements/_base.txt +++ b/services/dynamic-scheduler/requirements/_base.txt @@ -12,7 +12,7 @@ aiofiles==24.1.0 # nicegui aiohappyeyeballs==2.5.0 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -51,6 +51,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi asyncpg==0.30.0 @@ -82,19 +84,14 @@ certifi==2025.1.31 # httpx # nicegui # requests + # sentry-sdk charset-normalizer==3.4.1 # via requests -click==8.1.8 +click==8.2.1 # via # rich-toolkit # typer # uvicorn -deprecated==1.2.18 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions dnspython==2.7.0 # via email-validator docutils==0.21.2 @@ -107,13 +104,15 @@ exceptiongroup==1.2.2 # via aio-pika fast-depends==2.4.12 # via faststream -fastapi==0.115.12 +fastapi==0.116.1 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi-lifespan-manager # nicegui -fastapi-cli==0.0.7 +fastapi-cli==0.0.8 # via fastapi +fastapi-cloud-cli==0.1.5 + # via fastapi-cli fastapi-lifespan-manager==0.1.4 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in faststream==0.5.35 @@ -122,7 +121,7 @@ frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.69.1 +googleapis-common-protos==1.70.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -130,16 +129,17 @@ greenlet==3.1.1 # via sqlalchemy grpcio==1.70.0 # via opentelemetry-exporter-otlp-proto-grpc -h11==0.14.0 +h11==0.16.0 # via # httpcore + # nicegui # uvicorn # wsproto h2==4.2.0 # via httpx hpack==4.1.0 # via h2 -httpcore==1.0.7 +httpcore==1.0.9 # via httpx httptools==0.6.4 # via uvicorn @@ -161,6 +161,7 @@ httpx==0.28.1 # -c requirements/../../../requirements/constraints.txt 
# -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi + # fastapi-cloud-cli # nicegui hyperframe==6.1.0 # via h2 @@ -195,13 +196,17 @@ jinja2==3.1.6 # -c requirements/../../../requirements/constraints.txt # fastapi # nicegui +jsonref==1.1.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.23.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema-specifications==2024.10.1 # via jsonschema -mako==1.3.9 +mako==1.3.10 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -232,9 +237,9 @@ multidict==6.1.0 # via # aiohttp # yarl -nicegui==2.12.1 +nicegui==2.23.3 # via -r requirements/_base.in -opentelemetry-api==1.30.0 +opentelemetry-api==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -250,17 +255,17 @@ opentelemetry-api==1.30.0 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.30.0 +opentelemetry-exporter-otlp==1.34.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.30.0 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.30.0 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.30.0 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.51b0 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi @@ -270,33 +275,33 @@ opentelemetry-instrumentation==0.51b0 # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.51b0 +opentelemetry-instrumentation-aio-pika==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-asgi==0.51b0 +opentelemetry-instrumentation-asgi==0.55b1 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-asyncpg==0.51b0 - # via -r requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-instrumentation-fastapi==0.51b0 +opentelemetry-instrumentation-asyncpg==0.55b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-fastapi==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.51b0 +opentelemetry-instrumentation-httpx==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-logging==0.51b0 +opentelemetry-instrumentation-logging==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.51b0 
+opentelemetry-instrumentation-redis==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.51b0 +opentelemetry-instrumentation-requests==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.30.0 +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.30.0 +opentelemetry-sdk==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.51b0 +opentelemetry-semantic-conventions==0.55b1 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi @@ -306,7 +311,7 @@ opentelemetry-semantic-conventions==0.51b0 # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.51b0 +opentelemetry-util-http==0.55b1 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi @@ -350,7 +355,7 @@ propcache==0.3.0 # via # aiohttp # yarl -protobuf==5.29.3 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto @@ -362,7 +367,7 @@ psycopg2-binary==2.9.10 # via sqlalchemy pycryptodome==3.21.0 # via stream-zip -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -393,11 +398,12 @@ pydantic==2.10.6 # -r requirements/../../../packages/settings-library/requirements/_base.in # fast-depends # fastapi + # fastapi-cloud-cli # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.2 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -440,13 +446,15 @@ python-dotenv==1.0.1 # via # pydantic-settings # uvicorn -python-engineio==4.11.2 - # via python-socketio +python-engineio==4.12.2 + # via + # nicegui + # python-socketio python-multipart==0.0.20 # via # fastapi # nicegui -python-socketio==5.12.1 +python-socketio==5.13.0 # via # -r requirements/_base.in # nicegui @@ -503,22 +511,26 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 - # via - # nicegui - # opentelemetry-exporter-otlp-proto-http -rich==13.9.4 +requests==2.32.4 + # via opentelemetry-exporter-otlp-proto-http +rich==14.1.0 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # rich-toolkit # typer -rich-toolkit==0.14.7 - # via fastapi-cli +rich-toolkit==0.15.0 + # via + # fastapi-cli + # fastapi-cloud-cli +rignore==0.6.4 + # via fastapi-cloud-cli rpds-py==0.23.1 # via # jsonschema # referencing +sentry-sdk==2.35.0 + # via fastapi-cloud-cli shellingham==1.5.4 # via typer simple-websocket==1.1.0 @@ -526,7 +538,9 @@ simple-websocket==1.1.0 six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via anyio + # via + # anyio + # 
asgi-lifespan sqlalchemy==1.4.54 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -545,7 +559,7 @@ sqlalchemy==1.4.54 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/postgres-database/requirements/_base.in # alembic -starlette==0.46.0 +starlette==0.47.2 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -562,6 +576,7 @@ starlette==0.46.0 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # fastapi + # nicegui stream-zip==0.0.83 # via -r requirements/../../../packages/service-library/requirements/_base.in tenacity==9.0.0 @@ -570,15 +585,16 @@ toolz==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.15.2 +typer==0.16.1 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fastapi-cli + # fastapi-cloud-cli types-python-dateutil==2.9.0.20241206 # via arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # aiodebug # alembic @@ -586,15 +602,23 @@ typing-extensions==4.12.2 # fastapi # faststream # nicegui + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pydantic # pydantic-core # pydantic-extra-types # rich-toolkit + # starlette # typer + # typing-inspection +typing-inspection==0.4.1 + # via pydantic u-msgpack-python==2.8.0 # via -r requirements/_base.in -urllib3==2.3.0 +urllib3==2.5.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -610,12 +634,13 @@ urllib3==2.3.0 # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt - # nicegui # requests + # sentry-sdk uvicorn==0.34.2 # via # fastapi # fastapi-cli + # fastapi-cloud-cli # nicegui uvloop==0.21.0 # via uvicorn @@ -629,7 +654,6 @@ websockets==15.0.1 # via uvicorn wrapt==1.17.2 # via - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-httpx diff --git a/services/dynamic-scheduler/requirements/_test.in b/services/dynamic-scheduler/requirements/_test.in index 840e5093b130..9a5d5c342ddb 100644 --- a/services/dynamic-scheduler/requirements/_test.in +++ b/services/dynamic-scheduler/requirements/_test.in @@ -15,6 +15,7 @@ asgi_lifespan coverage docker faker +fakeredis[lua] hypercorn playwright pytest diff --git a/services/dynamic-scheduler/requirements/_test.txt b/services/dynamic-scheduler/requirements/_test.txt index 07f543c069a3..69fccfb42b89 100644 --- 
a/services/dynamic-scheduler/requirements/_test.txt +++ b/services/dynamic-scheduler/requirements/_test.txt @@ -3,7 +3,9 @@ anyio==4.8.0 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in certifi==2025.1.31 # via # -c requirements/../../../requirements/constraints.txt @@ -23,12 +25,14 @@ docker==7.1.0 # via -r requirements/_test.in faker==36.2.2 # via -r requirements/_test.in +fakeredis==2.30.3 + # via -r requirements/_test.in greenlet==3.1.1 # via # -c requirements/_base.txt # playwright # sqlalchemy -h11==0.14.0 +h11==0.16.0 # via # -c requirements/_base.txt # httpcore @@ -42,7 +46,7 @@ hpack==4.1.0 # via # -c requirements/_base.txt # h2 -httpcore==1.0.7 +httpcore==1.0.9 # via # -c requirements/_base.txt # httpx @@ -67,7 +71,9 @@ idna==3.10 # requests iniconfig==2.0.0 # via pytest -mypy==1.15.0 +lupa==2.5 + # via fakeredis +mypy==1.16.1 # via sqlalchemy mypy-extensions==1.1.0 # via mypy @@ -76,17 +82,25 @@ packaging==24.2 # -c requirements/_base.txt # pytest # pytest-sugar +pathspec==0.12.1 + # via mypy playwright==1.50.0 # via -r requirements/_test.in pluggy==1.5.0 - # via pytest + # via + # pytest + # pytest-cov pprintpp==0.4.0 # via pytest-icdiff priority==2.0.0 # via hypercorn pyee==12.1.1 # via playwright -pytest==8.3.5 +pygments==2.19.1 + # via + # -c requirements/_base.txt + # pytest +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio @@ -94,13 +108,13 @@ pytest==8.3.5 # pytest-icdiff # pytest-mock # pytest-sugar -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in pytest-icdiff==0.9 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -110,7 +124,12 @@ python-dotenv==1.0.1 # via # -c requirements/_base.txt # -r requirements/_test.in -requests==2.32.3 +redis==5.2.1 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # fakeredis +requests==2.32.4 # via # -c requirements/_base.txt # docker @@ -121,6 +140,8 @@ sniffio==1.3.1 # -c requirements/_base.txt # anyio # asgi-lifespan +sortedcontainers==2.4.0 + # via fakeredis sqlalchemy==1.4.54 # via # -c requirements/../../../requirements/constraints.txt @@ -132,7 +153,7 @@ termcolor==2.5.0 # via pytest-sugar types-psycopg2==2.9.21.20250318 # via -r requirements/_test.in -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # anyio @@ -141,7 +162,7 @@ typing-extensions==4.12.2 # sqlalchemy2-stubs tzdata==2025.1 # via faker -urllib3==2.3.0 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/services/dynamic-scheduler/requirements/_tools.txt b/services/dynamic-scheduler/requirements/_tools.txt index 24f125f8a3c7..ef3b0a8f84fc 100644 --- a/services/dynamic-scheduler/requirements/_tools.txt +++ b/services/dynamic-scheduler/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # black @@ -27,7 +27,7 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt @@ -45,7 +45,10 @@ packaging==24.2 # black # build pathspec==0.12.1 - # via 
black + # via + # -c requirements/_test.txt + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -70,11 +73,11 @@ pyyaml==6.0.2 # pre-commit ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.2 +setuptools==80.9.0 # via pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/_meta.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/_meta.py index 4e33eee92267..1d47d3a74f2f 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/_meta.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/_meta.py @@ -1,6 +1,5 @@ -""" Application's metadata +"""Application's metadata""" -""" from typing import Final from models_library.basic_types import VersionStr @@ -14,7 +13,7 @@ PROJECT_NAME: Final[str] = info.project_name VERSION: Final[Version] = info.version API_VERSION: Final[VersionStr] = info.__version__ -APP_NAME = PROJECT_NAME +APP_NAME: Final[str] = info.app_name API_VTAG: Final[str] = info.api_prefix_path_tag SUMMARY: Final[str] = info.get_summary() diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/_setup.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/_setup.py index d56da5f43f4f..2a7f0487871a 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/_setup.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/_setup.py @@ -2,14 +2,17 @@ from fastapi import FastAPI from ...core.settings import ApplicationSettings +from . import routes_external_scheduler, routes_internal_scheduler from ._utils import set_parent_app -from .routes import router def initialize_frontend(app: FastAPI) -> None: settings: ApplicationSettings = app.state.settings - nicegui.app.include_router(router) + if settings.DYNAMIC_SCHEDULER_USE_INTERNAL_SCHEDULER: + nicegui.app.include_router(routes_internal_scheduler.router) + else: + nicegui.app.include_router(routes_external_scheduler.router) nicegui.ui.run_with( app, diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/__init__.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes_external_scheduler/__init__.py similarity index 100% rename from services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/__init__.py rename to services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes_external_scheduler/__init__.py diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_index.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes_external_scheduler/_index.py similarity index 97% rename from services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_index.py rename to services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes_external_scheduler/_index.py index b6f7d5b1c919..99b6f3eecf74 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_index.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes_external_scheduler/_index.py @@ -1,3 +1,4 @@ +import arrow import httpx from common_library.json_serialization import 
json_dumps, json_loads from fastapi import FastAPI @@ -10,7 +11,7 @@ from ....services.service_tracker import TrackedServiceModel, get_all_tracked_services from ....services.service_tracker._models import SchedulerServiceState from .._utils import get_parent_app, get_settings -from ._render_utils import base_page, get_iso_formatted_date +from ._render_utils import base_page router = APIRouter() @@ -21,7 +22,7 @@ def _render_service_details(node_id: NodeID, service: TrackedServiceModel) -> No "Display State": ("label", service.current_state), "Last State Change": ( "label", - get_iso_formatted_date(service.last_state_change), + arrow.get(service.last_state_change).isoformat(), ), "UserID": ("copy", f"{service.user_id}"), "ProjectID": ("copy", f"{service.project_id}"), diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_render_utils.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes_external_scheduler/_render_utils.py similarity index 81% rename from services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_render_utils.py rename to services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes_external_scheduler/_render_utils.py index c3a315be2d70..d34191d21f48 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_render_utils.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes_external_scheduler/_render_utils.py @@ -1,7 +1,6 @@ from collections.abc import Iterator from contextlib import contextmanager -import arrow from nicegui import ui @@ -17,7 +16,3 @@ def base_page(*, title: str | None = None) -> Iterator[None]: ui.label(display_title) yield None - - -def get_iso_formatted_date(timestamp: float) -> str: - return arrow.get(timestamp).isoformat() diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_service.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes_external_scheduler/_service.py similarity index 98% rename from services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_service.py rename to services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes_external_scheduler/_service.py index ac073072a44a..c0a734f5baba 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes/_service.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes_external_scheduler/_service.py @@ -9,9 +9,9 @@ stop_dynamic_service, ) from settings_library.utils_service import DEFAULT_FASTAPI_PORT -from simcore_service_dynamic_scheduler.services.rabbitmq import get_rabbitmq_rpc_client from ....core.settings import ApplicationSettings +from ....services.rabbitmq import get_rabbitmq_rpc_client from ....services.service_tracker import get_tracked_service, remove_tracked_service from .._utils import get_parent_app, get_settings from ._render_utils import base_page diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes_internal_scheduler/__init__.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes_internal_scheduler/__init__.py new file mode 100644 index 000000000000..9b40dcf6ddd2 --- /dev/null +++ 
b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes_internal_scheduler/__init__.py @@ -0,0 +1,9 @@ +from nicegui import APIRouter + +from . import _index + +router = APIRouter() + +router.include_router(_index.router) + +__all__: tuple[str, ...] = ("router",) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes_internal_scheduler/_index.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes_internal_scheduler/_index.py new file mode 100644 index 000000000000..7c16cdbc7817 --- /dev/null +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/api/frontend/routes_internal_scheduler/_index.py @@ -0,0 +1,8 @@ +from nicegui import APIRouter, ui + +router = APIRouter() + + +@router.page("/") +async def index(): + ui.label("PLACEHOLDER for internal scheduler UI") diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py index 9f59f29859eb..a2617cec567e 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/application.py @@ -1,4 +1,5 @@ from fastapi import FastAPI +from servicelib.fastapi.lifespan_utils import Lifespan from servicelib.fastapi.monitoring import ( initialize_prometheus_instrumentation, ) @@ -13,7 +14,10 @@ from .settings import ApplicationSettings -def create_app(settings: ApplicationSettings | None = None) -> FastAPI: +def create_app( + settings: ApplicationSettings | None = None, + logging_lifespan: Lifespan | None = None, +) -> FastAPI: app_settings = settings or ApplicationSettings.create_from_envs() app = FastAPI( @@ -25,7 +29,9 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: "/doc" if app_settings.DYNAMIC_SCHEDULER_SWAGGER_API_DOC_ENABLED else None ), redoc_url=None, - lifespan=events.create_app_lifespan(settings=app_settings), + lifespan=events.create_app_lifespan( + settings=app_settings, logging_lifespan=logging_lifespan + ), ) override_fastapi_openapi_method(app) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/events.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/events.py index 492834a99e3f..1c293e78e710 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/events.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/events.py @@ -6,6 +6,7 @@ create_remote_docker_client_input_state, remote_docker_client_lifespan, ) +from servicelib.fastapi.lifespan_utils import Lifespan from servicelib.fastapi.monitoring import ( create_prometheus_instrumentationmain_input_state, prometheus_instrumentation_lifespan, @@ -51,8 +52,12 @@ async def _settings_lifespan(app: FastAPI) -> AsyncIterator[State]: } -def create_app_lifespan(settings: ApplicationSettings) -> LifespanManager: +def create_app_lifespan( + settings: ApplicationSettings, logging_lifespan: Lifespan | None +) -> LifespanManager: app_lifespan = LifespanManager() + if logging_lifespan: + app_lifespan.add(logging_lifespan) app_lifespan.add(_settings_lifespan) if settings.DYNAMIC_SCHEDULER_TRACING: diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py index f1ce9b13d333..4083ef85f332 100644 --- 
a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/core/settings.py @@ -1,9 +1,11 @@ import datetime -from typing import Annotated +from functools import cached_property +from typing import Annotated, cast from common_library.basic_types import DEFAULT_FACTORY +from common_library.logging.logging_utils_filtering import LoggerName, MessageSubstring from pydantic import AliasChoices, Field, SecretStr, TypeAdapter, field_validator -from servicelib.logging_utils_filtering import LoggerName, MessageSubstring +from servicelib.logging_utils import LogLevelInt from settings_library.application import BaseApplicationSettings from settings_library.basic_types import LogLevel, VersionTag from settings_library.catalog import CatalogSettings @@ -105,6 +107,10 @@ class _BaseApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ), ] = False + @cached_property + def log_level(self) -> LogLevelInt: + return cast(LogLevelInt, self.DYNAMIC_SCHEDULER_LOGLEVEL) + @field_validator("DYNAMIC_SCHEDULER_LOGLEVEL", mode="before") @classmethod def _validate_log_level(cls, value: str) -> str: diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py index ab7268832370..bf1f22e27e61 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/main.py @@ -1,22 +1,39 @@ -"""Main application to be deployed by uvicorn (or equivalent) server - -""" +"""Main application to be deployed by uvicorn (or equivalent) server""" import logging +from typing import Final +from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.logging_utils import config_all_loggers +from servicelib.fastapi.logging_lifespan import ( + create_logging_lifespan, +) from simcore_service_dynamic_scheduler.core.application import create_app from simcore_service_dynamic_scheduler.core.settings import ApplicationSettings -_the_settings = ApplicationSettings.create_from_envs() -logging.basicConfig(level=_the_settings.DYNAMIC_SCHEDULER_LOGLEVEL.value) -logging.root.setLevel(_the_settings.DYNAMIC_SCHEDULER_LOGLEVEL.value) -config_all_loggers( - log_format_local_dev_enabled=_the_settings.DYNAMIC_SCHEDULER_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=_the_settings.DYNAMIC_SCHEDULER_LOG_FILTER_MAPPING, - tracing_settings=_the_settings.DYNAMIC_SCHEDULER_TRACING, +_logger = logging.getLogger(__name__) + +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aiobotocore", + "aio_pika", + "aiormq", + "botocore", + "werkzeug", ) -# SINGLETON FastAPI app -the_app: FastAPI = create_app(_the_settings) + +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_lifespan = create_logging_lifespan( + log_format_local_dev_enabled=app_settings.DYNAMIC_SCHEDULER_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.DYNAMIC_SCHEDULER_LOG_FILTER_MAPPING, + tracing_settings=app_settings.DYNAMIC_SCHEDULER_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) + + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + return create_app(settings=app_settings, logging_lifespan=logging_lifespan) diff --git 
a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py index d833d3169bf6..e45b0ae1725f 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_public_client.py @@ -80,7 +80,7 @@ async def stop_dynamic_service( node_id: NodeID, simcore_user_agent: str, save_state: bool, - timeout: datetime.timedelta # noqa: ASYNC109 + timeout: datetime.timedelta, # noqa: ASYNC109 ) -> None: try: await self.thin_client.delete_dynamic_service( @@ -95,7 +95,8 @@ async def stop_dynamic_service( == status.HTTP_409_CONFLICT ): raise ServiceWaitingForManualInterventionError( - node_id=node_id + node_id=node_id, + unexpected_status_error=f"{e}", ) from None if ( e.response.status_code # type: ignore[attr-defined] # pylint:disable=no-member @@ -110,7 +111,7 @@ async def retrieve_inputs( *, node_id: NodeID, port_keys: list[ServicePortKey], - timeout: datetime.timedelta # noqa: ASYNC109 + timeout: datetime.timedelta, # noqa: ASYNC109 ) -> RetrieveDataOutEnveloped: response = await self.thin_client.dynamic_service_retrieve( node_id=node_id, port_keys=port_keys, timeout=timeout diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_notifier.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_notifier.py index c869a368ab21..a642b3a64fde 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_notifier.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/notifier/_notifier.py @@ -11,7 +11,7 @@ ) from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle from models_library.api_schemas_webserver.socketio import SocketIORoomStr -from models_library.users import UserID +from models_library.projects import ProjectID from servicelib.fastapi.app_state import SingletonInAppStateMixin from servicelib.services_utils import get_status_as_dict @@ -23,20 +23,22 @@ def __init__(self, sio_manager: socketio.AsyncAioPikaManager): self._sio_manager = sio_manager async def notify_service_status( - self, user_id: UserID, status: NodeGet | DynamicServiceGet | NodeGetIdle + self, project_id: ProjectID, status: NodeGet | DynamicServiceGet | NodeGetIdle ) -> None: await self._sio_manager.emit( SOCKET_IO_SERVICE_STATUS_EVENT, data=jsonable_encoder(get_status_as_dict(status)), - room=SocketIORoomStr.from_user_id(user_id), + room=SocketIORoomStr.from_project_id(project_id), ) async def notify_service_status_change( - app: FastAPI, user_id: UserID, status: NodeGet | DynamicServiceGet | NodeGetIdle + app: FastAPI, + project_id: ProjectID, + status: NodeGet | DynamicServiceGet | NodeGetIdle, ) -> None: notifier: Notifier = Notifier.get_from_app_state(app) - await notifier.notify_service_status(user_id=user_id, status=status) + await notifier.notify_service_status(project_id=project_id, status=status) async def lifespan(app: FastAPI) -> AsyncIterator[State]: diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/__init__.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/__init__.py index fee6fc069f35..797492baab62 100644 --- 
a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/__init__.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/__init__.py @@ -1,8 +1,8 @@ from ._api import ( NORMAL_RATE_POLL_INTERVAL, get_all_tracked_services, + get_project_id_for_service, get_tracked_service, - get_user_id_for_service, remove_tracked_service, set_frontend_notified_for_service, set_if_status_changed_for_service, @@ -17,11 +17,11 @@ __all__: tuple[str, ...] = ( "get_all_tracked_services", + "get_project_id_for_service", "get_tracked_service", - "get_user_id_for_service", - "service_tracker_lifespan", "NORMAL_RATE_POLL_INTERVAL", "remove_tracked_service", + "service_tracker_lifespan", "set_frontend_notified_for_service", "set_if_status_changed_for_service", "set_request_as_running", diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_api.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_api.py index 09e4c3b965f9..caf38548af98 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_api.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_api.py @@ -11,12 +11,16 @@ DynamicServiceStop, ) from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle +from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.services_enums import ServiceState -from models_library.users import UserID from servicelib.deferred_tasks import TaskUID -from ._models import SchedulerServiceState, TrackedServiceModel, UserRequestedState +from ._models import ( + SchedulerServiceState, + TrackedServiceModel, + UserRequestedState, +) from ._setup import get_tracker _logger = logging.getLogger(__name__) @@ -242,7 +246,7 @@ async def get_all_tracked_services(app: FastAPI) -> dict[NodeID, TrackedServiceM return await get_tracker(app).all() -async def get_user_id_for_service(app: FastAPI, node_id: NodeID) -> UserID | None: - """returns user_id for the service""" +async def get_project_id_for_service(app: FastAPI, node_id: NodeID) -> ProjectID | None: + """returns project_id for the service""" model: TrackedServiceModel | None = await get_tracker(app).load(node_id) - return model.user_id if model else None + return model.project_id if model else None diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_tracker.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_tracker.py index 489cee153105..da57c84f3497 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_tracker.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/service_tracker/_tracker.py @@ -34,7 +34,12 @@ async def delete(self, node_id: NodeID) -> None: await self.redis_client_sdk.redis.delete(_get_key(node_id)) async def all(self) -> dict[NodeID, TrackedServiceModel]: - found_keys = await self.redis_client_sdk.redis.keys(f"{_KEY_PREFIX}*") + found_keys = [ + x + async for x in self.redis_client_sdk.redis.scan_iter( + match=f"{_KEY_PREFIX}*" + ) + ] found_values = await self.redis_client_sdk.redis.mget(found_keys) return { diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_deferred_get_status.py 
b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_deferred_get_status.py index 3f6efbfaecb0..84653c76b8c6 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_deferred_get_status.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_deferred_get_status.py @@ -7,8 +7,8 @@ RunningDynamicServiceDetails, ) from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle +from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID -from models_library.users import UserID from servicelib.deferred_tasks import BaseDeferredHandler, TaskUID from servicelib.deferred_tasks._base_deferred_handler import DeferredContext @@ -69,15 +69,15 @@ async def on_result( if await service_tracker.should_notify_frontend_for_service( app, node_id, status_changed=status_changed ): - user_id: UserID | None = await service_tracker.get_user_id_for_service( - app, node_id + project_id: ProjectID | None = ( + await service_tracker.get_project_id_for_service(app, node_id) ) - if user_id: - await notify_service_status_change(app, user_id, result) + if project_id: + await notify_service_status_change(app, project_id, result) await service_tracker.set_frontend_notified_for_service(app, node_id) else: _logger.info( - "Did not find a user for '%s', skipping status delivery of: %s", + "Did not find a project for '%s', skipping status delivery of: %s", node_id, result, ) diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_monitor.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_monitor.py index 750b0dbdc637..5e05384c990a 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_monitor.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/status_monitor/_monitor.py @@ -5,10 +5,10 @@ from typing import Final import arrow +from common_library.async_tools import cancel_wait_task from fastapi import FastAPI from models_library.projects_nodes_io import NodeID from pydantic import NonNegativeFloat, NonNegativeInt -from servicelib.async_utils import cancel_wait_task from servicelib.background_task_utils import exclusive_periodic from servicelib.utils import limited_gather from settings_library.redis import RedisDatabase @@ -79,9 +79,9 @@ async def _worker_check_services_require_status_update(self) -> None: # NOTE: this worker runs only once across all instances of the scheduler - models: dict[ - NodeID, TrackedServiceModel - ] = await service_tracker.get_all_tracked_services(self.app) + models: dict[NodeID, TrackedServiceModel] = ( + await service_tracker.get_all_tracked_services(self.app) + ) to_remove: list[NodeID] = [] to_start: list[NodeID] = [] diff --git a/services/dynamic-scheduler/tests/conftest.py b/services/dynamic-scheduler/tests/conftest.py index 2cb7f135829c..5543ad0665da 100644 --- a/services/dynamic-scheduler/tests/conftest.py +++ b/services/dynamic-scheduler/tests/conftest.py @@ -21,12 +21,14 @@ from simcore_service_dynamic_scheduler.core.application import create_app pytest_plugins = [ + "pytest_simcore.asyncio_event_loops", "pytest_simcore.cli_runner", "pytest_simcore.docker_compose", "pytest_simcore.docker_swarm", "pytest_simcore.environment_configs", "pytest_simcore.faker_projects_data", "pytest_simcore.faker_users_data", + "pytest_simcore.logging",
"pytest_simcore.postgres_service", "pytest_simcore.rabbit_service", "pytest_simcore.redis_service", diff --git a/services/dynamic-scheduler/tests/unit/api_frontend/_routes_internal_scheduler/conftest.py b/services/dynamic-scheduler/tests/unit/api_frontend/_routes_internal_scheduler/conftest.py new file mode 100644 index 000000000000..cf8b4f75d70e --- /dev/null +++ b/services/dynamic-scheduler/tests/unit/api_frontend/_routes_internal_scheduler/conftest.py @@ -0,0 +1,6 @@ +import pytest + + +@pytest.fixture +def use_internal_scheduler() -> bool: + return True diff --git a/services/dynamic-scheduler/tests/unit/api_frontend/_routes_internal_scheduler/test__index_.py b/services/dynamic-scheduler/tests/unit/api_frontend/_routes_internal_scheduler/test__index_.py new file mode 100644 index 000000000000..b919e371ba1c --- /dev/null +++ b/services/dynamic-scheduler/tests/unit/api_frontend/_routes_internal_scheduler/test__index_.py @@ -0,0 +1,26 @@ +# pylint:disable=redefined-outer-name +# pylint:disable=unused-argument + +from helpers import assert_contains_text +from playwright.async_api import Page +from simcore_service_dynamic_scheduler.api.frontend._utils import get_settings + +pytest_simcore_core_services_selection = [ + "postgres", + "rabbit", + "redis", +] + +pytest_simcore_ops_services_selection = [ + "redis-commander", +] + + +async def test_placeholder_index( + app_runner: None, async_page: Page, server_host_port: str +): + await async_page.goto( + f"{server_host_port}{get_settings().DYNAMIC_SCHEDULER_UI_MOUNT_PATH}" + ) + + await assert_contains_text(async_page, "PLACEHOLDER for internal scheduler UI") diff --git a/services/dynamic-scheduler/tests/unit/api_frontend/conftest.py b/services/dynamic-scheduler/tests/unit/api_frontend/conftest.py index 663091247d10..3aef679dfa3f 100644 --- a/services/dynamic-scheduler/tests/unit/api_frontend/conftest.py +++ b/services/dynamic-scheduler/tests/unit/api_frontend/conftest.py @@ -5,10 +5,7 @@ import subprocess from collections.abc import AsyncIterable from contextlib import suppress -from typing import Final -from unittest.mock import AsyncMock -import nicegui import pytest import sqlalchemy as sa from fastapi import FastAPI, status @@ -17,6 +14,7 @@ from hypercorn.config import Config from playwright.async_api import Page, async_playwright from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.postgres_tools import PostgresTestConfig from pytest_simcore.helpers.typing_env import EnvVarsDict from settings_library.rabbit import RabbitSettings @@ -26,37 +24,24 @@ from simcore_service_dynamic_scheduler.core.settings import ApplicationSettings from tenacity import AsyncRetrying, stop_after_delay, wait_fixed -_MODULE: Final["str"] = "simcore_service_dynamic_scheduler" - @pytest.fixture def disable_status_monitor_background_task(mocker: MockerFixture) -> None: mocker.patch( - f"{_MODULE}.services.status_monitor._monitor.Monitor._worker_check_services_require_status_update" + "simcore_service_dynamic_scheduler.services.status_monitor._monitor.Monitor._worker_check_services_require_status_update" ) @pytest.fixture -def mock_stop_dynamic_service(mocker: MockerFixture) -> AsyncMock: - async_mock = AsyncMock() - mocker.patch( - f"{_MODULE}.api.frontend.routes._service.stop_dynamic_service", async_mock - ) - return async_mock - - -@pytest.fixture -def mock_remove_tracked_service(mocker: MockerFixture) -> AsyncMock: - async_mock = AsyncMock() - mocker.patch( - 
f"{_MODULE}.api.frontend.routes._service.remove_tracked_service", async_mock - ) - return async_mock +def use_internal_scheduler() -> bool: + pytest.fail("please define use_internal_scheduler fixture in your tests folder") @pytest.fixture def app_environment( + monkeypatch: pytest.MonkeyPatch, app_environment: EnvVarsDict, + use_internal_scheduler: bool, postgres_db: sa.engine.Engine, postgres_host_config: PostgresTestConfig, disable_status_monitor_background_task: None, @@ -64,7 +49,11 @@ def app_environment( redis_service: RedisSettings, remove_redis_data: None, ) -> EnvVarsDict: - return app_environment + to_set = { + "DYNAMIC_SCHEDULER_USE_INTERNAL_SCHEDULER": f"{use_internal_scheduler}", + } + setenvs_from_dict(monkeypatch, to_set) + return {**app_environment, **to_set} @pytest.fixture @@ -72,11 +61,39 @@ def server_host_port() -> str: return f"127.0.0.1:{DEFAULT_FASTAPI_PORT}" +def _reset_nicegui_app() -> None: + # forces rebuild of middleware stack on next test + + # below is based on nicegui.testing.general_fixtures.nicegui_reset_globals + + from nicegui import Client, app + from starlette.routing import Route + + for route in list(app.routes): + if isinstance(route, Route) and route.path.startswith("/_nicegui/auto/static/"): + app.remove_route(route.path) + + all_page_routes = set(Client.page_routes.values()) + all_page_routes.add("/") + for path in all_page_routes: + app.remove_route(path) + + for route in list(app.routes): + if ( + isinstance(route, Route) + and "{" in route.path + and "}" in route.path + and not route.path.startswith("/_nicegui/") + ): + app.remove_route(route.path) + + app.middleware_stack = None + app.user_middleware.clear() + + @pytest.fixture def not_initialized_app(app_environment: EnvVarsDict) -> FastAPI: - # forces rebuild of middleware stack on next test - nicegui.app.user_middleware.clear() - nicegui.app.middleware_stack = None + _reset_nicegui_app() return create_app() diff --git a/services/dynamic-scheduler/tests/unit/api_frontend/helpers.py b/services/dynamic-scheduler/tests/unit/api_frontend/helpers.py index 91c2058c869c..79cf9ad27ebc 100644 --- a/services/dynamic-scheduler/tests/unit/api_frontend/helpers.py +++ b/services/dynamic-scheduler/tests/unit/api_frontend/helpers.py @@ -92,13 +92,11 @@ async def assert_not_contains_text( def get_new_style_service_status(state: str) -> DynamicServiceGet: return TypeAdapter(DynamicServiceGet).validate_python( - DynamicServiceGet.model_config["json_schema_extra"]["examples"][0] - | {"state": state} + DynamicServiceGet.model_json_schema()["examples"][0] | {"state": state} ) def get_legacy_service_status(state: str) -> NodeGet: return TypeAdapter(NodeGet).validate_python( - NodeGet.model_config["json_schema_extra"]["examples"][0] - | {"service_state": state} + NodeGet.model_json_schema()["examples"][0] | {"service_state": state} ) diff --git a/services/dynamic-scheduler/tests/unit/api_frontend/routes_external_scheduler/conftest.py b/services/dynamic-scheduler/tests/unit/api_frontend/routes_external_scheduler/conftest.py new file mode 100644 index 000000000000..233f9a5afec8 --- /dev/null +++ b/services/dynamic-scheduler/tests/unit/api_frontend/routes_external_scheduler/conftest.py @@ -0,0 +1,30 @@ +# pylint:disable=redefined-outer-name + +from typing import Final +from unittest.mock import AsyncMock + +import pytest +from pytest_mock import MockerFixture + +_MODULE: Final["str"] = ( + "simcore_service_dynamic_scheduler.api.frontend.routes_external_scheduler._service" +) + + +@pytest.fixture +def 
use_internal_scheduler() -> bool: + return False + + +@pytest.fixture +def mock_stop_dynamic_service(mocker: MockerFixture) -> AsyncMock: + async_mock = AsyncMock() + mocker.patch(f"{_MODULE}.stop_dynamic_service", async_mock) + return async_mock + + +@pytest.fixture +def mock_remove_tracked_service(mocker: MockerFixture) -> AsyncMock: + async_mock = AsyncMock() + mocker.patch(f"{_MODULE}.remove_tracked_service", async_mock) + return async_mock diff --git a/services/dynamic-scheduler/tests/unit/api_frontend/test_api_frontend_routes_index.py b/services/dynamic-scheduler/tests/unit/api_frontend/routes_external_scheduler/test__index.py similarity index 99% rename from services/dynamic-scheduler/tests/unit/api_frontend/test_api_frontend_routes_index.py rename to services/dynamic-scheduler/tests/unit/api_frontend/routes_external_scheduler/test__index.py index 8ba68fbe632d..d59d1468b5f4 100644 --- a/services/dynamic-scheduler/tests/unit/api_frontend/test_api_frontend_routes_index.py +++ b/services/dynamic-scheduler/tests/unit/api_frontend/routes_external_scheduler/test__index.py @@ -38,7 +38,7 @@ ] pytest_simcore_ops_services_selection = [ - # "redis-commander", + "redis-commander", ] diff --git a/services/dynamic-scheduler/tests/unit/api_frontend/test_api_frontend_routes_service.py b/services/dynamic-scheduler/tests/unit/api_frontend/routes_external_scheduler/test__service.py similarity index 99% rename from services/dynamic-scheduler/tests/unit/api_frontend/test_api_frontend_routes_service.py rename to services/dynamic-scheduler/tests/unit/api_frontend/routes_external_scheduler/test__service.py index a4f0c3993d02..52524e7ad6da 100644 --- a/services/dynamic-scheduler/tests/unit/api_frontend/test_api_frontend_routes_service.py +++ b/services/dynamic-scheduler/tests/unit/api_frontend/routes_external_scheduler/test__service.py @@ -34,7 +34,7 @@ ] pytest_simcore_ops_services_selection = [ - # "redis-commander", + "redis-commander", ] diff --git a/services/dynamic-scheduler/tests/unit/service_tracker/test__api.py b/services/dynamic-scheduler/tests/unit/service_tracker/test__api.py index 5ce6c8c3d1c1..b1041b1a8963 100644 --- a/services/dynamic-scheduler/tests/unit/service_tracker/test__api.py +++ b/services/dynamic-scheduler/tests/unit/service_tracker/test__api.py @@ -126,11 +126,12 @@ async def test_services_tracer_workflow( ], *[ DynamicServiceGet.model_validate(o) - for o in DynamicServiceGet.model_config["json_schema_extra"]["examples"] + for o in DynamicServiceGet.model_json_schema()["examples"] + ], + *[ + NodeGetIdle.model_validate(o) + for o in NodeGetIdle.model_json_schema()["examples"] ], - NodeGetIdle.model_validate( - NodeGetIdle.model_config["json_schema_extra"]["example"] - ), ], ) async def test_set_if_status_changed( @@ -182,14 +183,15 @@ async def test_set_service_status_task_uid( TypeAdapter(DynamicServiceGet).validate_python(o), NORMAL_RATE_POLL_INTERVAL, ) - for o in DynamicServiceGet.model_config["json_schema_extra"]["examples"] + for o in DynamicServiceGet.model_json_schema()["examples"] + ], + *[ + ( + TypeAdapter(NodeGetIdle).validate_python(o), + _LOW_RATE_POLL_INTERVAL, + ) + for o in NodeGetIdle.model_json_schema()["examples"] ], - ( - TypeAdapter(NodeGetIdle).validate_python( - NodeGetIdle.model_config["json_schema_extra"]["example"] - ), - _LOW_RATE_POLL_INTERVAL, - ), ], ) def test__get_poll_interval( @@ -208,7 +210,7 @@ def _get_node_get_from(service_state: ServiceState) -> NodeGet: def _get_dynamic_service_get_from( service_state: ServiceState, ) -> DynamicServiceGet: 
- dict_data = DynamicServiceGet.model_config["json_schema_extra"]["examples"][1] + dict_data = DynamicServiceGet.model_json_schema()["examples"][1] assert "service_state" in dict_data dict_data["service_state"] = service_state return TypeAdapter(DynamicServiceGet).validate_python(dict_data) @@ -216,7 +218,7 @@ def _get_dynamic_service_get_from( def _get_node_get_idle() -> NodeGetIdle: return TypeAdapter(NodeGetIdle).validate_python( - NodeGetIdle.model_config["json_schema_extra"]["example"] + NodeGetIdle.model_json_schema()["examples"][0] ) diff --git a/services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py b/services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py index 4b59a9683ab9..067a900d27bd 100644 --- a/services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py +++ b/services/dynamic-scheduler/tests/unit/status_monitor/test_services_status_monitor__monitor.py @@ -94,9 +94,7 @@ def _get_node_get_with(state: str, node_id: NodeID = _DEFAULT_NODE_ID) -> NodeGe def _get_dynamic_service_get_legacy_with( state: str, node_id: NodeID = _DEFAULT_NODE_ID ) -> DynamicServiceGet: - dict_data = deepcopy( - DynamicServiceGet.model_config["json_schema_extra"]["examples"][0] - ) + dict_data = deepcopy(DynamicServiceGet.model_json_schema()["examples"][0]) _add_to_dict( dict_data, [ @@ -110,9 +108,7 @@ def _get_dynamic_service_get_legacy_with( def _get_dynamic_service_get_new_style_with( state: str, node_id: NodeID = _DEFAULT_NODE_ID ) -> DynamicServiceGet: - dict_data = deepcopy( - DynamicServiceGet.model_config["json_schema_extra"]["examples"][1] - ) + dict_data = deepcopy(DynamicServiceGet.model_json_schema()["examples"][1]) _add_to_dict( dict_data, [ @@ -124,7 +120,7 @@ def _get_dynamic_service_get_new_style_with( def _get_node_get_idle(node_id: NodeID = _DEFAULT_NODE_ID) -> NodeGetIdle: - dict_data = NodeGetIdle.model_config["json_schema_extra"]["example"] + dict_data = NodeGetIdle.model_json_schema()["examples"][0] _add_to_dict( dict_data, [ @@ -190,7 +186,6 @@ async def _assert_result( retry=retry_if_exception_type(AssertionError), ): with attempt: - assert deferred_status_spies["on_result"].call_count == len(timeline) assert [ x.args[0] for x in deferred_status_spies["on_result"].call_args_list diff --git a/services/dynamic-scheduler/tests/unit/test_core_settings.py b/services/dynamic-scheduler/tests/unit/test_core_settings.py new file mode 100644 index 000000000000..d874f5a15b90 --- /dev/null +++ b/services/dynamic-scheduler/tests/unit/test_core_settings.py @@ -0,0 +1,17 @@ +# pylint: disable=unused-variable +# pylint: disable=unused-argument +# pylint: disable=redefined-outer-name + + +from pytest_simcore.helpers.monkeypatch_envs import ( + EnvVarsDict, +) +from simcore_service_dynamic_scheduler.core.settings import ApplicationSettings + + +def test_valid_application_settings(app_environment: EnvVarsDict): + assert app_environment + + settings = ApplicationSettings() # type: ignore + assert settings + assert settings == ApplicationSettings.create_from_envs() diff --git a/services/dynamic-scheduler/tests/unit/test_repository_postgres_networks.py b/services/dynamic-scheduler/tests/unit/test_repository_postgres_networks.py index e0374fb31dc4..9ed34d603d42 100644 --- a/services/dynamic-scheduler/tests/unit/test_repository_postgres_networks.py +++ b/services/dynamic-scheduler/tests/unit/test_repository_postgres_networks.py @@ -17,9 +17,11 @@ PostgresTestConfig, 
insert_and_get_row_lifespan, ) +from pytest_simcore.helpers.postgres_users import ( + insert_and_get_user_and_secrets_lifespan, +) from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_postgres_database.models.projects import projects -from simcore_postgres_database.models.users import users from simcore_service_dynamic_scheduler.repository.events import ( get_project_networks_repo, ) @@ -77,17 +79,14 @@ async def user_in_db( user_id: UserID, ) -> AsyncIterator[dict[str, Any]]: """ - injects a user in db + injects a user + secrets in db """ assert user_id == user["id"] - async with insert_and_get_row_lifespan( + async with insert_and_get_user_and_secrets_lifespan( engine, - table=users, - values=user, - pk_col=users.c.id, - pk_value=user["id"], - ) as row: - yield row + **user, + ) as user_row: + yield user_row @pytest.fixture diff --git a/services/dynamic-sidecar/Dockerfile b/services/dynamic-sidecar/Dockerfile index 59522ae03bc5..87f5e07fe083 100644 --- a/services/dynamic-sidecar/Dockerfile +++ b/services/dynamic-sidecar/Dockerfile @@ -2,7 +2,7 @@ # Define arguments in the global scope ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build FROM python:${PYTHON_VERSION}-slim-bookworm AS base-arm64 @@ -48,6 +48,7 @@ RUN \ && apt-get install -y --no-install-recommends \ docker-ce-cli=${DOCKER_APT_VERSION} \ docker-compose-plugin=${DOCKER_COMPOSE_APT_VERSION} \ + fd-find \ gosu \ ca-certificates \ # required by python-magic @@ -143,13 +144,6 @@ RUN uv venv "${VIRTUAL_ENV}" \ && mkdir -p "${DYNAMIC_SIDECAR_DY_VOLUMES_MOUNT_DIR}" -RUN --mount=type=cache,target=/root/.cache/uv \ - echo ${UV_CONCURRENT_INSTALLS} && \ - uv pip install --upgrade \ - wheel \ - setuptools - - WORKDIR /build @@ -166,6 +160,9 @@ COPY --chown=root:root services/dynamic-sidecar/scripts/Makefile /root FROM build AS prod-only-deps ENV SC_BUILD_TARGET=prod-only-deps +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy WORKDIR /build/services/dynamic-sidecar @@ -190,8 +187,6 @@ ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production ENV PYTHONOPTIMIZE=TRUE -# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode -ENV UV_COMPILE_BYTECODE=1 WORKDIR /home/scu diff --git a/services/dynamic-sidecar/VERSION b/services/dynamic-sidecar/VERSION index 26aaba0e8663..6085e946503a 100644 --- a/services/dynamic-sidecar/VERSION +++ b/services/dynamic-sidecar/VERSION @@ -1 +1 @@ -1.2.0 +1.2.1 diff --git a/services/dynamic-sidecar/docker/boot.sh b/services/dynamic-sidecar/docker/boot.sh index 152fc8a04c04..984ef554a3b8 100755 --- a/services/dynamic-sidecar/docker/boot.sh +++ b/services/dynamic-sidecar/docker/boot.sh @@ -25,7 +25,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then # NOTE: uv does not like this requirement file... 
cd /devel/services/dynamic-sidecar - uv pip --quiet sync requirements/dev.txt + uv pip --quiet sync --link-mode=copy requirements/dev.txt cd - echo "$INFO" "PIP :" pip list | sed 's/^/ /' @@ -34,7 +34,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy if command -v uv >/dev/null 2>&1; then - uv pip install debugpy + uv pip install --link-mode=copy debugpy else pip install debugpy fi @@ -49,19 +49,22 @@ SERVER_LOG_LEVEL=$(echo "${APP_LOG_LEVEL}" | tr '[:upper:]' '[:lower:]') echo "$INFO" "Log-level app/server: $APP_LOG_LEVEL/$SERVER_LOG_LEVEL" if [ "${SC_BOOT_MODE}" = "debug" ]; then - reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path "*/src/*" ! -path "*.*" -exec echo '--reload-dir {} \' \;) + reload_dir_packages=$(fdfind src /devel/packages --exec echo '--reload-dir {} ' | tr '\n' ' ') exec sh -c " cd services/dynamic-sidecar/src/simcore_service_dynamic_sidecar && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DYNAMIC_SIDECAR_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${DYNAMIC_SIDECAR_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ - $reload_dir_packages + $reload_dir_packages \ --reload-dir . \ --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_dynamic_sidecar.main:the_app \ + exec uvicorn \ + --factory simcore_service_dynamic_sidecar.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/dynamic-sidecar/docker/entrypoint.sh b/services/dynamic-sidecar/docker/entrypoint.sh index 73815d447dd5..8f5a3799d786 100755 --- a/services/dynamic-sidecar/docker/entrypoint.sh +++ b/services/dynamic-sidecar/docker/entrypoint.sh @@ -27,6 +27,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE @@ -67,10 +68,9 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; - # change user property of files already around + fdfind --owner ":$SC_USER_ID" --exclude proc --exec-batch chgrp --no-dereference "$CONT_GROUPNAME" . '/' echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fdfind --owner "$SC_USER_ID:" --exclude proc --exec-batch chown --no-dereference "$SC_USER_NAME" . 
'/' fi fi diff --git a/services/dynamic-sidecar/openapi.json b/services/dynamic-sidecar/openapi.json index 442005a03c19..782c15700e96 100644 --- a/services/dynamic-sidecar/openapi.json +++ b/services/dynamic-sidecar/openapi.json @@ -3,7 +3,7 @@ "info": { "title": "simcore-service-dynamic-sidecar", "description": "Implements a sidecar service to manage user's dynamic/interactive services", - "version": "1.2.0" + "version": "1.2.1" }, "servers": [ { @@ -72,9 +72,9 @@ "tags": [ "containers" ], - "summary": "Store Compose Spec", + "summary": "Create Compose Spec", "description": "Validates and stores the docker compose spec for the user services.", - "operationId": "store_compose_spec_v1_containers_compose_spec_post", + "operationId": "create_compose_spec_v1_containers_compose_spec_post", "requestBody": { "content": { "application/json": { @@ -139,6 +139,7 @@ "application/json": { "schema": { "type": "object", + "additionalProperties": true, "title": "Response Containers Docker Inspect V1 Containers Get" } } @@ -163,8 +164,9 @@ "tags": [ "containers" ], - "summary": "Starts the containers as defined in ContainerCreate by:\n- cleaning up resources from previous runs if any\n- starting the containers\n\nProgress may be obtained through URL\nProcess may be cancelled through URL", - "operationId": "create_service_containers_task_v1_containers_post", + "summary": "Create Containers", + "description": "Starts the containers as defined in ContainerCreate by:\n- cleaning up resources from previous runs if any\n- starting the containers\n\nProgress may be obtained through URL\nProcess may be cancelled through URL", + "operationId": "create_containers_v1_containers_post", "requestBody": { "required": true, "content": { @@ -182,7 +184,7 @@ "application/json": { "schema": { "type": "string", - "title": "Response Create Service Containers Task V1 Containers Post" + "title": "Response Create Containers V1 Containers Post" } } } @@ -206,6 +208,7 @@ "containers" ], "summary": "Get Containers Activity", + "description": "If user service declared an inactivity hook, this endpoint provides\ninformation about how much time has passed since the service became inactive.", "operationId": "get_containers_activity_v1_containers_activity_get", "responses": { "200": { @@ -261,7 +264,8 @@ "type": "string" }, { - "type": "object" + "type": "object", + "additionalProperties": true } ], "title": "Response Get Containers Name V1 Containers Name Get" @@ -304,6 +308,7 @@ "application/json": { "schema": { "type": "object", + "additionalProperties": true, "title": "Response Inspect Container V1 Containers Id Get" } } @@ -333,7 +338,8 @@ "tags": [ "containers" ], - "summary": "Enable/disable ports i/o", + "summary": "Toggle Ports Io", + "description": "Enable/disable ports i/o", "operationId": "toggle_ports_io_v1_containers_ports_io_patch", "requestBody": { "content": { @@ -367,7 +373,8 @@ "tags": [ "containers" ], - "summary": "Creates the output directories declared by the docker images's labels. 
It is more convenient to pass the labels from director-v2, since it already has all the machinery to call into director-v0 to retrieve them.", + "summary": "Create Output Dirs", + "description": "Creates the output directories declared by the docker images's labels.\nIt is more convenient to pass the labels from director-v2,\nsince it already has all the machinery to call into director-v0\nto retrieve them.", "operationId": "create_output_dirs_v1_containers_ports_outputs_dirs_post", "requestBody": { "content": { @@ -401,7 +408,8 @@ "tags": [ "containers" ], - "summary": "attach container to a network, if not already attached", + "summary": "Attach Container To Network", + "description": "attach container to a network, if not already attached", "operationId": "attach_container_to_network_v1_containers__id__networks_attach_post", "parameters": [ { @@ -446,7 +454,8 @@ "tags": [ "containers" ], - "summary": "detach container from a network, if not already detached", + "summary": "Detach Container From Network", + "description": "detach container from a network, if not already detached", "operationId": "detach_container_from_network_v1_containers__id__networks_detach_post", "parameters": [ { @@ -491,8 +500,9 @@ "tags": [ "containers" ], - "summary": "Pulls all the docker container images for the user services", - "operationId": "pull_user_servcices_docker_images_v1_containers_images_pull_post", + "summary": "Pull Container Images", + "description": "Pulls all the docker container images for the user services", + "operationId": "pull_container_images_v1_containers_images_pull_post", "responses": { "202": { "description": "Successful Response", @@ -500,7 +510,7 @@ "application/json": { "schema": { "type": "string", - "title": "Response Pull User Servcices Docker Images V1 Containers Images Pull Post" + "title": "Response Pull Container Images V1 Containers Images Pull Post" } } } @@ -513,8 +523,9 @@ "tags": [ "containers" ], - "summary": "Remove the previously started containers", - "operationId": "runs_docker_compose_down_task_v1_containers_down_post", + "summary": "Down Containers", + "description": "Remove the previously started containers", + "operationId": "down_containers_v1_containers_down_post", "responses": { "202": { "description": "Successful Response", @@ -522,7 +533,7 @@ "application/json": { "schema": { "type": "string", - "title": "Response Runs Docker Compose Down Task V1 Containers Down Post" + "title": "Response Down Containers V1 Containers Down Post" } } } @@ -535,8 +546,9 @@ "tags": [ "containers" ], - "summary": "Restores the state of the dynamic service", - "operationId": "state_restore_task_v1_containers_state_restore_post", + "summary": "Restore Containers State Paths", + "description": "Restores the state of the dynamic service", + "operationId": "restore_containers_state_paths_v1_containers_state_restore_post", "responses": { "202": { "description": "Successful Response", @@ -544,7 +556,7 @@ "application/json": { "schema": { "type": "string", - "title": "Response State Restore Task V1 Containers State Restore Post" + "title": "Response Restore Containers State Paths V1 Containers State Restore Post" } } } @@ -557,8 +569,9 @@ "tags": [ "containers" ], - "summary": "Stores the state of the dynamic service", - "operationId": "state_save_task_v1_containers_state_save_post", + "summary": "Save Containers State Paths", + "description": "Stores the state of the dynamic service", + "operationId": "save_containers_state_paths_v1_containers_state_save_post", "responses": { "202": 
{ "description": "Successful Response", @@ -566,7 +579,7 @@ "application/json": { "schema": { "type": "string", - "title": "Response State Save Task V1 Containers State Save Post" + "title": "Response Save Containers State Paths V1 Containers State Save Post" } } } @@ -579,8 +592,9 @@ "tags": [ "containers" ], - "summary": "Pull input ports data", - "operationId": "ports_inputs_pull_task_v1_containers_ports_inputs_pull_post", + "summary": "Pull Container Port Inputs", + "description": "Pull input ports data", + "operationId": "pull_container_port_inputs_v1_containers_ports_inputs_pull_post", "requestBody": { "content": { "application/json": { @@ -608,7 +622,7 @@ "application/json": { "schema": { "type": "string", - "title": "Response Ports Inputs Pull Task V1 Containers Ports Inputs Pull Post" + "title": "Response Pull Container Port Inputs V1 Containers Ports Inputs Pull Post" } } } @@ -631,8 +645,9 @@ "tags": [ "containers" ], - "summary": "Pull output ports data", - "operationId": "ports_outputs_pull_task_v1_containers_ports_outputs_pull_post", + "summary": "Pull Container Port Outputs", + "description": "Pull output ports data", + "operationId": "pull_container_port_outputs_v1_containers_ports_outputs_pull_post", "requestBody": { "content": { "application/json": { @@ -660,7 +675,7 @@ "application/json": { "schema": { "type": "string", - "title": "Response Ports Outputs Pull Task V1 Containers Ports Outputs Pull Post" + "title": "Response Pull Container Port Outputs V1 Containers Ports Outputs Pull Post" } } } @@ -683,8 +698,9 @@ "tags": [ "containers" ], - "summary": "Push output ports data", - "operationId": "ports_outputs_push_task_v1_containers_ports_outputs_push_post", + "summary": "Push Container Port Outputs", + "description": "Push output ports data", + "operationId": "push_container_port_outputs_v1_containers_ports_outputs_push_post", "responses": { "202": { "description": "Successful Response", @@ -692,7 +708,7 @@ "application/json": { "schema": { "type": "string", - "title": "Response Ports Outputs Push Task V1 Containers Ports Outputs Push Post" + "title": "Response Push Container Port Outputs V1 Containers Ports Outputs Push Post" } } } @@ -705,8 +721,9 @@ "tags": [ "containers" ], - "summary": "Restarts previously started containers", - "operationId": "containers_restart_task_v1_containers_restart_post", + "summary": "Restart Containers", + "description": "Restarts previously started user services", + "operationId": "restart_containers_v1_containers_restart_post", "responses": { "202": { "description": "Successful Response", @@ -714,7 +731,7 @@ "application/json": { "schema": { "type": "string", - "title": "Response Containers Restart Task V1 Containers Restart Post" + "title": "Response Restart Containers V1 Containers Restart Post" } } } @@ -727,7 +744,8 @@ "tags": [ "volumes" ], - "summary": "Updates the state of the volume", + "summary": "Put Volume State", + "description": "Updates the state of the volume", "operationId": "put_volume_state_v1_volumes__id__put", "parameters": [ { @@ -771,7 +789,8 @@ "tags": [ "disk" ], - "summary": "Frees up reserved disk space", + "summary": "Free Reserved Disk Space", + "description": "Frees up reserved disk space", "operationId": "free_reserved_disk_space_v1_disk_reserved_free_post", "responses": { "204": { @@ -986,6 +1005,7 @@ "title": "Service Resources" }, "service_additional_metadata": { + "additionalProperties": true, "type": "object", "title": "Service Additional Metadata" } @@ -1230,6 +1250,7 @@ "contentSchema": { "anyOf": [ 
{ + "additionalProperties": true, "type": "object" }, { diff --git a/services/dynamic-sidecar/requirements/_base.txt b/services/dynamic-sidecar/requirements/_base.txt index ca212ee21be6..74f35a571b89 100644 --- a/services/dynamic-sidecar/requirements/_base.txt +++ b/services/dynamic-sidecar/requirements/_base.txt @@ -25,7 +25,7 @@ aiofiles==24.1.0 # -r requirements/_base.in aiohappyeyeballs==2.5.0 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -84,6 +84,8 @@ arrow==1.3.0 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi asyncpg==0.30.0 @@ -128,19 +130,14 @@ certifi==2025.1.31 # httpcore # httpx # requests + # sentry-sdk charset-normalizer==3.4.1 # via requests -click==8.1.8 +click==8.2.1 # via # rich-toolkit # typer # uvicorn -deprecated==1.2.18 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions dnspython==2.7.0 # via email-validator email-validator==2.2.0 @@ -151,12 +148,14 @@ exceptiongroup==1.2.2 # via aio-pika fast-depends==2.4.12 # via faststream -fastapi==0.115.12 +fastapi==0.116.1 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi-lifespan-manager -fastapi-cli==0.0.7 +fastapi-cli==0.0.8 # via fastapi +fastapi-cloud-cli==0.1.5 + # via fastapi-cli fastapi-lifespan-manager==0.1.4 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in faststream==0.5.35 @@ -171,7 +170,7 @@ frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.69.1 +googleapis-common-protos==1.70.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -179,7 +178,7 @@ greenlet==3.1.1 # via sqlalchemy grpcio==1.70.0 # via opentelemetry-exporter-otlp-proto-grpc -h11==0.14.0 +h11==0.16.0 # via # httpcore # uvicorn @@ -188,7 +187,7 @@ h2==4.2.0 # via httpx hpack==4.1.0 # via h2 -httpcore==1.0.7 +httpcore==1.0.9 # via httpx httptools==0.6.4 # via uvicorn @@ -223,7 +222,9 @@ httpx==0.28.1 # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # -r requirements/../../../packages/simcore-sdk/requirements/_base.in # fastapi + # fastapi-cloud-cli hyperframe==6.1.0 # via h2 idna==3.10 @@ -266,6 +267,12 @@ jinja2==3.1.6 # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # fastapi +jsonref==1.1.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.23.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in @@ -274,7 +281,7 @@ jsonschema==4.23.0 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema-specifications==2024.10.1 # via jsonschema -mako==1.3.9 +mako==1.3.10 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -317,7 +324,7 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.30.0 +opentelemetry-api==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in @@ -334,19 +341,19 @@ opentelemetry-api==1.30.0 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.30.0 +opentelemetry-exporter-otlp==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.30.0 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.30.0 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.30.0 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.51b0 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi @@ -356,44 +363,44 @@ opentelemetry-instrumentation==0.51b0 # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.51b0 +opentelemetry-instrumentation-aio-pika==0.55b1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-asgi==0.51b0 +opentelemetry-instrumentation-asgi==0.55b1 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-asyncpg==0.51b0 +opentelemetry-instrumentation-asyncpg==0.55b1 # via - # -r requirements/../../../packages/postgres-database/requirements/_base.in - # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-instrumentation-fastapi==0.51b0 + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in 
+opentelemetry-instrumentation-fastapi==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.51b0 +opentelemetry-instrumentation-httpx==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-logging==0.51b0 +opentelemetry-instrumentation-logging==0.55b1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.51b0 +opentelemetry-instrumentation-redis==0.55b1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.51b0 +opentelemetry-instrumentation-requests==0.55b1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.30.0 +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.30.0 +opentelemetry-sdk==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.51b0 +opentelemetry-semantic-conventions==0.55b1 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi @@ -403,7 +410,7 @@ opentelemetry-semantic-conventions==0.51b0 # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.51b0 +opentelemetry-util-http==0.55b1 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi @@ -473,7 +480,7 @@ propcache==0.3.0 # via # aiohttp # yarl -protobuf==5.29.3 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto @@ -486,7 +493,7 @@ psycopg2-binary==2.9.10 # via sqlalchemy pycryptodome==3.21.0 # via stream-zip -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -546,11 +553,12 @@ pydantic==2.10.6 # -r requirements/_base.in # fast-depends # fastapi + # fastapi-cloud-cli # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.2 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -626,7 +634,7 @@ python-magic==0.4.27 # via -r requirements/_base.in python-multipart==0.0.20 # via fastapi -python-socketio==5.12.1 +python-socketio==5.13.0 # via -r requirements/_base.in pyyaml==6.0.2 # via @@ -726,9 +734,9 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 
+requests==2.32.4 # via opentelemetry-exporter-otlp-proto-http -rich==13.9.4 +rich==14.1.0 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -736,12 +744,18 @@ rich==13.9.4 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in # rich-toolkit # typer -rich-toolkit==0.14.7 - # via fastapi-cli +rich-toolkit==0.15.0 + # via + # fastapi-cli + # fastapi-cloud-cli +rignore==0.6.4 + # via fastapi-cloud-cli rpds-py==0.23.1 # via # jsonschema # referencing +sentry-sdk==2.35.0 + # via fastapi-cloud-cli shellingham==1.5.4 # via typer simple-websocket==1.1.0 @@ -749,7 +763,9 @@ simple-websocket==1.1.0 six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via anyio + # via + # anyio + # asgi-lifespan sqlalchemy==1.4.54 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -784,7 +800,7 @@ sqlalchemy==1.4.54 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in # alembic -starlette==0.46.0 +starlette==0.47.2 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -833,16 +849,17 @@ tqdm==4.67.1 # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in -typer==0.15.2 +typer==0.16.1 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in # fastapi-cli + # fastapi-cloud-cli types-python-dateutil==2.9.0.20241206 # via arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # aiodebug # alembic @@ -851,16 +868,24 @@ typing-extensions==4.12.2 # faststream # flexcache # flexparser + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pint # pydantic # pydantic-core # pydantic-extra-types # rich-toolkit + # starlette # typer + # typing-inspection +typing-inspection==0.4.1 + # via pydantic u-msgpack-python==2.8.0 # via -r requirements/_base.in -urllib3==2.3.0 +urllib3==2.5.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -891,10 +916,12 @@ urllib3==2.3.0 # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # requests + # sentry-sdk uvicorn==0.34.2 # via # 
fastapi # fastapi-cli + # fastapi-cloud-cli uvloop==0.21.0 # via uvicorn watchdog==6.0.0 @@ -905,7 +932,6 @@ websockets==15.0.1 # via uvicorn wrapt==1.17.2 # via - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-httpx diff --git a/services/dynamic-sidecar/requirements/_test.in b/services/dynamic-sidecar/requirements/_test.in index 35f081991de1..7203b0a9320c 100644 --- a/services/dynamic-sidecar/requirements/_test.in +++ b/services/dynamic-sidecar/requirements/_test.in @@ -8,6 +8,7 @@ asgi_lifespan async-asgi-testclient # replacement for fastapi.testclient.TestClient [see b) below] docker faker +fakeredis[lua] flaky pytest pytest-asyncio diff --git a/services/dynamic-sidecar/requirements/_test.txt b/services/dynamic-sidecar/requirements/_test.txt index 7cf2d4f763c2..06bb5e5b8eb7 100644 --- a/services/dynamic-sidecar/requirements/_test.txt +++ b/services/dynamic-sidecar/requirements/_test.txt @@ -10,7 +10,7 @@ aiohappyeyeballs==2.5.0 # via # -c requirements/_base.txt # aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -22,7 +22,9 @@ aiosignal==1.3.2 # -c requirements/_base.txt # aiohttp asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in async-asgi-testclient==1.4.11 # via -r requirements/_test.in attrs==25.1.0 @@ -51,6 +53,8 @@ docker==7.1.0 # via -r requirements/_test.in faker==36.2.2 # via -r requirements/_test.in +fakeredis==2.30.3 + # via -r requirements/_test.in flaky==3.8.1 # via -r requirements/_test.in frozenlist==1.5.0 @@ -74,6 +78,8 @@ jmespath==1.0.1 # aiobotocore # boto3 # botocore +lupa==2.5 + # via fakeredis multidict==6.1.0 # via # -c requirements/_base.txt @@ -81,32 +87,40 @@ multidict==6.1.0 # aiohttp # async-asgi-testclient # yarl -mypy==1.15.0 +mypy==1.16.1 # via sqlalchemy -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via mypy packaging==24.2 # via # -c requirements/_base.txt # pytest +pathspec==0.12.1 + # via mypy pluggy==1.5.0 - # via pytest + # via + # pytest + # pytest-cov propcache==0.3.0 # via # -c requirements/_base.txt # aiohttp # yarl -pytest==8.3.5 +pygments==2.19.1 + # via + # -c requirements/_base.txt + # pytest +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio # pytest-cov # pytest-mock -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in python-dateutil==2.9.0.post0 # via @@ -117,7 +131,12 @@ python-dotenv==1.0.1 # via # -c requirements/_base.txt # -r requirements/_test.in -requests==2.32.3 +redis==5.2.1 + # via + # -c requirements/../../../requirements/constraints.txt + # -c requirements/_base.txt + # fakeredis +requests==2.32.4 # via # -c requirements/_base.txt # async-asgi-testclient @@ -132,6 +151,8 @@ sniffio==1.3.1 # via # -c requirements/_base.txt # asgi-lifespan +sortedcontainers==2.4.0 + # via fakeredis sqlalchemy==1.4.54 # via # -c requirements/../../../requirements/constraints.txt @@ -147,7 +168,7 @@ types-psutil==7.0.0.20250218 # via -r requirements/_test.in types-pyyaml==6.0.12.20241230 # via -r requirements/_test.in -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # mypy @@ -155,7 +176,7 @@ typing-extensions==4.12.2 # types-aiobotocore-s3 tzdata==2025.1 # via faker -urllib3==2.3.0 +urllib3==2.5.0 # 
via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/services/dynamic-sidecar/requirements/_tools.txt b/services/dynamic-sidecar/requirements/_tools.txt index 404d7858ecaa..1f09fdfb0ae7 100644 --- a/services/dynamic-sidecar/requirements/_tools.txt +++ b/services/dynamic-sidecar/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # black @@ -27,11 +27,11 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # -c requirements/_test.txt # black @@ -45,7 +45,10 @@ packaging==24.2 # black # build pathspec==0.12.1 - # via black + # via + # -c requirements/_test.txt + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -71,11 +74,11 @@ pyyaml==6.0.2 # pre-commit ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.2 +setuptools==80.9.0 # via pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/services/dynamic-sidecar/setup.cfg b/services/dynamic-sidecar/setup.cfg index e02fb54d4629..92696537f9a3 100644 --- a/services/dynamic-sidecar/setup.cfg +++ b/services/dynamic-sidecar/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.2.0 +current_version = 1.2.1 commit = True message = services/dynamic-sidecar version: {current_version} → {new_version} tag = False @@ -10,9 +10,9 @@ commit_args = --no-verify [tool:pytest] asyncio_mode = auto asyncio_default_fixture_loop_scope = function -markers = +markers = testit: "marks test to run during development" [mypy] -plugins = +plugins = pydantic.mypy diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/_meta.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/_meta.py index 2c9fb9d5c508..e825970ad840 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/_meta.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/_meta.py @@ -1,12 +1,12 @@ -""" Package Metadata - -""" +"""Package Metadata""" from importlib.metadata import distribution, version _current_distribution = distribution("simcore-service-dynamic-sidecar") + PROJECT_NAME: str = _current_distribution.metadata["Name"] +APP_NAME: str = _current_distribution.metadata["Name"] API_VERSION: str = version("simcore-service-dynamic-sidecar") MAJOR, MINOR, PATCH = API_VERSION.split(".") diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/_dependencies.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/_dependencies.py index a3b3f8081736..23302bdbc89e 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/_dependencies.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/_dependencies.py @@ -1,7 +1,5 @@ -""" Free functions to inject dependencies in routes handlers -""" +"""Free functions to inject dependencies in routes handlers""" -from asyncio import Lock from typing import Annotated, cast from fastapi import Depends, FastAPI, Request @@ -10,12 +8,7 @@ from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient from ...core import rabbitmq -from ...core.settings import ApplicationSettings from 
...models.schemas.application_health import ApplicationHealth -from ...models.shared_store import SharedStore -from ...modules.inputs import InputsState -from ...modules.mounted_fs import MountedVolumes -from ...modules.outputs import OutputsContext, OutputsManager from ...modules.prometheus_metrics import UserServicesMetrics @@ -23,71 +16,29 @@ def get_application(request: Request) -> FastAPI: return cast(FastAPI, request.app) -def get_app_state(request: Request) -> State: +def _get_app_state(request: Request) -> State: return cast(State, request.app.state) def get_application_health( - app_state: Annotated[State, Depends(get_app_state)] + app_state: Annotated[State, Depends(_get_app_state)], ) -> ApplicationHealth: return cast(ApplicationHealth, app_state.application_health) -def get_settings( - app_state: Annotated[State, Depends(get_app_state)] -) -> ApplicationSettings: - return cast(ApplicationSettings, app_state.settings) - - -def get_shared_store( - app_state: Annotated[State, Depends(get_app_state)] -) -> SharedStore: - return cast(SharedStore, app_state.shared_store) - - -def get_mounted_volumes( - app_state: Annotated[State, Depends(get_app_state)] -) -> MountedVolumes: - return cast(MountedVolumes, app_state.mounted_volumes) - - -def get_container_restart_lock( - app_state: Annotated[State, Depends(get_app_state)] -) -> Lock: - return cast(Lock, app_state.container_restart_lock) - - -def get_outputs_manager( - app_state: Annotated[State, Depends(get_app_state)] -) -> OutputsManager: - return cast(OutputsManager, app_state.outputs_manager) - - -def get_outputs_context( - app_state: Annotated[State, Depends(get_app_state)] -) -> OutputsContext: - return cast(OutputsContext, app_state.outputs_context) - - -def get_inputs_state( - app_state: Annotated[State, Depends(get_app_state)] -) -> InputsState: - return cast(InputsState, app_state.inputs_state) - - def get_user_services_metrics( - app_state: Annotated[State, Depends(get_app_state)] + app_state: Annotated[State, Depends(_get_app_state)], ) -> UserServicesMetrics: return cast(UserServicesMetrics, app_state.user_service_metrics) def get_rabbitmq_client( - app: Annotated[FastAPI, Depends(get_application)] + app: Annotated[FastAPI, Depends(get_application)], ) -> RabbitMQClient: return rabbitmq.get_rabbitmq_client(app) def get_rabbitmq_rpc_server( - app: Annotated[FastAPI, Depends(get_application)] + app: Annotated[FastAPI, Depends(get_application)], ) -> RabbitMQRPCClient: return rabbitmq.get_rabbitmq_rpc_server(app) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/_routing.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/_routing.py index bff0bf162443..16e8e67337a9 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/_routing.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/_routing.py @@ -1,4 +1,4 @@ -""" Module to collect, tag and prefix all routes under 'main_router' +"""Module to collect, tag and prefix all routes under 'main_router' Setup and register all routes here form different modules """ diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers.py index 43dc75c75f54..8715fd6841bf 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers.py @@ -1,56 +1,21 @@ 
-# pylint: disable=too-many-arguments +from typing import Any -import logging -from asyncio import Lock -from typing import Annotated, Any, Final - -from common_library.json_serialization import json_loads -from fastapi import APIRouter, Depends, HTTPException +from fastapi import APIRouter, HTTPException from fastapi import Path as PathParam from fastapi import Query, Request, status +from models_library.api_schemas_directorv2.dynamic_services import ContainersComposeSpec from models_library.api_schemas_dynamic_sidecar.containers import ( - ActivityInfo, ActivityInfoOrNone, ) -from pydantic import TypeAdapter, ValidationError from servicelib.fastapi.requests_decorators import cancel_on_disconnect -from ...core.docker_utils import docker_client -from ...core.errors import ( - ContainerExecCommandFailedError, - ContainerExecContainerNotFoundError, - ContainerExecTimeoutError, -) -from ...core.settings import ApplicationSettings -from ...core.validation import ( - ComposeSpecValidation, - get_and_validate_compose_spec, - parse_compose_spec, +from ...services import containers +from ...services.containers import ( + ContainerIsMissingError, + ContainerNotFoundError, + InvalidFilterFormatError, + MissingDockerComposeDownSpecError, ) -from ...models.schemas.containers import ContainersComposeSpec -from ...models.shared_store import SharedStore -from ...modules.container_utils import run_command_in_container -from ...modules.mounted_fs import MountedVolumes -from ._dependencies import ( - get_container_restart_lock, - get_mounted_volumes, - get_settings, - get_shared_store, -) - -_INACTIVE_FOR_LONG_TIME: Final[int] = 2**63 - 1 - -_logger = logging.getLogger(__name__) - - -def _raise_if_container_is_missing( - container_id: str, container_names: list[str] -) -> None: - if container_id not in container_names: - message = f"No container '{container_id}' was started. Started containers '{container_names}'" - _logger.warning(message) - raise HTTPException(status.HTTP_404_NOT_FOUND, detail=message) - router = APIRouter() @@ -63,35 +28,17 @@ def _raise_if_container_is_missing( }, ) @cancel_on_disconnect -async def store_compose_spec( - request: Request, - settings: Annotated[ApplicationSettings, Depends(get_settings)], - containers_compose_spec: ContainersComposeSpec, - shared_store: Annotated[SharedStore, Depends(get_shared_store)], - mounted_volumes: Annotated[MountedVolumes, Depends(get_mounted_volumes)], +async def create_compose_spec( + request: Request, containers_compose_spec: ContainersComposeSpec ): """ Validates and stores the docker compose spec for the user services. 
""" _ = request - async with shared_store: - compose_spec_validation: ComposeSpecValidation = ( - await get_and_validate_compose_spec( - settings=settings, - compose_file_content=containers_compose_spec.docker_compose_yaml, - mounted_volumes=mounted_volumes, - ) - ) - shared_store.compose_spec = compose_spec_validation.compose_spec - shared_store.container_names = compose_spec_validation.current_container_names - shared_store.original_to_container_names = ( - compose_spec_validation.original_to_current_container_names - ) - - _logger.info("Validated compose-spec:\n%s", f"{shared_store.compose_spec}") - - assert shared_store.compose_spec # nosec + await containers.create_compose_spec( + app=request.app, containers_compose_spec=containers_compose_spec + ) @router.get( @@ -103,8 +50,6 @@ async def store_compose_spec( @cancel_on_disconnect async def containers_docker_inspect( request: Request, - shared_store: Annotated[SharedStore, Depends(get_shared_store)], - container_restart_lock: Annotated[Lock, Depends(get_container_restart_lock)], only_status: bool = Query( # noqa: FBT001 default=False, description="if True only show the status of the container" ), @@ -114,91 +59,20 @@ async def containers_docker_inspect( the status of the containers is returned """ _ = request + return await containers.containers_docker_inspect( + app=request.app, only_status=only_status + ) - def _format_result(container_inspect: dict[str, Any]) -> dict[str, Any]: - if only_status: - container_state = container_inspect.get("State", {}) - # pending is another fake state use to share more information with the frontend - return { - "Status": container_state.get("Status", "pending"), - "Error": container_state.get("Error", ""), - } - - return container_inspect - - async with container_restart_lock, docker_client() as docker: - container_names = shared_store.container_names - - results = {} - - for container in container_names: - container_instance = await docker.containers.get(container) - container_inspect = await container_instance.show() - results[container] = _format_result(container_inspect) - - return results - - -@router.get( - "/containers/activity", -) +@router.get("/containers/activity") @cancel_on_disconnect -async def get_containers_activity( - request: Request, - settings: Annotated[ApplicationSettings, Depends(get_settings)], - shared_store: Annotated[SharedStore, Depends(get_shared_store)], -) -> ActivityInfoOrNone: +async def get_containers_activity(request: Request) -> ActivityInfoOrNone: + """ + If user service declared an inactivity hook, this endpoint provides + information about how much time has passed since the service became inactive. 
+ """ _ = request - inactivity_command = settings.DY_SIDECAR_CALLBACKS_MAPPING.inactivity - if inactivity_command is None: - return None - - container_name = inactivity_command.service - - try: - inactivity_response = await run_command_in_container( - shared_store.original_to_container_names[inactivity_command.service], - command=inactivity_command.command, - timeout=inactivity_command.timeout, - ) - except ( - ContainerExecContainerNotFoundError, - ContainerExecCommandFailedError, - ContainerExecTimeoutError, - ): - _logger.warning( - "Could not run inactivity command '%s' in container '%s'", - inactivity_command.command, - container_name, - exc_info=True, - ) - return ActivityInfo(seconds_inactive=_INACTIVE_FOR_LONG_TIME) - - try: - return TypeAdapter(ActivityInfo).validate_json(inactivity_response) - except ValidationError: - _logger.warning( - "Could not parse command result '%s' as '%s'", - inactivity_response, - ActivityInfo.__name__, - exc_info=True, - ) - - return ActivityInfo(seconds_inactive=_INACTIVE_FOR_LONG_TIME) - - -# Some of the operations and sub-resources on containers are implemented as long-running tasks. -# Handlers for these operations can be found in: -# -# POST /containers : SEE containers_long_running_tasks::create_service_containers_task -# POST /containers:down : SEE containers_long_running_tasks::runs_docker_compose_down_task -# POST /containers/state:restore : SEE containers_long_running_tasks::state_restore_task -# POST /containers/state:save : SEE containers_long_running_tasks::state_save_task -# POST /containers/ports/inputs:pull : SEE containers_long_running_tasks::ports_inputs_pull_task -# POST /containers/ports/outputs:pull : SEE containers_long_running_tasks::ports_outputs_pull_task -# POST /containers/ports/outputs:push : SEE containers_long_running_tasks::ports_outputs_push_task -# + return await containers.get_containers_activity(app=request.app) @router.get( @@ -215,7 +89,6 @@ async def get_containers_activity( @cancel_on_disconnect async def get_containers_name( request: Request, - shared_store: Annotated[SharedStore, Depends(get_shared_store)], filters: str = Query( ..., description=( @@ -236,43 +109,12 @@ async def get_containers_name( """ _ = request - filters_dict: dict[str, str] = json_loads(filters) - if not isinstance(filters_dict, dict): - raise HTTPException( - status.HTTP_422_UNPROCESSABLE_ENTITY, - detail=f"Provided filters, could not parsed {filters_dict}", - ) - network_name: str | None = filters_dict.get("network", None) - exclude: str | None = filters_dict.get("exclude", None) - - stored_compose_content = shared_store.compose_spec - if stored_compose_content is None: - raise HTTPException( - status.HTTP_404_NOT_FOUND, - detail="No spec for docker compose down was found", - ) - - compose_spec = parse_compose_spec(stored_compose_content) - - container_name = None - - spec_services = compose_spec["services"] - for service in spec_services: - service_content = spec_services[service] - if network_name in service_content.get("networks", {}): - if exclude is not None and exclude in service_content["container_name"]: - # removing this container from results - continue - container_name = service_content["container_name"] - break - - if container_name is None: - raise HTTPException( - status.HTTP_404_NOT_FOUND, - detail=f"No container found for network={network_name}", - ) - - return f"{container_name}" + try: + return await containers.get_containers_name(app=request.app, filters=filters) + except InvalidFilterFormatError as e: + raise 
HTTPException(status.HTTP_422_UNPROCESSABLE_ENTITY, detail=f"{e}") from e + except (MissingDockerComposeDownSpecError, ContainerNotFoundError) as e: + raise HTTPException(status.HTTP_404_NOT_FOUND, detail=f"{e}") from e @router.get( @@ -284,16 +126,14 @@ async def get_containers_name( ) @cancel_on_disconnect async def inspect_container( - request: Request, - shared_store: Annotated[SharedStore, Depends(get_shared_store)], - container_id: str = PathParam(..., alias="id"), + request: Request, container_id: str = PathParam(..., alias="id") ) -> dict[str, Any]: """Returns information about the container, like docker inspect command""" _ = request - _raise_if_container_is_missing(container_id, shared_store.container_names) - - async with docker_client() as docker: - container_instance = await docker.containers.get(container_id) - inspect_result: dict[str, Any] = await container_instance.show() - return inspect_result + try: + return await containers.inspect_container( + app=request.app, container_id=container_id + ) + except ContainerIsMissingError as e: + raise HTTPException(status.HTTP_404_NOT_FOUND, detail=f"{e}") from e diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers_extension.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers_extension.py index d5cf21b87230..913216c9a290 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers_extension.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers_extension.py @@ -1,25 +1,13 @@ -import logging from typing import Annotated -from aiodocker.networks import DockerNetwork from fastapi import APIRouter, Depends, FastAPI from fastapi import Path as PathParam -from fastapi import Request, Response, status +from fastapi import Response, status from models_library.services import ServiceOutput from pydantic.main import BaseModel -from simcore_sdk.node_ports_v2.port_utils import is_file_type - -from ...core.docker_utils import docker_client -from ...modules.inputs import disable_inputs_pulling, enable_inputs_pulling -from ...modules.mounted_fs import MountedVolumes -from ...modules.outputs import ( - OutputsContext, - disable_event_propagation, - enable_event_propagation, -) -from ._dependencies import get_application, get_mounted_volumes, get_outputs_context -_logger = logging.getLogger(__name__) +from ...services import container_extensions +from ._dependencies import get_application class CreateDirsRequestItem(BaseModel): @@ -51,7 +39,6 @@ class DetachContainerFromNetworkItem(_BaseNetworkItem): @router.patch( "/containers/ports/io", - summary="Enable/disable ports i/o", response_class=Response, status_code=status.HTTP_204_NO_CONTENT, ) @@ -59,96 +46,53 @@ async def toggle_ports_io( patch_ports_io_item: PatchPortsIOItem, app: Annotated[FastAPI, Depends(get_application)], ) -> None: - if patch_ports_io_item.enable_outputs: - await enable_event_propagation(app) - else: - await disable_event_propagation(app) - - if patch_ports_io_item.enable_inputs: - enable_inputs_pulling(app) - else: - disable_inputs_pulling(app) + """Enable/disable ports i/o""" + await container_extensions.toggle_ports_io( + app, + enable_outputs=patch_ports_io_item.enable_outputs, + enable_inputs=patch_ports_io_item.enable_inputs, + ) @router.post( "/containers/ports/outputs/dirs", - summary=( - "Creates the output directories declared by the docker images's labels. 
" - "It is more convenient to pass the labels from director-v2, " - "since it already has all the machinery to call into director-v0 " - "to retrieve them." - ), response_class=Response, status_code=status.HTTP_204_NO_CONTENT, ) async def create_output_dirs( request_mode: CreateDirsRequestItem, - mounted_volumes: Annotated[MountedVolumes, Depends(get_mounted_volumes)], - outputs_context: Annotated[OutputsContext, Depends(get_outputs_context)], + app: Annotated[FastAPI, Depends(get_application)], ) -> None: - outputs_path = mounted_volumes.disk_outputs_path - file_type_port_keys = [] - non_file_port_keys = [] - for port_key, service_output in request_mode.outputs_labels.items(): - _logger.debug("Parsing output labels, detected: %s", f"{port_key=}") - if is_file_type(service_output.property_type): - dir_to_create = outputs_path / port_key - dir_to_create.mkdir(parents=True, exist_ok=True) - file_type_port_keys.append(port_key) - else: - non_file_port_keys.append(port_key) - - _logger.debug( - "Setting: %s, %s", f"{file_type_port_keys=}", f"{non_file_port_keys=}" + """ + Creates the output directories declared by the docker images's labels. + It is more convenient to pass the labels from director-v2, + since it already has all the machinery to call into director-v0 + to retrieve them. + """ + await container_extensions.create_output_dirs( + app, outputs_labels=request_mode.outputs_labels ) - await outputs_context.set_file_type_port_keys(file_type_port_keys) - outputs_context.non_file_type_port_keys = non_file_port_keys @router.post( "/containers/{id}/networks:attach", - summary="attach container to a network, if not already attached", response_class=Response, status_code=status.HTTP_204_NO_CONTENT, ) async def attach_container_to_network( - request: Request, item: AttachContainerToNetworkItem, container_id: Annotated[str, PathParam(..., alias="id")], ) -> None: - assert request # nosec - - async with docker_client() as docker: - container_instance = await docker.containers.get(container_id) - container_inspect = await container_instance.show() - - attached_network_ids: set[str] = { - x["NetworkID"] - for x in container_inspect["NetworkSettings"]["Networks"].values() - } - - if item.network_id in attached_network_ids: - _logger.debug( - "Container %s already attached to network %s", - container_id, - item.network_id, - ) - return - - # NOTE: A docker network is only visible on a docker node when it is - # used by a container - network = DockerNetwork(docker=docker, id_=item.network_id) - await network.connect( - { - "Container": container_id, - "EndpointConfig": {"Aliases": item.network_aliases}, - } - ) + """attach container to a network, if not already attached""" + await container_extensions.attach_container_to_network( + container_id=container_id, + network_id=item.network_id, + network_aliases=item.network_aliases, + ) @router.post( "/containers/{id}/networks:detach", - summary="detach container from a network, if not already detached", response_class=Response, status_code=status.HTTP_204_NO_CONTENT, ) @@ -156,23 +100,8 @@ async def detach_container_from_network( item: DetachContainerFromNetworkItem, container_id: Annotated[str, PathParam(..., alias="id")], ) -> None: - async with docker_client() as docker: - container_instance = await docker.containers.get(container_id) - container_inspect = await container_instance.show() - - attached_network_ids: set[str] = set( - container_inspect["NetworkSettings"]["Networks"].keys() - ) - - if item.network_id not in attached_network_ids: - 
_logger.debug( - "Container %s already detached from network %s", - container_id, - item.network_id, - ) - return - - # NOTE: A docker network is only visible on a docker node when it is - # used by a container - network = DockerNetwork(docker=docker, id_=item.network_id) - await network.disconnect({"Container": container_id, "Force": True}) + """detach container from a network, if not already detached""" + await container_extensions.detach_container_from_network( + container_id=container_id, + network_id=item.network_id, + ) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers_long_running_tasks.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers_long_running_tasks.py index af857013a821..6aebf17f0ee8 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers_long_running_tasks.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/containers_long_running_tasks.py @@ -1,318 +1,189 @@ -from textwrap import dedent -from typing import Annotated, cast +from typing import Annotated -from fastapi import APIRouter, Depends, FastAPI, Request, status -from servicelib.fastapi.long_running_tasks.server import ( - TaskAlreadyRunningError, - TaskId, - TasksManager, - get_tasks_manager, - start_task, -) +from fastapi import APIRouter, Depends, Request, status +from models_library.api_schemas_directorv2.dynamic_services import ContainersCreate +from servicelib.fastapi.long_running_tasks._manager import FastAPILongRunningManager +from servicelib.fastapi.long_running_tasks.server import get_long_running_manager from servicelib.fastapi.requests_decorators import cancel_on_disconnect +from servicelib.long_running_tasks.models import TaskId -from ...core.settings import ApplicationSettings -from ...models.schemas.application_health import ApplicationHealth -from ...models.schemas.containers import ContainersCreate -from ...models.shared_store import SharedStore -from ...modules.inputs import InputsState -from ...modules.long_running_tasks import ( - task_containers_restart, - task_create_service_containers, - task_ports_inputs_pull, - task_ports_outputs_pull, - task_ports_outputs_push, - task_pull_user_servcices_docker_images, - task_restore_state, - task_runs_docker_compose_down, - task_save_state, -) -from ...modules.mounted_fs import MountedVolumes -from ...modules.outputs import OutputsManager -from ._dependencies import ( - get_application, - get_application_health, - get_inputs_state, - get_mounted_volumes, - get_outputs_manager, - get_settings, - get_shared_store, -) +from ...services import containers_long_running_tasks router = APIRouter() @router.post( "/containers/images:pull", - summary="Pulls all the docker container images for the user services", status_code=status.HTTP_202_ACCEPTED, response_model=TaskId, ) @cancel_on_disconnect -async def pull_user_servcices_docker_images( +async def pull_container_images( request: Request, - tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)], - shared_store: Annotated[SharedStore, Depends(get_shared_store)], - app: Annotated[FastAPI, Depends(get_application)], + long_running_manager: Annotated[ + FastAPILongRunningManager, Depends(get_long_running_manager) + ], ) -> TaskId: - assert request # nosec - - try: - return start_task( - tasks_manager, - task=task_pull_user_servcices_docker_images, - unique=True, - app=app, - shared_store=shared_store, - ) - except TaskAlreadyRunningError as e: - return cast(str, 
e.managed_task.task_id) # type: ignore[attr-defined] # pylint:disable=no-member - + """Pulls all the docker container images for the user services""" + _ = request + return await containers_long_running_tasks.pull_user_services_images( + long_running_manager.rpc_client, long_running_manager.lrt_namespace + ) -@router.post( - "/containers", - summary=dedent( - """ - Starts the containers as defined in ContainerCreate by: - - cleaning up resources from previous runs if any - - starting the containers - Progress may be obtained through URL - Process may be cancelled through URL - """ - ).strip(), - status_code=status.HTTP_202_ACCEPTED, - response_model=TaskId, -) +@router.post("/containers", status_code=status.HTTP_202_ACCEPTED, response_model=TaskId) @cancel_on_disconnect -async def create_service_containers_task( # pylint: disable=too-many-arguments +async def create_containers( # pylint: disable=too-many-arguments request: Request, containers_create: ContainersCreate, - tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)], - settings: Annotated[ApplicationSettings, Depends(get_settings)], - shared_store: Annotated[SharedStore, Depends(get_shared_store)], - app: Annotated[FastAPI, Depends(get_application)], - application_health: Annotated[ApplicationHealth, Depends(get_application_health)], + long_running_manager: Annotated[ + FastAPILongRunningManager, Depends(get_long_running_manager) + ], ) -> TaskId: - assert request # nosec - - try: - return start_task( - tasks_manager, - task=task_create_service_containers, - unique=True, - settings=settings, - containers_create=containers_create, - shared_store=shared_store, - app=app, - application_health=application_health, - ) - except TaskAlreadyRunningError as e: - return cast(str, e.managed_task.task_id) # type: ignore[attr-defined] # pylint:disable=no-member + """ + Starts the containers as defined in ContainerCreate by: + - cleaning up resources from previous runs if any + - starting the containers + + Progress may be obtained through URL + Process may be cancelled through URL + """ + _ = request + return await containers_long_running_tasks.create_user_services( + long_running_manager.rpc_client, + long_running_manager.lrt_namespace, + containers_create, + ) @router.post( - "/containers:down", - summary="Remove the previously started containers", - status_code=status.HTTP_202_ACCEPTED, - response_model=TaskId, + "/containers:down", status_code=status.HTTP_202_ACCEPTED, response_model=TaskId ) @cancel_on_disconnect -async def runs_docker_compose_down_task( +async def down_containers( request: Request, - tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)], - settings: Annotated[ApplicationSettings, Depends(get_settings)], - shared_store: Annotated[SharedStore, Depends(get_shared_store)], - app: Annotated[FastAPI, Depends(get_application)], - mounted_volumes: Annotated[MountedVolumes, Depends(get_mounted_volumes)], + long_running_manager: Annotated[ + FastAPILongRunningManager, Depends(get_long_running_manager) + ], ) -> TaskId: - assert request # nosec - - try: - return start_task( - tasks_manager, - task=task_runs_docker_compose_down, - unique=True, - app=app, - shared_store=shared_store, - settings=settings, - mounted_volumes=mounted_volumes, - ) - except TaskAlreadyRunningError as e: - return cast(str, e.managed_task.task_id) # type: ignore[attr-defined] # pylint:disable=no-member + """Remove the previously started containers""" + _ = request + return await containers_long_running_tasks.remove_user_services( + 
long_running_manager.rpc_client, long_running_manager.lrt_namespace + ) @router.post( "/containers/state:restore", - summary="Restores the state of the dynamic service", status_code=status.HTTP_202_ACCEPTED, response_model=TaskId, ) @cancel_on_disconnect -async def state_restore_task( +async def restore_containers_state_paths( request: Request, - tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)], - settings: Annotated[ApplicationSettings, Depends(get_settings)], - mounted_volumes: Annotated[MountedVolumes, Depends(get_mounted_volumes)], - app: Annotated[FastAPI, Depends(get_application)], + long_running_manager: Annotated[ + FastAPILongRunningManager, Depends(get_long_running_manager) + ], ) -> TaskId: - assert request # nosec - - try: - return start_task( - tasks_manager, - task=task_restore_state, - unique=True, - settings=settings, - mounted_volumes=mounted_volumes, - app=app, - ) - except TaskAlreadyRunningError as e: - return cast(str, e.managed_task.task_id) # type: ignore[attr-defined] # pylint:disable=no-member + """Restores the state of the dynamic service""" + _ = request + return await containers_long_running_tasks.restore_user_services_state_paths( + long_running_manager.rpc_client, long_running_manager.lrt_namespace + ) @router.post( "/containers/state:save", - summary="Stores the state of the dynamic service", status_code=status.HTTP_202_ACCEPTED, response_model=TaskId, ) @cancel_on_disconnect -async def state_save_task( +async def save_containers_state_paths( request: Request, - tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)], - app: Annotated[FastAPI, Depends(get_application)], - mounted_volumes: Annotated[MountedVolumes, Depends(get_mounted_volumes)], - settings: Annotated[ApplicationSettings, Depends(get_settings)], + long_running_manager: Annotated[ + FastAPILongRunningManager, Depends(get_long_running_manager) + ], ) -> TaskId: - assert request # nosec - - try: - return start_task( - tasks_manager, - task=task_save_state, - unique=True, - settings=settings, - mounted_volumes=mounted_volumes, - app=app, - ) - except TaskAlreadyRunningError as e: - return cast(str, e.managed_task.task_id) # type: ignore[attr-defined] # pylint:disable=no-member + """Stores the state of the dynamic service""" + _ = request + return await containers_long_running_tasks.save_user_services_state_paths( + long_running_manager.rpc_client, long_running_manager.lrt_namespace + ) @router.post( "/containers/ports/inputs:pull", - summary="Pull input ports data", status_code=status.HTTP_202_ACCEPTED, response_model=TaskId, ) @cancel_on_disconnect -async def ports_inputs_pull_task( +async def pull_container_port_inputs( request: Request, - tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)], - app: Annotated[FastAPI, Depends(get_application)], - settings: Annotated[ApplicationSettings, Depends(get_settings)], - mounted_volumes: Annotated[MountedVolumes, Depends(get_mounted_volumes)], - inputs_state: Annotated[InputsState, Depends(get_inputs_state)], + long_running_manager: Annotated[ + FastAPILongRunningManager, Depends(get_long_running_manager) + ], port_keys: list[str] | None = None, ) -> TaskId: - assert request # nosec - - try: - return start_task( - tasks_manager, - task=task_ports_inputs_pull, - unique=True, - port_keys=port_keys, - mounted_volumes=mounted_volumes, - app=app, - settings=settings, - inputs_pulling_enabled=inputs_state.inputs_pulling_enabled, - ) - except TaskAlreadyRunningError as e: - return cast(str, e.managed_task.task_id) # 
type: ignore[attr-defined] # pylint:disable=no-member + """Pull input ports data""" + _ = request + return await containers_long_running_tasks.pull_user_services_input_ports( + long_running_manager.rpc_client, long_running_manager.lrt_namespace, port_keys + ) @router.post( "/containers/ports/outputs:pull", - summary="Pull output ports data", status_code=status.HTTP_202_ACCEPTED, response_model=TaskId, ) @cancel_on_disconnect -async def ports_outputs_pull_task( +async def pull_container_port_outputs( request: Request, - tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)], - app: Annotated[FastAPI, Depends(get_application)], - mounted_volumes: Annotated[MountedVolumes, Depends(get_mounted_volumes)], + long_running_manager: Annotated[ + FastAPILongRunningManager, Depends(get_long_running_manager) + ], port_keys: list[str] | None = None, ) -> TaskId: - assert request # nosec - - try: - return start_task( - tasks_manager, - task=task_ports_outputs_pull, - unique=True, - port_keys=port_keys, - mounted_volumes=mounted_volumes, - app=app, - ) - except TaskAlreadyRunningError as e: - return cast(str, e.managed_task.task_id) # type: ignore[attr-defined] # pylint:disable=no-member + """Pull output ports data""" + _ = request + return await containers_long_running_tasks.pull_user_services_output_ports( + long_running_manager.rpc_client, long_running_manager.lrt_namespace, port_keys + ) @router.post( "/containers/ports/outputs:push", - summary="Push output ports data", status_code=status.HTTP_202_ACCEPTED, response_model=TaskId, ) @cancel_on_disconnect -async def ports_outputs_push_task( +async def push_container_port_outputs( request: Request, - tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)], - outputs_manager: Annotated[OutputsManager, Depends(get_outputs_manager)], - app: Annotated[FastAPI, Depends(get_application)], + long_running_manager: Annotated[ + FastAPILongRunningManager, Depends(get_long_running_manager) + ], ) -> TaskId: - assert request # nosec - - try: - return start_task( - tasks_manager, - task=task_ports_outputs_push, - unique=True, - outputs_manager=outputs_manager, - app=app, - ) - except TaskAlreadyRunningError as e: - return cast(str, e.managed_task.task_id) # type: ignore[attr-defined] # pylint:disable=no-member + """Push output ports data""" + _ = request + return await containers_long_running_tasks.push_user_services_output_ports( + long_running_manager.rpc_client, long_running_manager.lrt_namespace + ) @router.post( "/containers:restart", - summary="Restarts previously started containers", status_code=status.HTTP_202_ACCEPTED, response_model=TaskId, ) @cancel_on_disconnect -async def containers_restart_task( +async def restart_containers( request: Request, - tasks_manager: Annotated[TasksManager, Depends(get_tasks_manager)], - app: Annotated[FastAPI, Depends(get_application)], - settings: Annotated[ApplicationSettings, Depends(get_settings)], - shared_store: Annotated[SharedStore, Depends(get_shared_store)], + long_running_manager: Annotated[ + FastAPILongRunningManager, Depends(get_long_running_manager) + ], ) -> TaskId: - assert request # nosec - - try: - return start_task( - tasks_manager, - task=task_containers_restart, - unique=True, - app=app, - settings=settings, - shared_store=shared_store, - ) - except TaskAlreadyRunningError as e: - return cast(str, e.managed_task.task_id) # type: ignore[attr-defined] # pylint:disable=no-member + """Restarts previously started user services""" + _ = request + return await 
containers_long_running_tasks.restart_user_services( + long_running_manager.rpc_client, long_running_manager.lrt_namespace + ) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/disk.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/disk.py index 4ff0cc2dbcae..92bbb95640f7 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/disk.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/disk.py @@ -5,10 +5,7 @@ router = APIRouter() -@router.post( - "/disk/reserved:free", - summary="Frees up reserved disk space", - status_code=status.HTTP_204_NO_CONTENT, -) +@router.post("/disk/reserved:free", status_code=status.HTTP_204_NO_CONTENT) async def free_reserved_disk_space() -> None: - disk.remove_reserved_disk_space() + """Frees up reserved disk space""" + disk.free_reserved_disk_space() diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/volumes.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/volumes.py index 793fbc687e9f..3547912b7e7b 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/volumes.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rest/volumes.py @@ -16,14 +16,13 @@ class PutVolumeItem(BaseModel): status: VolumeStatus -@router.put( - "/volumes/{id}", - summary="Updates the state of the volume", - status_code=status.HTTP_204_NO_CONTENT, -) +@router.put("/volumes/{id}", status_code=status.HTTP_204_NO_CONTENT) async def put_volume_state( item: PutVolumeItem, app: Annotated[FastAPI, Depends(get_application)], volume_category: Annotated[VolumeCategory, PathParam(..., alias="id")], ) -> None: - await volumes.save_volume_state(app, status=item.status, category=volume_category) + """Updates the state of the volume""" + await volumes.update_volume_status( + app, status=item.status, category=volume_category + ) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/_containers.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/_containers.py new file mode 100644 index 000000000000..543742760cd6 --- /dev/null +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/_containers.py @@ -0,0 +1,68 @@ +from typing import Any + +from fastapi import FastAPI +from models_library.api_schemas_directorv2.dynamic_services import ContainersComposeSpec +from models_library.api_schemas_dynamic_sidecar.containers import ( + ActivityInfoOrNone, +) +from servicelib.rabbitmq import RPCRouter + +from ...core.validation import InvalidComposeSpecError +from ...services import containers + +router = RPCRouter() + + +@router.expose(reraise_if_error_type=(InvalidComposeSpecError,)) +async def create_compose_spec( + app: FastAPI, + *, + containers_compose_spec: ContainersComposeSpec, +) -> None: + """ + Validates and stores the docker compose spec for the user services. 
+ """ + await containers.create_compose_spec( + app, containers_compose_spec=containers_compose_spec + ) + + +@router.expose() +async def containers_docker_inspect( + app: FastAPI, *, only_status: bool +) -> dict[str, Any]: + """ + Returns entire docker inspect data, if only_state is True, + the status of the containers is returned + """ + return await containers.containers_docker_inspect(app, only_status=only_status) + + +@router.expose() +async def get_containers_activity(app: FastAPI) -> ActivityInfoOrNone: + """ + If user service declared an inactivity hook, this endpoint provides + information about how much time has passed since the service became inactive. + """ + return await containers.get_containers_activity(app=app) + + +@router.expose() +async def get_containers_name(app: FastAPI, *, filters: str) -> str | dict[str, Any]: + """ + Searches for the container's name given the network + on which the proxy communicates with it. + Supported filters: + network: matches against the exact network name + assigned to the container; `will include` + containers + exclude: matches if contained in the name of the + container; `will exclude` containers + """ + return await containers.get_containers_name(app=app, filters=filters) + + +@router.expose() +async def inspect_container(app: FastAPI, *, container_id: str) -> dict[str, Any]: + """Returns information about the container, like docker inspect command""" + return await containers.inspect_container(app=app, container_id=container_id) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/_containers_extension.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/_containers_extension.py new file mode 100644 index 000000000000..dd95092a25e9 --- /dev/null +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/_containers_extension.py @@ -0,0 +1,54 @@ +from fastapi import FastAPI +from models_library.services import ServiceOutput +from servicelib.rabbitmq import RPCRouter + +from ...services import container_extensions + +router = RPCRouter() + + +@router.expose() +async def toggle_ports_io( + app: FastAPI, *, enable_outputs: bool, enable_inputs: bool +) -> None: + """Enable/disable ports i/o""" + await container_extensions.toggle_ports_io( + app, enable_outputs=enable_outputs, enable_inputs=enable_inputs + ) + + +@router.expose() +async def create_output_dirs( + app: FastAPI, *, outputs_labels: dict[str, ServiceOutput] +) -> None: + """ + Creates the output directories declared by the docker images's labels. + It is more convenient to pass the labels from director-v2, + since it already has all the machinery to call into director-v0 + to retrieve them. 
+ """ + await container_extensions.create_output_dirs(app, outputs_labels=outputs_labels) + + +@router.expose() +async def attach_container_to_network( + app: FastAPI, *, container_id: str, network_id: str, network_aliases: list[str] +) -> None: + """attach container to a network, if not already attached""" + _ = app + await container_extensions.attach_container_to_network( + container_id=container_id, + network_id=network_id, + network_aliases=network_aliases, + ) + + +@router.expose() +async def detach_container_from_network( + app: FastAPI, *, container_id: str, network_id: str +) -> None: + """detach container from a network, if not already detached""" + _ = app + await container_extensions.detach_container_from_network( + container_id=container_id, network_id=network_id + ) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/_containers_long_running_tasks.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/_containers_long_running_tasks.py new file mode 100644 index 000000000000..d97881bc90d9 --- /dev/null +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/_containers_long_running_tasks.py @@ -0,0 +1,117 @@ +from fastapi import FastAPI +from models_library.api_schemas_directorv2.dynamic_services import ContainersCreate +from servicelib.long_running_tasks.models import LRTNamespace, TaskId +from servicelib.rabbitmq import RPCRouter + +from ...core.rabbitmq import get_rabbitmq_rpc_client +from ...services import containers_long_running_tasks + +router = RPCRouter() + + +@router.expose() +async def pull_user_services_images( + app: FastAPI, *, lrt_namespace: LRTNamespace +) -> TaskId: + """Pulls all the docker container images for the user services""" + rpc_client = get_rabbitmq_rpc_client(app) + return await containers_long_running_tasks.pull_user_services_images( + rpc_client, lrt_namespace + ) + + +@router.expose() +async def create_user_services( + app: FastAPI, *, lrt_namespace: LRTNamespace, containers_create: ContainersCreate +) -> TaskId: + """ + Starts the containers as defined in ContainerCreate by: + - cleaning up resources from previous runs if any + - starting the containers + + Progress may be obtained through URL + Process may be cancelled through URL + """ + rpc_client = get_rabbitmq_rpc_client(app) + return await containers_long_running_tasks.create_user_services( + rpc_client, lrt_namespace, containers_create + ) + + +@router.expose() +async def remove_user_services(app: FastAPI, *, lrt_namespace: LRTNamespace) -> TaskId: + """Remove the previously started containers""" + rpc_client = get_rabbitmq_rpc_client(app) + return await containers_long_running_tasks.remove_user_services( + rpc_client, lrt_namespace + ) + + +@router.expose() +async def restore_user_services_state_paths( + app: FastAPI, *, lrt_namespace: LRTNamespace +) -> TaskId: + """Restores the state of the dynamic service""" + rpc_client = get_rabbitmq_rpc_client(app) + return await containers_long_running_tasks.restore_user_services_state_paths( + rpc_client, lrt_namespace + ) + + +@router.expose() +async def save_user_services_state_paths( + app: FastAPI, *, lrt_namespace: LRTNamespace +) -> TaskId: + """Stores the state of the dynamic service""" + rpc_client = get_rabbitmq_rpc_client(app) + return await containers_long_running_tasks.save_user_services_state_paths( + rpc_client, lrt_namespace + ) + + +@router.expose() +async def pull_user_services_input_ports( + app: FastAPI, + *, + lrt_namespace: LRTNamespace, + port_keys: 
+ + +@router.expose() +async def remove_user_services(app: FastAPI, *, lrt_namespace: LRTNamespace) -> TaskId: + """Remove the previously started containers""" + rpc_client = get_rabbitmq_rpc_client(app) + return await containers_long_running_tasks.remove_user_services( + rpc_client, lrt_namespace + ) + + +@router.expose() +async def restore_user_services_state_paths( + app: FastAPI, *, lrt_namespace: LRTNamespace +) -> TaskId: + """Restores the state of the dynamic service""" + rpc_client = get_rabbitmq_rpc_client(app) + return await containers_long_running_tasks.restore_user_services_state_paths( + rpc_client, lrt_namespace + ) + + +@router.expose() +async def save_user_services_state_paths( + app: FastAPI, *, lrt_namespace: LRTNamespace +) -> TaskId: + """Stores the state of the dynamic service""" + rpc_client = get_rabbitmq_rpc_client(app) + return await containers_long_running_tasks.save_user_services_state_paths( + rpc_client, lrt_namespace + ) + + +@router.expose() +async def pull_user_services_input_ports( + app: FastAPI, + *, + lrt_namespace: LRTNamespace, + port_keys: list[str] | None, +) -> TaskId: + """Pull input ports data""" + rpc_client = get_rabbitmq_rpc_client(app) + return await containers_long_running_tasks.pull_user_services_input_ports( + rpc_client, lrt_namespace, port_keys + ) + + +@router.expose() +async def pull_user_services_output_ports( + app: FastAPI, + *, + lrt_namespace: LRTNamespace, + port_keys: list[str] | None, +) -> TaskId: + """Pull output ports data""" + rpc_client = get_rabbitmq_rpc_client(app) + return await containers_long_running_tasks.pull_user_services_output_ports( + rpc_client, lrt_namespace, port_keys + ) + + +@router.expose() +async def push_user_services_output_ports( + app: FastAPI, *, lrt_namespace: LRTNamespace +) -> TaskId: + """Push output ports data""" + rpc_client = get_rabbitmq_rpc_client(app) + return await containers_long_running_tasks.push_user_services_output_ports( + rpc_client, lrt_namespace + ) + + +@router.expose() +async def restart_user_services(app: FastAPI, *, lrt_namespace: LRTNamespace) -> TaskId: + """Restarts previously started user services""" + rpc_client = get_rabbitmq_rpc_client(app) + return await containers_long_running_tasks.restart_user_services( + rpc_client, lrt_namespace + ) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/_disk.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/_disk.py index f3bba913ac94..46b91df4b41e 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/_disk.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/_disk.py @@ -8,4 +8,5 @@ @router.expose() async def free_reserved_disk_space(_: FastAPI) -> None: - disk.remove_reserved_disk_space() + """Frees up reserved disk space""" + disk.free_reserved_disk_space() diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/_disk_usage.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/_disk_usage.py index a0026e86dd9f..36d9d47d880c 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/_disk_usage.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/_disk_usage.py @@ -15,6 +15,10 @@ @router.expose() @validate_call(config={"arbitrary_types_allowed": True}) async def update_disk_usage(app: FastAPI, *, usage: dict[str, DiskUsage]) -> None: + """ + Updates the disk usage reported to the frontend via external tooling. + Used by the efs guardian. 
+ """ disk_usage_monitor = get_disk_usage_monitor(app) if disk_usage_monitor is None: diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/_volumes.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/_volumes.py index 6f51d7ff629f..02349e104869 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/_volumes.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/_volumes.py @@ -10,7 +10,8 @@ @router.expose() @validate_call(config={"arbitrary_types_allowed": True}) -async def save_volume_state( +async def update_volume_status( app: FastAPI, *, status: VolumeStatus, category: VolumeCategory ) -> None: - await volumes.save_volume_state(app, status=status, category=category) + """Updates the state of the volume""" + await volumes.update_volume_status(app, status=status, category=category) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/routes.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/routes.py index 1b020c03c370..aeda529b7831 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/routes.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/rpc/routes.py @@ -4,9 +4,19 @@ from ...core.rabbitmq import get_rabbitmq_rpc_server from ...core.settings import ApplicationSettings -from . import _disk, _disk_usage, _volumes +from . import ( + _containers, + _containers_extension, + _containers_long_running_tasks, + _disk, + _disk_usage, + _volumes, +) ROUTERS: list[RPCRouter] = [ + _containers_extension.router, + _containers_long_running_tasks.router, + _containers.router, _disk_usage.router, _disk.router, _volumes.router, diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/cli.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/cli.py index 4bbf9e6016ed..e00e5ec527eb 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/cli.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/cli.py @@ -6,13 +6,17 @@ import typer from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.fastapi.long_running_tasks.server import TaskProgress +from servicelib.long_running_tasks.models import TaskProgress from settings_library.utils_cli import create_settings_command from ._meta import PROJECT_NAME from .core.application import create_base_app +from .core.rabbitmq import setup_rabbitmq from .core.settings import ApplicationSettings -from .modules.long_running_tasks import task_ports_outputs_push, task_save_state +from .modules.long_running_tasks import ( + push_user_services_output_ports, + save_user_services_state_paths, +) from .modules.mounted_fs import MountedVolumes, setup_mounted_fs from .modules.outputs import OutputsManager, setup_outputs @@ -39,6 +43,7 @@ async def _initialized_app() -> AsyncIterator[FastAPI]: app = create_base_app() # setup MountedVolumes + setup_rabbitmq(app) setup_mounted_fs(app) setup_outputs(app) @@ -74,7 +79,12 @@ async def _async_save_state() -> None: settings: ApplicationSettings = app.state.settings mounted_volumes: MountedVolumes = app.state.mounted_volumes - await task_save_state(TaskProgress.create(), settings, mounted_volumes, app) + await save_user_services_state_paths( + TaskProgress.create(), + app=app, + settings=settings, + mounted_volumes=mounted_volumes, + ) asyncio.run(_async_save_state()) _print_highlight("state save finished successfully") @@ -87,7 
+97,9 @@ def outputs_push(): async def _async_outputs_push() -> None: async with _initialized_app() as app: outputs_manager: OutputsManager = app.state.outputs_manager - await task_ports_outputs_push(TaskProgress.create(), outputs_manager, app) + await push_user_services_output_ports( + TaskProgress.create(), app=app, outputs_manager=outputs_manager + ) asyncio.run(_async_outputs_push()) _print_highlight("output ports push finished successfully") @@ -95,5 +107,5 @@ async def _async_outputs_push() -> None: # # NOTE: We intentionally did NOT create a command to run the application -# Use instead $ uvicorn simcore_service_dynamic_sidecar.main:the_app +# Use instead $ uvicorn --factory simcore_service_dynamic_sidecar.main:app_factory # diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py index b141e7ca236e..0de264932ac1 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py @@ -2,9 +2,10 @@ from asyncio import Lock from typing import Any, ClassVar +from common_library.json_serialization import json_dumps from fastapi import FastAPI from servicelib.async_utils import cancel_sequential_workers -from servicelib.fastapi import long_running_tasks +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from servicelib.fastapi.openapi import ( get_common_oas_options, override_fastapi_openapi_method, @@ -13,16 +14,16 @@ initialize_fastapi_app_tracing, setup_tracing, ) -from servicelib.logging_utils import config_all_loggers from simcore_sdk.node_ports_common.exceptions import NodeNotFound -from .._meta import API_VERSION, API_VTAG, PROJECT_NAME, SUMMARY, __version__ +from .._meta import API_VERSION, API_VTAG, APP_NAME, PROJECT_NAME, SUMMARY, __version__ from ..api.rest import get_main_router from ..api.rpc.routes import setup_rpc_api_routes from ..models.schemas.application_health import ApplicationHealth from ..models.shared_store import SharedStore, setup_shared_store from ..modules.attribute_monitor import setup_attribute_monitor from ..modules.inputs import setup_inputs +from ..modules.long_running_tasks import setup_long_running_tasks from ..modules.mounted_fs import MountedVolumes, setup_mounted_fs from ..modules.notifications import setup_notifications from ..modules.outputs import setup_outputs @@ -40,14 +41,13 @@ from .settings import ApplicationSettings from .utils import volumes_fix_permissions -_LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR _NOISY_LOGGERS = ( "aio_pika", "aiormq", "httpcore", ) -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) # # https://patorjk.com/software/taag/#p=display&f=AMC%20Tubes&t=DYSIDECAR @@ -115,53 +115,46 @@ def compose_spec(self) -> str | None: return self._shared_store.compose_spec -def setup_logger(settings: ApplicationSettings): - # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 - logging.basicConfig(level=settings.log_level) - logging.root.setLevel(settings.log_level) - config_all_loggers( - log_format_local_dev_enabled=settings.DY_SIDECAR_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=settings.DY_SIDECAR_LOG_FILTER_MAPPING, - tracing_settings=settings.DYNAMIC_SIDECAR_TRACING, - ) - - def create_base_app() -> FastAPI: - # keep mostly quiet noisy loggers - quiet_level: int = max( - min(logging.root.level + _LOG_LEVEL_STEP, 
logging.CRITICAL), logging.WARNING + # settings + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = create_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.DY_SIDECAR_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.DY_SIDECAR_LOG_FILTER_MAPPING, + tracing_settings=app_settings.DYNAMIC_SIDECAR_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, ) - for name in _NOISY_LOGGERS: - logging.getLogger(name).setLevel(quiet_level) - # settings - settings = ApplicationSettings.create_from_envs() - setup_logger(settings) - logger.debug(settings.model_dump_json(indent=2)) + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) # minimal - assert settings.SC_BOOT_MODE # nosec + assert app_settings.SC_BOOT_MODE # nosec app = FastAPI( - debug=settings.SC_BOOT_MODE.is_devel_mode(), - title=PROJECT_NAME, + debug=app_settings.SC_BOOT_MODE.is_devel_mode(), + title=APP_NAME, description=SUMMARY, version=API_VERSION, openapi_url=f"/api/{API_VTAG}/openapi.json", - **get_common_oas_options(is_devel_mode=settings.SC_BOOT_MODE.is_devel_mode()), + **get_common_oas_options( + is_devel_mode=app_settings.SC_BOOT_MODE.is_devel_mode() + ), ) override_fastapi_openapi_method(app) - app.state.settings = settings - - long_running_tasks.server.setup(app) + app.state.settings = app_settings app.include_router(get_main_router(app)) setup_reserved_space(app) + app.add_event_handler("shutdown", logging_shutdown_event) return app -def create_app(): +def create_app() -> FastAPI: """ Creates the application from using the env vars as a context Also stores inside the state all instances of classes @@ -192,6 +185,8 @@ def create_app(): setup_inputs(app) setup_outputs(app) + setup_long_running_tasks(app) + setup_attribute_monitor(app) setup_user_services_preferences(app) @@ -222,11 +217,11 @@ async def _on_startup() -> None: async def _on_shutdown() -> None: app_state = AppState(app) if docker_compose_yaml := app_state.compose_spec: - logger.info("Removing spawned containers") + _logger.info("Removing spawned containers") result = await docker_compose_down(docker_compose_yaml, app.state.settings) - logger.log( + _logger.log( logging.INFO if result.success else logging.ERROR, "Removed spawned containers:\n%s", result.message, diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py index fc67b7072f86..e5ac1badeac8 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py @@ -14,18 +14,3 @@ class VolumeNotFoundError(BaseDynamicSidecarError): class UnexpectedDockerError(BaseDynamicSidecarError): msg_template = "An unexpected Docker error occurred status_code={status_code}, message={message}" - - -class ContainerExecContainerNotFoundError(BaseDynamicSidecarError): - msg_template = "Container '{container_name}' was not found" - - -class ContainerExecTimeoutError(BaseDynamicSidecarError): - msg_template = "Timed out after {timeout} while executing: '{command}'" - - -class ContainerExecCommandFailedError(BaseDynamicSidecarError): - msg_template = ( - "Command '{command}' exited with code '{exit_code}'" - "and output: '{command_result}'" - ) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/rabbitmq.py 
b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/rabbitmq.py index 88c77c849979..9e6316c604ba 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/rabbitmq.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/rabbitmq.py @@ -121,6 +121,11 @@ def get_rabbitmq_rpc_server(app: FastAPI) -> RabbitMQRPCClient: return cast(RabbitMQRPCClient, app.state.rabbitmq_rpc_server) +def get_rabbitmq_rpc_client(app: FastAPI) -> RabbitMQRPCClient: + _raise_if_not_initialized(app, "rabbitmq_rpc_client") + return cast(RabbitMQRPCClient, app.state.rabbitmq_rpc_client) + + def setup_rabbitmq(app: FastAPI) -> None: async def on_startup() -> None: app_settings: ApplicationSettings = app.state.settings @@ -134,7 +139,12 @@ async def on_startup() -> None: ) with log_context(_logger, logging.INFO, msg="Create RabbitMQRPCClient"): app.state.rabbitmq_rpc_server = await RabbitMQRPCClient.create( - client_name=f"dynamic-sidecar_rpc_{app_settings.DY_SIDECAR_NODE_ID}", + client_name=f"dynamic-sidecar_rpc_server_{app_settings.DY_SIDECAR_NODE_ID}", + settings=settings, + ) + with log_context(_logger, logging.INFO, msg="Create RabbitMQRPCClient"): + app.state.rabbitmq_rpc_client = await RabbitMQRPCClient.create( + client_name=f"dynamic-sidecar_rpc_client_{app_settings.DY_SIDECAR_NODE_ID}", settings=settings, ) @@ -143,6 +153,8 @@ async def on_shutdown() -> None: await app.state.rabbitmq_client.close() if app.state.rabbitmq_rpc_server: await app.state.rabbitmq_rpc_server.close() + if app.state.rabbitmq_rpc_client: + await app.state.rabbitmq_rpc_client.close() app.add_event_handler("startup", on_startup) app.add_event_handler("shutdown", on_shutdown) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/reserved_space.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/reserved_space.py index e43946f5375f..05e4df212d50 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/reserved_space.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/reserved_space.py @@ -28,7 +28,7 @@ def _write_random_binary_file( bytes_written += current_chunk_size -def remove_reserved_disk_space() -> None: +def free_reserved_disk_space() -> None: _RESERVED_DISK_SPACE_NAME.unlink(missing_ok=True) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py index 4187f08b02c9..cf516f97770b 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py @@ -4,6 +4,7 @@ from pathlib import Path from typing import cast +from common_library.logging.logging_utils_filtering import LoggerName, MessageSubstring from common_library.pydantic_validators import validate_numeric_string_as_timedelta from models_library.basic_types import PortInt from models_library.callbacks_mapping import CallbacksMapping @@ -21,7 +22,6 @@ TypeAdapter, field_validator, ) -from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.application import BaseApplicationSettings from settings_library.aws_s3_cli import AwsS3CliSettings from settings_library.docker_registry import RegistrySettings @@ -29,6 +29,7 @@ from settings_library.postgres import PostgresSettings from settings_library.r_clone import RCloneSettings from settings_library.rabbit import RabbitSettings +from 
settings_library.redis import RedisSettings from settings_library.resource_usage_tracker import ( DEFAULT_RESOURCE_USAGE_HEARTBEAT_INTERVAL, ) @@ -54,7 +55,6 @@ class SystemMonitorSettings(BaseApplicationSettings): class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): - DYNAMIC_SIDECAR_DY_VOLUMES_MOUNT_DIR: Path = Field( ..., description="Base directory where dynamic-sidecar stores creates " @@ -187,6 +187,9 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): RABBIT_SETTINGS: RabbitSettings = Field( json_schema_extra={"auto_default_from_env": True} ) + REDIS_SETTINGS: RedisSettings = Field( + json_schema_extra={"auto_default_from_env": True} + ) DY_DEPLOYMENT_REGISTRY_SETTINGS: RegistrySettings = Field() DY_DOCKER_HUB_REGISTRY_SETTINGS: RegistrySettings | None = Field(default=None) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/utils.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/utils.py index 3993ce37a56d..239d9b5122ce 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/utils.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/utils.py @@ -8,7 +8,7 @@ import psutil from common_library.error_codes import create_error_code -from servicelib.logging_errors import create_troubleshotting_log_kwargs +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from ..modules.mounted_fs import MountedVolumes @@ -106,11 +106,10 @@ async def async_command( ) except Exception as err: # pylint: disable=broad-except - error_code = create_error_code(err) user_error_msg = f"Unexpected error [{error_code}]" _logger.exception( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( user_error_msg, error=err, error_context={"command": command, "proc.returncode": proc.returncode}, diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/main.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/main.py index 52c91f228374..b710b5047857 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/main.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/main.py @@ -1,8 +1,12 @@ -"""Main application to be deployed in for example uvicorn. -""" +"""Main application to be deployed in for example uvicorn.""" from fastapi import FastAPI from simcore_service_dynamic_sidecar.core.application import create_app -# SINGLETON FastAPI app -the_app: FastAPI = create_app() + +def app_factory() -> FastAPI: + """Factory function to create the FastAPI app instance. + + This is used by uvicorn or other ASGI servers to run the application. 
+ """ + return create_app() diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/models/schemas/application_health.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/models/schemas/application_health.py index 4da644858b9d..72413188e4b7 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/models/schemas/application_health.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/models/schemas/application_health.py @@ -1,5 +1,3 @@ -from typing import Optional - from pydantic import BaseModel, Field @@ -7,6 +5,6 @@ class ApplicationHealth(BaseModel): is_healthy: bool = Field( default=True, description="returns True if the service sis running correctly" ) - error_message: Optional[str] = Field( + error_message: str | None = Field( default=None, description="in case of error this gets set" ) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/models/schemas/containers.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/models/schemas/containers.py deleted file mode 100644 index e374c924070d..000000000000 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/models/schemas/containers.py +++ /dev/null @@ -1,10 +0,0 @@ -from models_library.services_creation import CreateServiceMetricsAdditionalParams -from pydantic import BaseModel - - -class ContainersComposeSpec(BaseModel): - docker_compose_yaml: str - - -class ContainersCreate(BaseModel): - metrics_params: CreateServiceMetricsAdditionalParams diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/database.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/database.py index a1ccfb9805c0..88f62dd6d649 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/database.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/database.py @@ -2,6 +2,7 @@ from servicelib.db_asyncpg_utils import check_postgres_liveness, with_async_pg_engine from settings_library.postgres import PostgresSettings +from .._meta import APP_NAME from ..core.settings import ApplicationSettings from .service_liveness import ( wait_for_service_liveness, @@ -17,7 +18,10 @@ async def wait_for_database_liveness(app: FastAPI) -> None: assert isinstance(app_settings, ApplicationSettings) # nosec postgres_settings = app_settings.POSTGRES_SETTINGS assert isinstance(postgres_settings, PostgresSettings) # nosec - async with with_async_pg_engine(postgres_settings) as engine: + async with with_async_pg_engine( + postgres_settings, + application_name=f"{APP_NAME}-{app_settings.DY_SIDECAR_NODE_ID}", + ) as engine: await wait_for_service_liveness( check_postgres_liveness, engine, diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py index 42412376d08c..974b2e0dad7b 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py @@ -3,19 +3,19 @@ from collections.abc import AsyncGenerator from contextlib import asynccontextmanager from pathlib import Path -from typing import Final +from typing import Any, Final from fastapi import FastAPI -from models_library.api_schemas_long_running_tasks.base import ( - ProgressPercent, - TaskProgress, -) +from models_library.api_schemas_directorv2.dynamic_services import 
ContainersCreate +from models_library.api_schemas_long_running_tasks.base import TaskProgress from models_library.generated_models.docker_rest_api import ContainerState from models_library.rabbitmq_messages import ProgressType, SimcorePlatformStatus from models_library.service_settings_labels import LegacyState from pydantic import PositiveInt +from servicelib.fastapi import long_running_tasks from servicelib.file_utils import log_directory_changes from servicelib.logging_utils import log_context +from servicelib.long_running_tasks.task import TaskProtocol, TaskRegistry from servicelib.progress_bar import ProgressBarData from servicelib.utils import logged_gather from simcore_sdk.node_data import data_manager @@ -25,6 +25,7 @@ from tenacity.stop import stop_after_delay from tenacity.wait import wait_random_exponential +from .._meta import APP_NAME from ..core.docker_compose_utils import ( docker_compose_create, docker_compose_down, @@ -48,9 +49,9 @@ from ..core.utils import CommandResult from ..core.validation import parse_compose_spec from ..models.schemas.application_health import ApplicationHealth -from ..models.schemas.containers import ContainersCreate from ..models.shared_store import SharedStore from ..modules import nodeports, user_services_preferences +from ..modules.inputs import InputsState from ..modules.mounted_fs import MountedVolumes from ..modules.notifications._notifications_ports import PortNotifier from ..modules.outputs import OutputsManager, event_propagation_disabled @@ -146,66 +147,62 @@ async def _reset_on_error( raise -async def task_pull_user_servcices_docker_images( - progress: TaskProgress, shared_store: SharedStore, app: FastAPI +async def pull_user_services_images( + progress: TaskProgress, app: FastAPI, shared_store: SharedStore ) -> None: assert shared_store.compose_spec # nosec - progress.update(message="started pulling user services", percent=ProgressPercent(0)) + await progress.update(message="started pulling user services", percent=0) await docker_compose_pull(app, shared_store.compose_spec) - progress.update( - message="finished pulling user services", percent=ProgressPercent(1) - ) + await progress.update(message="finished pulling user services", percent=1) -async def task_create_service_containers( +async def create_user_services( progress: TaskProgress, + app: FastAPI, settings: ApplicationSettings, - containers_create: ContainersCreate, shared_store: SharedStore, - app: FastAPI, application_health: ApplicationHealth, + containers_create: ContainersCreate, ) -> list[str]: - progress.update(message="validating service spec", percent=ProgressPercent(0)) + await progress.update(message="validating service spec", percent=0) assert shared_store.compose_spec # nosec - async with event_propagation_disabled(app), _reset_on_error( - shared_store - ), ProgressBarData( - num_steps=4, - progress_report_cb=functools.partial( - post_progress_message, - app, - ProgressType.SERVICE_CONTAINERS_STARTING, - ), - description="starting software", - ) as progress_bar: + async with ( + event_propagation_disabled(app), + _reset_on_error(shared_store), + ProgressBarData( + num_steps=4, + progress_report_cb=functools.partial( + post_progress_message, + app, + ProgressType.SERVICE_CONTAINERS_STARTING, + ), + description="starting software", + ) as progress_bar, + ): with log_context(_logger, logging.INFO, "load user services preferences"): if user_services_preferences.is_feature_enabled(app): await user_services_preferences.load_user_services_preferences(app) await 
progress_bar.update() # removes previous pending containers - progress.update(message="cleanup previous used resources") + await progress.update(message="cleanup previous used resources") result = await docker_compose_rm(shared_store.compose_spec, settings) _raise_for_errors(result, "rm") await progress_bar.update() - progress.update( - message="creating and starting containers", percent=ProgressPercent(0.90) - ) + await progress.update(message="creating and starting containers", percent=0.90) await post_sidecar_log_message( app, "starting service containers", log_level=logging.INFO ) await _retry_docker_compose_create(shared_store.compose_spec, settings) await progress_bar.update() - progress.update( - message="ensure containers are started", percent=ProgressPercent(0.95) - ) + await progress.update(message="ensure containers are started", percent=0.95) compose_start_result = await _retry_docker_compose_start( shared_store.compose_spec, settings ) @@ -236,11 +233,11 @@ async def task_create_service_containers( return shared_store.container_names -async def task_runs_docker_compose_down( +async def remove_user_services( progress: TaskProgress, app: FastAPI, - shared_store: SharedStore, settings: ApplicationSettings, + shared_store: SharedStore, mounted_volumes: MountedVolumes, ) -> None: if shared_store.compose_spec is None: @@ -288,9 +285,7 @@ async def _send_resource_tracking_stop(platform_status: SimcorePlatformStatus): await send_service_stopped(app, simcore_platform_status) try: - progress.update( - message="running docker-compose-down", percent=ProgressPercent(0.1) - ) + await progress.update(message="running docker-compose-down", percent=0.1) await run_before_shutdown_actions( shared_store, settings.DY_SIDECAR_CALLBACKS_MAPPING.before_shutdown @@ -303,13 +298,11 @@ async def _send_resource_tracking_stop(platform_status: SimcorePlatformStatus): result = await _retry_docker_compose_down(shared_store.compose_spec, settings) _raise_for_errors(result, "down") - progress.update(message="stopping logs", percent=ProgressPercent(0.9)) + await progress.update(message="stopping logs", percent=0.9) for container_name in shared_store.container_names: await stop_log_fetching(app, container_name) - progress.update( - message="removing pending resources", percent=ProgressPercent(0.95) - ) + await progress.update(message="removing pending resources", percent=0.95) result = await docker_compose_rm(shared_store.compose_spec, settings) _raise_for_errors(result, "rm") except Exception: @@ -326,7 +319,7 @@ async def _send_resource_tracking_stop(platform_status: SimcorePlatformStatus): async with shared_store: shared_store.compose_spec = None shared_store.container_names = [] - progress.update(message="done", percent=ProgressPercent(0.99)) + await progress.update(message="done", percent=0.99) def _get_satate_folders_size(paths: list[Path]) -> int: @@ -373,11 +366,11 @@ async def _restore_state_folder( ) -async def task_restore_state( +async def restore_user_services_state_paths( progress: TaskProgress, + app: FastAPI, settings: ApplicationSettings, mounted_volumes: MountedVolumes, - app: FastAPI, ) -> int: # NOTE: the legacy data format was a zip file # this method will maintain retro compatibility. @@ -389,7 +382,7 @@ async def task_restore_state( # NOTE: this implies that the legacy format will always be decompressed # until it is not removed. 
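One mechanical change recurs throughout this module, above and below this point: TaskProgress.update is now awaited and the ProgressPercent wrapper is dropped in favour of plain floats in [0, 1]. A minimal before/after sketch of that pattern, for orientation only:

    from models_library.api_schemas_long_running_tasks.base import TaskProgress

    async def _progress_example(progress: TaskProgress) -> None:
        # before: progress.update(message="Downloading state", percent=ProgressPercent(0.05))
        # after: the call is awaited and takes a plain float
        await progress.update(message="Downloading state", percent=0.05)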
- progress.update(message="Downloading state", percent=ProgressPercent(0.05)) + await progress.update(message="Downloading state", percent=0.05) state_paths = list(mounted_volumes.disk_state_paths_iter()) await post_sidecar_log_message( app, @@ -419,7 +412,7 @@ async def task_restore_state( await post_sidecar_log_message( app, "Finished state downloading", log_level=logging.INFO ) - progress.update(message="state restored", percent=ProgressPercent(0.99)) + await progress.update(message="state restored", percent=0.99) return _get_satate_folders_size(state_paths) @@ -445,21 +438,22 @@ async def _save_state_folder( progress_bar=progress_bar, aws_s3_cli_settings=settings.DY_SIDECAR_AWS_S3_CLI_SETTINGS, legacy_state=_get_legacy_state_with_dy_volumes_path(settings), + application_name=f"{APP_NAME}-{settings.DY_SIDECAR_NODE_ID}", ) -async def task_save_state( +async def save_user_services_state_paths( progress: TaskProgress, + app: FastAPI, settings: ApplicationSettings, mounted_volumes: MountedVolumes, - app: FastAPI, ) -> int: """ Saves the states of the service. If a legacy archive is detected, it will be removed after saving the new format. """ - progress.update(message="starting state save", percent=ProgressPercent(0.0)) + await progress.update(message="starting state save", percent=0.0) state_paths = list(mounted_volumes.disk_state_paths_iter()) async with ProgressBarData( num_steps=len(state_paths), @@ -485,30 +479,29 @@ async def task_save_state( ) await post_sidecar_log_message(app, "Finished state saving", log_level=logging.INFO) - progress.update(message="finished state saving", percent=ProgressPercent(0.99)) + await progress.update(message="finished state saving", percent=0.99) return _get_satate_folders_size(state_paths) -async def task_ports_inputs_pull( +async def pull_user_services_input_ports( progress: TaskProgress, - port_keys: list[str] | None, - mounted_volumes: MountedVolumes, app: FastAPI, settings: ApplicationSettings, - *, - inputs_pulling_enabled: bool, + mounted_volumes: MountedVolumes, + inputs_state: InputsState, + port_keys: list[str] | None, ) -> int: - if not inputs_pulling_enabled: + if not inputs_state.inputs_pulling_enabled: _logger.info("Received request to pull inputs but was ignored") return 0 - progress.update(message="starting inputs pulling", percent=ProgressPercent(0.0)) + await progress.update(message="starting inputs pulling", percent=0.0) port_keys = [] if port_keys is None else port_keys await post_sidecar_log_message( app, f"Pulling inputs for {port_keys}", log_level=logging.INFO ) - progress.update(message="pulling inputs", percent=ProgressPercent(0.1)) + await progress.update(message="pulling inputs", percent=0.1) async with ProgressBarData( num_steps=1, progress_report_cb=functools.partial( @@ -539,17 +532,17 @@ async def task_ports_inputs_pull( await post_sidecar_log_message( app, "Finished pulling inputs", log_level=logging.INFO ) - progress.update(message="finished inputs pulling", percent=ProgressPercent(0.99)) + await progress.update(message="finished inputs pulling", percent=0.99) return int(transferred_bytes) -async def task_ports_outputs_pull( +async def pull_user_services_output_ports( progress: TaskProgress, - port_keys: list[str] | None, - mounted_volumes: MountedVolumes, app: FastAPI, + mounted_volumes: MountedVolumes, + port_keys: list[str] | None, ) -> int: - progress.update(message="starting outputs pulling", percent=ProgressPercent(0.0)) + await progress.update(message="starting outputs pulling", percent=0.0) port_keys = [] if 
port_keys is None else port_keys await post_sidecar_log_message( app, f"Pulling output for {port_keys}", log_level=logging.INFO @@ -576,14 +569,14 @@ async def task_ports_outputs_pull( await post_sidecar_log_message( app, "Finished pulling outputs", log_level=logging.INFO ) - progress.update(message="finished outputs pulling", percent=ProgressPercent(0.99)) + await progress.update(message="finished outputs pulling", percent=0.99) return int(transferred_bytes) -async def task_ports_outputs_push( - progress: TaskProgress, outputs_manager: OutputsManager, app: FastAPI +async def push_user_services_output_ports( + progress: TaskProgress, app: FastAPI, outputs_manager: OutputsManager ) -> None: - progress.update(message="starting outputs pushing", percent=ProgressPercent(0.0)) + await progress.update(message="starting outputs pushing", percent=0.0) await post_sidecar_log_message( app, f"waiting for outputs {outputs_manager.outputs_context.file_type_port_keys} to be pushed", @@ -595,10 +588,10 @@ async def task_ports_outputs_push( await post_sidecar_log_message( app, "finished outputs pushing", log_level=logging.INFO ) - progress.update(message="finished outputs pushing", percent=ProgressPercent(0.99)) + await progress.update(message="finished outputs pushing", percent=0.99) -async def task_containers_restart( +async def restart_user_services( progress: TaskProgress, app: FastAPI, settings: ApplicationSettings, @@ -610,9 +603,7 @@ async def task_containers_restart( # or some other state, the service will get shutdown, to prevent this # blocking status while containers are being restarted. async with app.state.container_restart_lock: - progress.update( - message="starting containers restart", percent=ProgressPercent(0.0) - ) + await progress.update(message="starting containers restart", percent=0.0) if shared_store.compose_spec is None: msg = "No spec for docker compose command was found" raise RuntimeError(msg) @@ -620,20 +611,100 @@ async def task_containers_restart( for container_name in shared_store.container_names: await stop_log_fetching(app, container_name) - progress.update(message="stopped log fetching", percent=ProgressPercent(0.1)) + await progress.update(message="stopped log fetching", percent=0.1) result = await docker_compose_restart(shared_store.compose_spec, settings) _raise_for_errors(result, "restart") - progress.update(message="containers restarted", percent=ProgressPercent(0.8)) + await progress.update(message="containers restarted", percent=0.8) for container_name in shared_store.container_names: await start_log_fetching(app, container_name) - progress.update(message="started log fetching", percent=ProgressPercent(0.9)) + await progress.update(message="started log fetching", percent=0.9) await post_sidecar_log_message( app, "Service was restarted please reload the UI", log_level=logging.INFO ) await post_event_reload_iframe(app) - progress.update(message="started log fetching", percent=ProgressPercent(0.99)) + await progress.update(message="started log fetching", percent=0.99) + + +def setup_long_running_tasks(app: FastAPI) -> None: + app_settings: ApplicationSettings = app.state.settings + long_running_tasks.server.setup( + app, + redis_settings=app_settings.REDIS_SETTINGS, + rabbit_settings=app_settings.RABBIT_SETTINGS, + lrt_namespace=f"{APP_NAME}-{app_settings.DY_SIDECAR_RUN_ID}", + ) + + task_context: dict[TaskProtocol, dict[str, Any]] = {} + + async def on_startup() -> None: + shared_store: SharedStore = app.state.shared_store + settings: ApplicationSettings = 
app.state.settings + application_health: ApplicationHealth = app.state.application_health + mounted_volumes: MountedVolumes = app.state.mounted_volumes + outputs_manager: OutputsManager = app.state.outputs_manager + inputs_state: InputsState = app.state.inputs_state + + task_context.update( + { + pull_user_services_images: { + "shared_store": shared_store, + "app": app, + }, + create_user_services: { + "app": app, + "settings": settings, + "shared_store": shared_store, + "application_health": application_health, + }, + remove_user_services: { + "app": app, + "settings": settings, + "shared_store": shared_store, + "mounted_volumes": mounted_volumes, + }, + restore_user_services_state_paths: { + "app": app, + "settings": settings, + "mounted_volumes": mounted_volumes, + }, + save_user_services_state_paths: { + "app": app, + "settings": settings, + "mounted_volumes": mounted_volumes, + }, + pull_user_services_input_ports: { + "app": app, + "settings": settings, + "mounted_volumes": mounted_volumes, + "inputs_state": inputs_state, + }, + pull_user_services_output_ports: { + "app": app, + "mounted_volumes": mounted_volumes, + }, + push_user_services_output_ports: { + "app": app, + "outputs_manager": outputs_manager, + }, + restart_user_services: { + "app": app, + "settings": settings, + "shared_store": shared_store, + }, + } + ) + + for handler, context in task_context.items(): + TaskRegistry.register(handler, **context) + + async def _on_shutdown() -> None: + for handler in task_context: + TaskRegistry.unregister(handler) + + app.add_event_handler("startup", on_startup) + app.add_event_handler("shutdown", _on_shutdown) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks_utils.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks_utils.py index 21d9adaebbb5..3544bfd164e8 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks_utils.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks_utils.py @@ -3,17 +3,18 @@ from datetime import timedelta from typing import Final +from aiodocker import DockerError from models_library.callbacks_mapping import UserServiceCommand -from servicelib.logging_utils import log_context - -from ..core.errors import ( +from servicelib.container_utils import ( ContainerExecCommandFailedError, ContainerExecContainerNotFoundError, ContainerExecTimeoutError, + run_command_in_container, ) +from servicelib.logging_utils import log_context + from ..models.shared_store import SharedStore from ..modules.mounted_fs import MountedVolumes -from .container_utils import run_command_in_container _logger = logging.getLogger(__name__) @@ -39,6 +40,7 @@ async def run_before_shutdown_actions( ContainerExecContainerNotFoundError, ContainerExecCommandFailedError, ContainerExecTimeoutError, + DockerError, ): _logger.warning( "Could not run before_shutdown command %s in container %s", diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/mounted_fs.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/mounted_fs.py index 78ddbf411995..6e9c9e7a7561 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/mounted_fs.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/mounted_fs.py @@ -162,7 +162,7 @@ async def iter_state_paths_to_docker_volumes( def setup_mounted_fs(app: FastAPI) -> MountedVolumes: settings: 
ApplicationSettings = app.state.settings - app.state.mounted_volumes = MountedVolumes( + app.state.mounted_volumes = mounted_volumes = MountedVolumes( service_run_id=settings.DY_SIDECAR_RUN_ID, node_id=settings.DY_SIDECAR_NODE_ID, inputs_path=settings.DY_SIDECAR_PATH_INPUTS, @@ -174,4 +174,4 @@ def setup_mounted_fs(app: FastAPI) -> MountedVolumes: dy_volumes=settings.DYNAMIC_SIDECAR_DY_VOLUMES_MOUNT_DIR, ) - return app.state.mounted_volumes + return mounted_volumes diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py index 7e6f6c7b638d..a65d69eb4912 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py @@ -15,7 +15,6 @@ from aiofiles.os import remove from aiofiles.tempfile import TemporaryDirectory as AioTemporaryDirectory from common_library.json_serialization import json_loads -from models_library.projects import ProjectIDStr from models_library.projects_nodes_io import NodeIDStr from models_library.services_types import ServicePortKey from pydantic import ByteSize, TypeAdapter @@ -29,10 +28,11 @@ from simcore_sdk.node_ports_common.file_io_utils import LogRedirectCB from simcore_sdk.node_ports_v2 import Port from simcore_sdk.node_ports_v2.links import ItemConcreteValue -from simcore_sdk.node_ports_v2.nodeports_v2 import Nodeports, OutputsCallbacks +from simcore_sdk.node_ports_v2.nodeports_v2 import OutputsCallbacks from simcore_sdk.node_ports_v2.port import SetKWargs from simcore_sdk.node_ports_v2.port_utils import is_file_type +from .._meta import APP_NAME from ..core.settings import ApplicationSettings, get_settings from ..modules.notifications import PortNotifier @@ -98,15 +98,19 @@ async def upload_outputs( # pylint:disable=too-many-statements # noqa: PLR0915 start_time = time.perf_counter() settings: ApplicationSettings = get_settings() - PORTS: Nodeports = await node_ports_v2.ports( + db_manager = node_ports_v2.DBManager( + application_name=f"{APP_NAME}-{settings.DY_SIDECAR_NODE_ID}" + ) + ports = await node_ports_v2.ports( user_id=settings.DY_SIDECAR_USER_ID, - project_id=ProjectIDStr(settings.DY_SIDECAR_PROJECT_ID), + project_id=f"{settings.DY_SIDECAR_PROJECT_ID}", node_uuid=TypeAdapter(NodeIDStr).validate_python( f"{settings.DY_SIDECAR_NODE_ID}" ), r_clone_settings=None, io_log_redirect_cb=io_log_redirect_cb, aws_s3_cli_settings=None, + db_manager=db_manager, ) # let's gather the tasks @@ -116,7 +120,7 @@ async def upload_outputs( # pylint:disable=too-many-statements # noqa: PLR0915 archiving_tasks: deque[Coroutine[None, None, None]] = deque() ports_to_set: list[Port] = [ port_value - for port_value in (await PORTS.outputs).values() + for port_value in (await ports.outputs).values() if (not port_keys) or (port_value.key in port_keys) ] @@ -220,7 +224,7 @@ async def _archive_dir_notified( if archiving_tasks: await limited_gather(*archiving_tasks, limit=4) - await PORTS.set_multiple( + await ports.set_multiple( ports_values, progress_bar=sub_progress, outputs_callbacks=OutputCallbacksWrapper(port_notifier), @@ -276,7 +280,7 @@ async def _get_data_from_port( archive_files: set[Path] if _is_zip_file(downloaded_file): - prunable_folder = PrunableFolder(final_path.parent) + prunable_folder = PrunableFolder(final_path) with log_context( _logger, logging.DEBUG, @@ -327,21 +331,25 @@ async def download_target_ports( start_time = 
time.perf_counter() settings: ApplicationSettings = get_settings() - PORTS: Nodeports = await node_ports_v2.ports( + db_manager = node_ports_v2.DBManager( + application_name=f"{APP_NAME}-{settings.DY_SIDECAR_NODE_ID}" + ) + ports = await node_ports_v2.ports( user_id=settings.DY_SIDECAR_USER_ID, - project_id=ProjectIDStr(settings.DY_SIDECAR_PROJECT_ID), + project_id=f"{settings.DY_SIDECAR_PROJECT_ID}", node_uuid=TypeAdapter(NodeIDStr).validate_python( f"{settings.DY_SIDECAR_NODE_ID}" ), r_clone_settings=None, io_log_redirect_cb=io_log_redirect_cb, aws_s3_cli_settings=None, + db_manager=db_manager, ) # let's gather all the data ports_to_get: list[Port] = [ port_value - for port_value in (await getattr(PORTS, port_type_name.value)).values() + for port_value in (await getattr(ports, port_type_name.value)).values() if (not port_keys) or (port_value.key in port_keys) ] diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py index f29f26358e29..a58216b34861 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py @@ -6,12 +6,12 @@ from datetime import timedelta from functools import partial +from common_library.async_tools import cancel_wait_task from common_library.errors_classes import OsparcErrorMixin from fastapi import FastAPI from models_library.rabbitmq_messages import ProgressType from pydantic import PositiveFloat from servicelib import progress_bar -from servicelib.async_utils import cancel_wait_task from servicelib.background_task import create_periodic_task from servicelib.logging_utils import log_catch, log_context from simcore_sdk.node_ports_common.file_io_utils import LogRedirectCB @@ -118,6 +118,7 @@ def __init__( self._port_key_tracker = _PortKeyTracker() self._task_uploading: Task | None = None + self._task_uploading_followup: Task | None = None self._task_scheduler_worker: Task | None = None self._schedule_all_ports_for_upload: bool = False @@ -171,7 +172,9 @@ def _remove_downloads(future: Future) -> None: except Exception as e: # pylint: disable=broad-except self._last_upload_error_tracker[port_key] = e - create_task(self._port_key_tracker.remove_all_uploading()) + self._task_uploading_followup = create_task( + self._port_key_tracker.remove_all_uploading() + ) self._task_uploading.add_done_callback(_remove_downloads) @@ -179,6 +182,12 @@ async def _uploading_task_cancel(self) -> None: if self._task_uploading is not None: await _cancel_task(self._task_uploading, self.task_cancellation_timeout_s) await self._port_key_tracker.move_all_uploading_to_pending() + self._task_uploading = None + if self._task_uploading_followup is not None: + await _cancel_task( + self._task_uploading_followup, self.task_cancellation_timeout_s + ) + self._task_uploading_followup = None async def _scheduler_worker(self) -> None: if await self._port_key_tracker.are_pending_ports_uploading(): diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/prometheus_metrics.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/prometheus_metrics.py index eb7ad93ed9e1..e7d7b24e4277 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/prometheus_metrics.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/prometheus_metrics.py @@ -6,18 +6,18 @@ 
from typing import Final import arrow +from common_library.async_tools import cancel_wait_task from fastapi import FastAPI, status from models_library.callbacks_mapping import CallbacksMapping, UserServiceCommand from pydantic import BaseModel, NonNegativeFloat, NonNegativeInt -from servicelib.async_utils import cancel_wait_task -from servicelib.logging_utils import log_context -from servicelib.sequences_utils import pairwise -from simcore_service_dynamic_sidecar.core.errors import ( +from servicelib.container_utils import ( ContainerExecContainerNotFoundError, + run_command_in_container, ) +from servicelib.logging_utils import log_context +from servicelib.sequences_utils import pairwise from ..models.shared_store import SharedStore -from .container_utils import run_command_in_container _logger = logging.getLogger(__name__) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/resource_tracking/_core.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/resource_tracking/_core.py index eecbfd2089e7..d61402e6d285 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/resource_tracking/_core.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/resource_tracking/_core.py @@ -2,6 +2,7 @@ import logging from typing import Final +from common_library.async_tools import cancel_wait_task from fastapi import FastAPI from models_library.generated_models.docker_rest_api import ContainerState from models_library.rabbitmq_messages import ( @@ -14,7 +15,6 @@ from models_library.services import ServiceType from models_library.services_creation import CreateServiceMetricsAdditionalParams from pydantic import NonNegativeFloat -from servicelib.async_utils import cancel_wait_task from servicelib.background_task import create_periodic_task from servicelib.logging_utils import log_context diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_disk_usage.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_disk_usage.py index d2148842ef5c..fc6941d1a542 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_disk_usage.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/system_monitor/_disk_usage.py @@ -7,6 +7,7 @@ from typing import Final import psutil +from common_library.async_tools import cancel_wait_task from fastapi import FastAPI from models_library.api_schemas_dynamic_sidecar.telemetry import ( DiskUsage, @@ -14,7 +15,6 @@ ) from models_library.projects_nodes_io import NodeID from models_library.users import UserID -from servicelib.async_utils import cancel_wait_task from servicelib.background_task import create_periodic_task from servicelib.logging_utils import log_context from servicelib.utils import logged_gather diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_db.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_db.py index 3942e23b1845..e2d566f470b8 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_db.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_db.py @@ -32,6 +32,7 @@ async def save_preferences( user_preferences_path: Path, user_id: UserID, product_name: ProductName, + application_name: str, ): preference_class = 
get_model_class(service_key) @@ -40,7 +41,10 @@ async def save_preferences( service_key=service_key, service_version=service_version, value=dir_content ) - async with DBContextManager() as engine, engine.begin() as conn: + async with ( + DBContextManager(application_name=application_name) as engine, + engine.begin() as conn, + ): await UserServicesUserPreferencesRepo.save( conn, user_id=user_id, @@ -58,10 +62,14 @@ async def load_preferences( user_preferences_path: Path, user_id: UserID, product_name: ProductName, + application_name: str, ) -> None: preference_class = get_model_class(service_key) - async with DBContextManager() as engine, engine.connect() as conn: + async with ( + DBContextManager(application_name=application_name) as engine, + engine.connect() as conn, + ): payload = await UserServicesUserPreferencesRepo.load( conn, user_id=user_id, diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_manager.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_manager.py index 3c9ede49dbce..2f4b5a0641ee 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_manager.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_manager.py @@ -19,6 +19,7 @@ class UserServicesPreferencesManager: service_version: ServiceVersion user_id: UserID product_name: ProductName + application_name: str _preferences_already_saved: bool = False async def load_preferences(self) -> None: @@ -28,6 +29,7 @@ async def load_preferences(self) -> None: service_version=self.service_version, user_id=self.user_id, product_name=self.product_name, + application_name=self.application_name, ) async def save_preferences(self) -> None: @@ -41,6 +43,7 @@ async def save_preferences(self) -> None: service_version=self.service_version, user_id=self.user_id, product_name=self.product_name, + application_name=self.application_name, ) self._preferences_already_saved = True diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_setup.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_setup.py index 83915fc151ad..0457a58e1e7a 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_setup.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/user_services_preferences/_setup.py @@ -3,6 +3,7 @@ from fastapi import FastAPI from servicelib.logging_utils import log_context +from ..._meta import APP_NAME from ...core.settings import ApplicationSettings from ._manager import UserServicesPreferencesManager from ._utils import is_feature_enabled @@ -33,6 +34,7 @@ async def on_startup() -> None: service_version=settings.DY_SIDECAR_SERVICE_VERSION, user_id=settings.DY_SIDECAR_USER_ID, product_name=settings.DY_SIDECAR_PRODUCT_NAME, + application_name=f"{APP_NAME}-{settings.DY_SIDECAR_NODE_ID}", ) ) else: diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/services/container_extensions.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/services/container_extensions.py new file mode 100644 index 000000000000..c563bc47ccdd --- /dev/null +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/services/container_extensions.py @@ -0,0 +1,110 @@ +import logging + +from aiodocker.networks import DockerNetwork +from fastapi import 
FastAPI +from models_library.services import ServiceOutput +from simcore_sdk.node_ports_v2.port_utils import is_file_type + +from ..core.docker_utils import docker_client +from ..modules.inputs import disable_inputs_pulling, enable_inputs_pulling +from ..modules.mounted_fs import MountedVolumes +from ..modules.outputs import ( + OutputsContext, + disable_event_propagation, + enable_event_propagation, +) + +_logger = logging.getLogger(__name__) + + +async def toggle_ports_io( + app: FastAPI, *, enable_outputs: bool, enable_inputs: bool +) -> None: + if enable_outputs: + await enable_event_propagation(app) + else: + await disable_event_propagation(app) + + if enable_inputs: + enable_inputs_pulling(app) + else: + disable_inputs_pulling(app) + + +async def create_output_dirs( + app: FastAPI, *, outputs_labels: dict[str, ServiceOutput] +) -> None: + mounted_volumes: MountedVolumes = app.state.mounted_volumes + outputs_context: OutputsContext = app.state.outputs_context + + outputs_path = mounted_volumes.disk_outputs_path + file_type_port_keys = [] + non_file_port_keys = [] + for port_key, service_output in outputs_labels.items(): + _logger.debug("Parsing output labels, detected: %s", f"{port_key=}") + if is_file_type(service_output.property_type): + dir_to_create = outputs_path / port_key + dir_to_create.mkdir(parents=True, exist_ok=True) + file_type_port_keys.append(port_key) + else: + non_file_port_keys.append(port_key) + + _logger.debug( + "Setting: %s, %s", f"{file_type_port_keys=}", f"{non_file_port_keys=}" + ) + await outputs_context.set_file_type_port_keys(file_type_port_keys) + outputs_context.non_file_type_port_keys = non_file_port_keys + + +async def attach_container_to_network( + *, container_id: str, network_id: str, network_aliases: list[str] +) -> None: + async with docker_client() as docker: + container_instance = await docker.containers.get(container_id) + container_inspect = await container_instance.show() + + attached_network_ids: set[str] = { + x["NetworkID"] + for x in container_inspect["NetworkSettings"]["Networks"].values() + } + + if network_id in attached_network_ids: + _logger.debug( + "Container %s already attached to network %s", + container_id, + network_id, + ) + return + + # NOTE: A docker network is only visible on a docker node when it is + # used by a container + network = DockerNetwork(docker=docker, id_=network_id) + await network.connect( + { + "Container": container_id, + "EndpointConfig": {"Aliases": network_aliases}, + } + ) + + +async def detach_container_from_network(*, container_id: str, network_id: str) -> None: + async with docker_client() as docker: + container_instance = await docker.containers.get(container_id) + container_inspect = await container_instance.show() + + attached_network_ids: set[str] = set( + container_inspect["NetworkSettings"]["Networks"].keys() + ) + + if network_id not in attached_network_ids: + _logger.debug( + "Container %s already detached from network %s", + container_id, + network_id, + ) + return + + # NOTE: A docker network is only visible on a docker node when it is + # used by a container + network = DockerNetwork(docker=docker, id_=network_id) + await network.disconnect({"Container": container_id, "Force": True}) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/services/containers.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/services/containers.py new file mode 100644 index 000000000000..7afb4316944b --- /dev/null +++ 
b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/services/containers.py @@ -0,0 +1,232 @@ +import logging +from asyncio import Lock +from typing import Any, Final + +from aiodocker import DockerError +from common_library.errors_classes import OsparcErrorMixin +from common_library.json_serialization import json_loads +from fastapi import FastAPI +from models_library.api_schemas_directorv2.dynamic_services import ContainersComposeSpec +from models_library.api_schemas_dynamic_sidecar.containers import ( + ActivityInfo, + ActivityInfoOrNone, +) +from pydantic import TypeAdapter, ValidationError +from servicelib.container_utils import ( + ContainerExecCommandFailedError, + ContainerExecContainerNotFoundError, + ContainerExecTimeoutError, + run_command_in_container, +) + +from ..core.docker_utils import docker_client +from ..core.settings import ApplicationSettings +from ..core.validation import ( + ComposeSpecValidation, + get_and_validate_compose_spec, + parse_compose_spec, +) +from ..models.shared_store import SharedStore +from ..modules.mounted_fs import MountedVolumes + +_INACTIVE_FOR_LONG_TIME: Final[int] = 2**63 - 1 + +_logger = logging.getLogger(__name__) + + +async def create_compose_spec( + app: FastAPI, + *, + containers_compose_spec: ContainersComposeSpec, +) -> None: + settings: ApplicationSettings = app.state.settings + shared_store: SharedStore = app.state.shared_store + mounted_volumes: MountedVolumes = app.state.mounted_volumes + + async with shared_store: + compose_spec_validation: ComposeSpecValidation = ( + await get_and_validate_compose_spec( + settings=settings, + compose_file_content=containers_compose_spec.docker_compose_yaml, + mounted_volumes=mounted_volumes, + ) + ) + shared_store.compose_spec = compose_spec_validation.compose_spec + shared_store.container_names = compose_spec_validation.current_container_names + shared_store.original_to_container_names = ( + compose_spec_validation.original_to_current_container_names + ) + + _logger.info("Validated compose-spec:\n%s", f"{shared_store.compose_spec}") + + assert shared_store.compose_spec + + +def _format_result( + container_inspect: dict[str, Any], *, only_status: bool +) -> dict[str, Any]: + if only_status: + container_state = container_inspect.get("State", {}) + + # pending is another fake state use to share more information with the frontend + return { + "Status": container_state.get("Status", "pending"), + "Error": container_state.get("Error", ""), + } + + return container_inspect + + +async def containers_docker_inspect( + app: FastAPI, *, only_status: bool +) -> dict[str, Any]: + container_restart_lock: Lock = app.state.container_restart_lock + shared_store: SharedStore = app.state.shared_store + + async with container_restart_lock, docker_client() as docker: + container_names = shared_store.container_names + + results = {} + for container in container_names: + container_instance = await docker.containers.get(container) + container_inspect = await container_instance.show() + results[container] = _format_result( + container_inspect, only_status=only_status + ) + + return results + + +async def get_containers_activity(app: FastAPI) -> ActivityInfoOrNone: + settings: ApplicationSettings = app.state.settings + shared_store: SharedStore = app.state.shared_store + + inactivity_command = settings.DY_SIDECAR_CALLBACKS_MAPPING.inactivity + if inactivity_command is None: + return None + + container_name = inactivity_command.service + + try: + inactivity_response = await run_command_in_container( + 
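# Editor's note: the inactivity callback configured via DY_SIDECAR_CALLBACKS_MAPPING is run
# inside the named user-service container and is expected to print JSON that validates as
# ActivityInfo (see the TypeAdapter parsing just below). Hedged sketch; the mapping values
# and the 42 seconds are illustrative only:
#
#   DY_SIDECAR_CALLBACKS_MAPPING='{"inactivity": {"service": "main", "command": "activity-probe", "timeout": 4}}'
#   expected stdout of the command -> {"seconds_inactive": 42}
#
from models_library.api_schemas_dynamic_sidecar.containers import ActivityInfo
from pydantic import TypeAdapter

activity = TypeAdapter(ActivityInfo).validate_json('{"seconds_inactive": 42}')
assert activity.seconds_inactive == 42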
shared_store.original_to_container_names[inactivity_command.service], + command=inactivity_command.command, + timeout=inactivity_command.timeout, + ) + except ( + ContainerExecContainerNotFoundError, + ContainerExecCommandFailedError, + ContainerExecTimeoutError, + DockerError, + ): + _logger.warning( + "Could not run inactivity command '%s' in container '%s'", + inactivity_command.command, + container_name, + exc_info=True, + ) + return ActivityInfo(seconds_inactive=_INACTIVE_FOR_LONG_TIME) + + try: + return TypeAdapter(ActivityInfo).validate_json(inactivity_response) + except ValidationError: + _logger.warning( + "Could not parse command result '%s' as '%s'", + inactivity_response, + ActivityInfo.__name__, + exc_info=True, + ) + + return ActivityInfo(seconds_inactive=_INACTIVE_FOR_LONG_TIME) + + +class BaseGetNameError(OsparcErrorMixin, RuntimeError): + pass + + +class InvalidFilterFormatError(BaseGetNameError): + msg_template: str = "Provided filters, could not parsed {filters}" + + +class MissingDockerComposeDownSpecError(BaseGetNameError): + msg_template: str = "No spec for docker compose down was found" + + +class ContainerNotFoundError(BaseGetNameError): + msg_template: str = ( + "No container found for network={network_name} and exclude={exclude}" + ) + + +async def get_containers_name(app: FastAPI, *, filters: str) -> str | dict[str, Any]: + """ + Searches for the container's name given the network + on which the proxy communicates with it. + Supported filters: + network: matches against the exact network name + assigned to the container; `will include` + containers + exclude: matches if contained in the name of the + container; `will exclude` containers + """ + shared_store: SharedStore = app.state.shared_store + + filters_dict: dict[str, str] = json_loads(filters) + if not isinstance(filters_dict, dict): + raise InvalidFilterFormatError(filters=filters_dict) + network_name: str | None = filters_dict.get("network") + exclude: str | None = filters_dict.get("exclude") + + stored_compose_content = shared_store.compose_spec + if stored_compose_content is None: + raise MissingDockerComposeDownSpecError + + compose_spec = parse_compose_spec(stored_compose_content) + + container_name = None + + spec_services = compose_spec["services"] + for service in spec_services: + service_content = spec_services[service] + if network_name in service_content.get("networks", {}): + if exclude is not None and exclude in service_content["container_name"]: + # removing this container from results + continue + container_name = service_content["container_name"] + break + + if container_name is None: + raise ContainerNotFoundError(network_name=network_name, exclude=exclude) + + return f"{container_name}" + + +class ContainerIsMissingError(OsparcErrorMixin, RuntimeError): + msg_template: str = ( + "No container='{container_id}' was found in started_containers='{container_names}'" + ) + + +async def inspect_container( + app: FastAPI, + *, + container_id: str, +) -> dict[str, Any]: + """Returns information about the container, like docker inspect command""" + shared_store: SharedStore = app.state.shared_store + + container_names = shared_store.container_names + if container_id not in container_names: + _logger.warning( + "No container='%s' was found in started_containers='%s'", + container_id, + container_names, + ) + raise ContainerIsMissingError( + container_id=container_id, container_names=container_names + ) + + async with docker_client() as docker: + container_instance = await 
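# Editor's note: hedged example of the `filters` payload accepted by get_containers_name
# above: a JSON object with the exact network name to include and an optional substring to
# exclude (the egress proxy is the typical exclusion). Values are illustrative only.
import json

filters_example = json.dumps(
    {"network": "entrypoint_container_network", "exclude": "egress"}
)
# get_containers_name(app, filters=filters_example) would return the container attached to
# that network whose name does not contain "egress", or raise ContainerNotFoundError.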
docker.containers.get(container_id) + inspect_result: dict[str, Any] = await container_instance.show() + return inspect_result diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/services/containers_long_running_tasks.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/services/containers_long_running_tasks.py new file mode 100644 index 000000000000..6c38f1fc4d99 --- /dev/null +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/services/containers_long_running_tasks.py @@ -0,0 +1,148 @@ +from typing import cast + +from models_library.api_schemas_directorv2.dynamic_services import ContainersCreate +from servicelib.long_running_tasks import lrt_api +from servicelib.long_running_tasks.errors import TaskAlreadyRunningError +from servicelib.long_running_tasks.models import LRTNamespace, TaskId +from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient + +from ..modules import long_running_tasks + + +def _get_task_id_from_error(e: TaskAlreadyRunningError) -> str: + return cast(str, e.managed_task.task_id) # type: ignore[attr-defined] # pylint:disable=no-member + + +async def pull_user_services_images( + rpc_client: RabbitMQRPCClient, lrt_namespace: LRTNamespace +) -> TaskId: + try: + return await lrt_api.start_task( + rpc_client, + lrt_namespace, + long_running_tasks.pull_user_services_images.__name__, + unique=True, + ) + except TaskAlreadyRunningError as e: + return _get_task_id_from_error(e) + + +async def create_user_services( + rpc_client: RabbitMQRPCClient, + lrt_namespace: LRTNamespace, + containers_create: ContainersCreate, +) -> TaskId: + try: + return await lrt_api.start_task( + rpc_client, + lrt_namespace, + long_running_tasks.create_user_services.__name__, + unique=True, + containers_create=containers_create, + ) + except TaskAlreadyRunningError as e: + return _get_task_id_from_error(e) + + +async def remove_user_services( + rpc_client: RabbitMQRPCClient, lrt_namespace: LRTNamespace +) -> TaskId: + try: + return await lrt_api.start_task( + rpc_client, + lrt_namespace, + long_running_tasks.remove_user_services.__name__, + unique=True, + ) + except TaskAlreadyRunningError as e: + return _get_task_id_from_error(e) + + +async def restore_user_services_state_paths( + rpc_client: RabbitMQRPCClient, lrt_namespace: LRTNamespace +) -> TaskId: + try: + return await lrt_api.start_task( + rpc_client, + lrt_namespace, + long_running_tasks.restore_user_services_state_paths.__name__, + unique=True, + ) + except TaskAlreadyRunningError as e: + return _get_task_id_from_error(e) + + +async def save_user_services_state_paths( + rpc_client: RabbitMQRPCClient, lrt_namespace: LRTNamespace +) -> TaskId: + try: + return await lrt_api.start_task( + rpc_client, + lrt_namespace, + long_running_tasks.save_user_services_state_paths.__name__, + unique=True, + ) + except TaskAlreadyRunningError as e: + return _get_task_id_from_error(e) + + +async def pull_user_services_input_ports( + rpc_client: RabbitMQRPCClient, + lrt_namespace: LRTNamespace, + port_keys: list[str] | None = None, +) -> TaskId: + try: + return await lrt_api.start_task( + rpc_client, + lrt_namespace, + long_running_tasks.pull_user_services_input_ports.__name__, + unique=True, + port_keys=port_keys, + ) + except TaskAlreadyRunningError as e: + return _get_task_id_from_error(e) + + +async def pull_user_services_output_ports( + rpc_client: RabbitMQRPCClient, + lrt_namespace: LRTNamespace, + port_keys: list[str] | None, +) -> TaskId: + try: + return await lrt_api.start_task( + rpc_client, + 
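# Editor's note: every wrapper in this module follows the same pattern, sketched generically
# below: the task is started with unique=True, and if an identical task is still running the
# raised TaskAlreadyRunningError already carries its id, which is handed back to the caller
# instead of an error. The _start_unique helper name is an assumption for illustration.
from typing import cast

from servicelib.long_running_tasks import lrt_api
from servicelib.long_running_tasks.errors import TaskAlreadyRunningError
from servicelib.long_running_tasks.models import LRTNamespace, TaskId
from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient


async def _start_unique(
    rpc_client: RabbitMQRPCClient,
    lrt_namespace: LRTNamespace,
    registered_task_name: str,
    **kwargs,
) -> TaskId:
    try:
        return await lrt_api.start_task(
            rpc_client, lrt_namespace, registered_task_name, unique=True, **kwargs
        )
    except TaskAlreadyRunningError as e:
        # same extraction as _get_task_id_from_error above: the id of the task
        # already in flight travels on the exception
        return cast(TaskId, e.managed_task.task_id)  # type: ignore[attr-defined]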
lrt_namespace, + long_running_tasks.pull_user_services_output_ports.__name__, + unique=True, + port_keys=port_keys, + ) + except TaskAlreadyRunningError as e: + return _get_task_id_from_error(e) + + +async def push_user_services_output_ports( + rpc_client: RabbitMQRPCClient, lrt_namespace: LRTNamespace +) -> TaskId: + try: + return await lrt_api.start_task( + rpc_client, + lrt_namespace, + long_running_tasks.push_user_services_output_ports.__name__, + unique=True, + ) + except TaskAlreadyRunningError as e: + return _get_task_id_from_error(e) + + +async def restart_user_services( + rpc_client: RabbitMQRPCClient, lrt_namespace: LRTNamespace +) -> TaskId: + try: + return await lrt_api.start_task( + rpc_client, + lrt_namespace, + long_running_tasks.restart_user_services.__name__, + unique=True, + ) + except TaskAlreadyRunningError as e: + return _get_task_id_from_error(e) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/services/disk.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/services/disk.py index 316a86d6036e..263ba8ee8f2e 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/services/disk.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/services/disk.py @@ -1,5 +1,5 @@ -from ..core.reserved_space import remove_reserved_disk_space +from ..core.reserved_space import free_reserved_disk_space -__all__: tuple[str, ...] = ("remove_reserved_disk_space",) +__all__: tuple[str, ...] = ("free_reserved_disk_space",) # nopycln: file diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/services/volumes.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/services/volumes.py index 366276bbaed4..a4a3b14a0c37 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/services/volumes.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/services/volumes.py @@ -4,7 +4,7 @@ from ..models.shared_store import get_shared_store -async def save_volume_state( +async def update_volume_status( app: FastAPI, *, status: VolumeStatus, category: VolumeCategory ) -> None: shared_store = get_shared_store(app) diff --git a/services/dynamic-sidecar/tests/conftest.py b/services/dynamic-sidecar/tests/conftest.py index b0cf6b67413e..04312f859ecb 100644 --- a/services/dynamic-sidecar/tests/conftest.py +++ b/services/dynamic-sidecar/tests/conftest.py @@ -28,17 +28,21 @@ setenvs_from_dict, setenvs_from_envfile, ) +from settings_library.redis import RedisSettings from simcore_service_dynamic_sidecar.core.reserved_space import ( - remove_reserved_disk_space, + free_reserved_disk_space, ) logger = logging.getLogger(__name__) pytest_plugins = [ + "pytest_simcore.asyncio_event_loops", "pytest_simcore.docker_compose", "pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", "pytest_simcore.faker_users_data", + "pytest_simcore.logging", + "pytest_simcore.long_running_tasks", "pytest_simcore.minio_service", "pytest_simcore.postgres_service", "pytest_simcore.pytest_global_environs", @@ -166,6 +170,8 @@ def mock_rabbit_check(mocker: MockerFixture) -> None: @pytest.fixture def base_mock_envs( + fast_long_running_tasks_cancellation: None, + use_in_memory_redis: RedisSettings, dy_volumes: Path, shared_store_dir: Path, compose_namespace: str, @@ -207,6 +213,8 @@ def base_mock_envs( @pytest.fixture def mock_environment( + fast_long_running_tasks_cancellation: None, + use_in_memory_redis: RedisSettings, mock_storage_check: None, mock_postgres_check: None, mock_rabbit_check: None, @@ 
-352,14 +360,12 @@ def mock_stop_heart_beat_task(mocker: MockerFixture) -> AsyncMock: @pytest.fixture def mock_metrics_params(faker: Faker) -> CreateServiceMetricsAdditionalParams: return TypeAdapter(CreateServiceMetricsAdditionalParams).validate_python( - CreateServiceMetricsAdditionalParams.model_config["json_schema_extra"][ - "example" - ], + CreateServiceMetricsAdditionalParams.model_json_schema()["example"] ) @pytest.fixture def cleanup_reserved_disk_space() -> Iterator[None]: - remove_reserved_disk_space() + free_reserved_disk_space() yield - remove_reserved_disk_space() + free_reserved_disk_space() diff --git a/services/dynamic-sidecar/tests/integration/conftest.py b/services/dynamic-sidecar/tests/integration/conftest.py index 8c7e5e795331..5972315d910d 100644 --- a/services/dynamic-sidecar/tests/integration/conftest.py +++ b/services/dynamic-sidecar/tests/integration/conftest.py @@ -4,9 +4,12 @@ import sqlalchemy as sa from models_library.users import UserID from pytest_simcore.helpers.faker_factories import random_user +from pytest_simcore.helpers.postgres_tools import sync_insert_and_get_row_lifespan from simcore_postgres_database.models.users import users pytest_plugins = [ + "pytest_simcore.asyncio_event_loops", + "pytest_simcore.logging", "pytest_simcore.postgres_service", "pytest_simcore.simcore_storage_service", "pytest_simcore.rabbit_service", @@ -22,15 +25,13 @@ def user_id(postgres_db: sa.engine.Engine) -> Iterable[UserID]: # which would turn this test too complex. # pylint: disable=no-value-for-parameter - stmt = users.insert().values(**random_user(name="test")).returning(users.c.id) - print(f"{stmt}") - with postgres_db.connect() as conn: - result = conn.execute(stmt) - row = result.first() - assert row - usr_id = row[users.c.id] - - yield usr_id - - with postgres_db.connect() as conn: - conn.execute(users.delete().where(users.c.id == usr_id)) + with sync_insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup + postgres_db, + table=users, + values=random_user( + name="test", + ), + pk_col=users.c.id, + ) as user_row: + + yield user_row["id"] diff --git a/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py b/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py index b7d45d90654e..ede7b955a240 100644 --- a/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py +++ b/services/dynamic-sidecar/tests/integration/test_modules_long_running_tasks.py @@ -29,8 +29,10 @@ from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from pytest_simcore.helpers.postgres_tools import PostgresTestConfig from pytest_simcore.helpers.storage import replace_storage_endpoint -from servicelib.fastapi.long_running_tasks.server import TaskProgress +from servicelib.long_running_tasks.models import TaskProgress from servicelib.utils import logged_gather +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings from settings_library.s3 import S3Settings from simcore_postgres_database.models.projects import projects from simcore_sdk.node_ports_common.constants import SIMCORE_LOCATION @@ -38,8 +40,8 @@ from simcore_service_dynamic_sidecar.core.application import AppState, create_app from simcore_service_dynamic_sidecar.core.utils import HIDDEN_FILE_NAME from simcore_service_dynamic_sidecar.modules.long_running_tasks import ( - task_restore_state, - task_save_state, + restore_user_services_state_paths, + 
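# Editor's note: the fixtures above now read examples from model_json_schema() instead of
# digging into model_config["json_schema_extra"]. Hedged Pydantic v2 sketch with a
# hypothetical model showing why both spellings expose the same data:
from pydantic import BaseModel, ConfigDict


class _DemoParams(BaseModel):
    cpu_limit: float

    model_config = ConfigDict(json_schema_extra={"example": {"cpu_limit": 0.1}})


# json_schema_extra is merged into the generated schema, so the example is retrievable
# without touching model_config internals
example = _DemoParams.model_json_schema()["example"]
assert _DemoParams.model_validate(example).cpu_limit == 0.1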
save_user_services_state_paths, ) from types_aiobotocore_s3 import S3Client from yarl import URL @@ -89,7 +91,8 @@ def project_id(user_id: int, postgres_db: sa.engine.Engine) -> Iterable[ProjectI def mock_environment( mock_storage_check: None, mock_rabbit_check: None, - rabbit_service, + redis_service: RedisSettings, + rabbit_service: RabbitSettings, postgres_host_config: PostgresTestConfig, storage_endpoint: URL, minio_s3_settings_envs: EnvVarsDict, @@ -372,7 +375,7 @@ async def test_legacy_state_open_and_clone( # restore state from legacy archives for _ in range(repeat_count): - await task_restore_state( + await restore_user_services_state_paths( progress=task_progress, settings=app_state.settings, mounted_volumes=app_state.mounted_volumes, @@ -393,7 +396,7 @@ async def test_legacy_state_open_and_clone( ) for _ in range(repeat_count): - await task_save_state( + await save_user_services_state_paths( progress=task_progress, settings=app_state.settings, mounted_volumes=app_state.mounted_volumes, @@ -433,7 +436,7 @@ async def test_state_open_and_close( # restoring finds nothing inside for _ in range(repeat_count): - await task_restore_state( + await restore_user_services_state_paths( progress=task_progress, settings=app_state.settings, mounted_volumes=app_state.mounted_volumes, @@ -459,7 +462,7 @@ async def test_state_open_and_close( # save them to S3 for _ in range(repeat_count): - await task_save_state( + await save_user_services_state_paths( progress=task_progress, settings=app_state.settings, mounted_volumes=app_state.mounted_volumes, @@ -482,7 +485,7 @@ async def test_state_open_and_close( # restore them from S3 for _ in range(repeat_count): - await task_restore_state( + await restore_user_services_state_paths( progress=task_progress, settings=app_state.settings, mounted_volumes=app_state.mounted_volumes, diff --git a/services/dynamic-sidecar/tests/integration/test_modules_user_services_preferences.py b/services/dynamic-sidecar/tests/integration/test_modules_user_services_preferences.py index 9be0bbdebbf2..73eb9781beab 100644 --- a/services/dynamic-sidecar/tests/integration/test_modules_user_services_preferences.py +++ b/services/dynamic-sidecar/tests/integration/test_modules_user_services_preferences.py @@ -17,6 +17,7 @@ from pydantic import TypeAdapter from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from pytest_simcore.helpers.postgres_tools import PostgresTestConfig +from settings_library.redis import RedisSettings from simcore_service_dynamic_sidecar.core.application import create_app from simcore_service_dynamic_sidecar.modules.user_services_preferences import ( load_user_services_preferences, @@ -32,6 +33,7 @@ pytest_simcore_core_services_selection = [ "migration", "postgres", + "redis", ] pytest_simcore_ops_services_selection = [ @@ -62,9 +64,10 @@ def product_name() -> ProductName: @pytest.fixture -def mock_environment( # pylint:disable=too-many-arguments +def mock_environment( # pylint:disable=too-many-arguments,too-many-positional-arguments mock_rabbit_check: None, mock_storage_check: None, + redis_service: RedisSettings, postgres_host_config: PostgresTestConfig, monkeypatch: pytest.MonkeyPatch, base_mock_envs: EnvVarsDict, diff --git a/services/dynamic-sidecar/tests/unit/test_api_rest_containers.py b/services/dynamic-sidecar/tests/unit/api/rest/test_containers.py similarity index 94% rename from services/dynamic-sidecar/tests/unit/test_api_rest_containers.py rename to services/dynamic-sidecar/tests/unit/api/rest/test_containers.py index 
0731009a380d..4aabcd02548b 100644 --- a/services/dynamic-sidecar/tests/unit/test_api_rest_containers.py +++ b/services/dynamic-sidecar/tests/unit/api/rest/test_containers.py @@ -19,6 +19,7 @@ from aiodocker.volumes import DockerVolume from aiofiles.os import mkdir from async_asgi_testclient import TestClient +from common_library.serialization import model_dump_with_secrets from faker import Faker from fastapi import FastAPI, status from models_library.api_schemas_dynamic_sidecar.containers import ActivityInfo @@ -28,9 +29,9 @@ from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.docker_constants import SUFFIX_EGRESS_PROXY_NAME -from servicelib.fastapi.long_running_tasks.client import TaskId +from servicelib.long_running_tasks.models import TaskId +from settings_library.rabbit import RabbitSettings from simcore_service_dynamic_sidecar._meta import API_VTAG -from simcore_service_dynamic_sidecar.api.rest.containers import _INACTIVE_FOR_LONG_TIME from simcore_service_dynamic_sidecar.core.application import AppState from simcore_service_dynamic_sidecar.core.docker_compose_utils import ( docker_compose_create, @@ -42,13 +43,18 @@ from simcore_service_dynamic_sidecar.modules.outputs._context import OutputsContext from simcore_service_dynamic_sidecar.modules.outputs._manager import OutputsManager from simcore_service_dynamic_sidecar.modules.outputs._watcher import OutputsWatcher +from simcore_service_dynamic_sidecar.services.containers import _INACTIVE_FOR_LONG_TIME from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_delay from tenacity.wait import wait_fixed -WAIT_FOR_OUTPUTS_WATCHER: Final[float] = 0.1 -FAST_POLLING_INTERVAL: Final[float] = 0.1 +pytest_simcore_core_services_selection = [ + "rabbit", +] + +_WAIT_FOR_OUTPUTS_WATCHER: Final[float] = 0.1 +_FAST_POLLING_INTERVAL: Final[float] = 0.1 # UTILS @@ -109,8 +115,8 @@ async def _start_containers( task_id: TaskId = response.json() async for attempt in AsyncRetrying( - wait=wait_fixed(FAST_POLLING_INTERVAL), - stop=stop_after_delay(100 * FAST_POLLING_INTERVAL), + wait=wait_fixed(_FAST_POLLING_INTERVAL), + stop=stop_after_delay(100 * _FAST_POLLING_INTERVAL), reraise=True, ): with attempt: @@ -162,9 +168,19 @@ async def _assert_compose_spec_pulled(compose_spec: str, settings: ApplicationSe @pytest.fixture def mock_environment( - mock_environment: EnvVarsDict, mock_rabbitmq_envs: EnvVarsDict + monkeypatch: pytest.MonkeyPatch, + rabbit_service: RabbitSettings, + mock_environment: EnvVarsDict, ) -> EnvVarsDict: - return mock_rabbitmq_envs + return setenvs_from_dict( + monkeypatch, + { + **mock_environment, + "RABBIT_SETTINGS": json.dumps( + model_dump_with_secrets(rabbit_service, show_secrets=True) + ), + }, + ) @pytest.fixture @@ -267,10 +283,10 @@ def not_started_containers() -> list[str]: def mock_outputs_labels() -> dict[str, ServiceOutput]: return { "output_port_1": TypeAdapter(ServiceOutput).validate_python( - ServiceOutput.model_config["json_schema_extra"]["examples"][3] + ServiceOutput.model_json_schema()["examples"][3] ), "output_port_2": TypeAdapter(ServiceOutput).validate_python( - ServiceOutput.model_config["json_schema_extra"]["examples"][3] + ServiceOutput.model_json_schema()["examples"][3] ), } @@ -428,7 +444,7 @@ async def test_container_missing_container( ): def _expected_error_string(container: str) -> dict[str, str]: return { - "detail": f"No container 
'{container}' was started. Started containers '[]'" + "detail": f"No container='{container}' was found in started_containers='[]'" } for container in not_started_containers: @@ -465,7 +481,7 @@ async def test_outputs_watcher_disabling( assert isinstance(test_client.application, FastAPI) outputs_context: OutputsContext = test_client.application.state.outputs_context outputs_manager: OutputsManager = test_client.application.state.outputs_manager - outputs_manager.task_monitor_interval_s = WAIT_FOR_OUTPUTS_WATCHER / 10 + outputs_manager.task_monitor_interval_s = _WAIT_FOR_OUTPUTS_WATCHER / 10 async def _create_port_key_events(is_propagation_enabled: bool) -> None: random_subdir = f"{uuid4()}" @@ -532,7 +548,7 @@ async def test_container_create_outputs_dirs( # by default outputs-watcher it is disabled await _assert_enable_output_ports(test_client) - await asyncio.sleep(WAIT_FOR_OUTPUTS_WATCHER) + await asyncio.sleep(_WAIT_FOR_OUTPUTS_WATCHER) assert mock_event_filter_enqueue.call_count == 0 @@ -549,7 +565,7 @@ async def test_container_create_outputs_dirs( for dir_name in mock_outputs_labels: assert (mounted_volumes.disk_outputs_path / dir_name).is_dir() - await asyncio.sleep(WAIT_FOR_OUTPUTS_WATCHER) + await asyncio.sleep(_WAIT_FOR_OUTPUTS_WATCHER) EXPECT_EVENTS_WHEN_CREATING_OUTPUT_PORT_KEY_DIRS = 0 assert ( mock_event_filter_enqueue.call_count @@ -616,7 +632,7 @@ async def test_containers_entrypoint_name_containers_not_started( if include_exclude_filter_option: assert response.status_code == status.HTTP_404_NOT_FOUND, response.text assert response.json() == { - "detail": "No container found for network=entrypoint_container_network" + "detail": "No container found for network=entrypoint_container_network and exclude=egress" } else: assert response.status_code == status.HTTP_200_OK, response.text @@ -736,7 +752,7 @@ def mock_inactive_since_command_response( activity_response: ActivityInfo, ) -> None: mocker.patch( - "simcore_service_dynamic_sidecar.api.rest.containers.run_command_in_container", + "simcore_service_dynamic_sidecar.services.containers.run_command_in_container", return_value=activity_response.model_dump_json(), ) @@ -756,7 +772,7 @@ async def test_containers_activity_inactive_since( @pytest.fixture def mock_inactive_response_wrong_format(mocker: MockerFixture) -> None: mocker.patch( - "simcore_service_dynamic_sidecar.api.rest.containers.run_command_in_container", + "simcore_service_dynamic_sidecar.services.containers.run_command_in_container", return_value="This is an unparsable json response {}", ) diff --git a/services/dynamic-sidecar/tests/unit/test_api_rest_containers_long_running_tasks.py b/services/dynamic-sidecar/tests/unit/api/rest/test_containers_long_running_tasks.py similarity index 66% rename from services/dynamic-sidecar/tests/unit/test_api_rest_containers_long_running_tasks.py rename to services/dynamic-sidecar/tests/unit/api/rest/test_containers_long_running_tasks.py index 75677e1d7f73..c8d97fb52d8c 100644 --- a/services/dynamic-sidecar/tests/unit/test_api_rest_containers_long_running_tasks.py +++ b/services/dynamic-sidecar/tests/unit/api/rest/test_containers_long_running_tasks.py @@ -2,10 +2,10 @@ # pylint: disable=unused-argument # pylint: disable=no-member +import asyncio import json from collections.abc import AsyncIterable, AsyncIterator, Awaitable, Callable, Iterator from contextlib import asynccontextmanager, contextmanager -from inspect import getmembers, isfunction from pathlib import Path from typing import Any, Final, NamedTuple from unittest.mock import 
AsyncMock @@ -16,9 +16,14 @@ from aiodocker.containers import DockerContainer from aiodocker.volumes import DockerVolume from asgi_lifespan import LifespanManager +from common_library.serialization import model_dump_with_secrets from fastapi import FastAPI from fastapi.routing import APIRoute from httpx import ASGITransport, AsyncClient +from models_library.api_schemas_directorv2.dynamic_services import ( + ContainersComposeSpec, + ContainersCreate, +) from models_library.api_schemas_dynamic_sidecar.containers import DockerComposeYamlStr from models_library.api_schemas_long_running_tasks.base import ( ProgressMessage, @@ -27,31 +32,41 @@ from models_library.services_creation import CreateServiceMetricsAdditionalParams from pydantic import AnyHttpUrl, TypeAdapter from pytest_mock.plugin import MockerFixture -from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict +from pytest_simcore.helpers.long_running_tasks import ( + assert_task_is_no_longer_present, + get_fastapi_long_running_manager, +) +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.fastapi.long_running_tasks.client import ( - Client, - TaskId, + HttpClient, periodic_task_result, ) from servicelib.fastapi.long_running_tasks.client import setup as client_setup +from servicelib.long_running_tasks.errors import TaskExceptionError +from servicelib.long_running_tasks.models import ProgressCallback, TaskId +from servicelib.long_running_tasks.task import TaskRegistry +from settings_library.rabbit import RabbitSettings from simcore_sdk.node_ports_common.exceptions import NodeNotFound from simcore_service_dynamic_sidecar._meta import API_VTAG -from simcore_service_dynamic_sidecar.api.rest import containers_long_running_tasks from simcore_service_dynamic_sidecar.core.validation import InvalidComposeSpecError -from simcore_service_dynamic_sidecar.models.schemas.containers import ( - ContainersComposeSpec, - ContainersCreate, -) from simcore_service_dynamic_sidecar.models.shared_store import SharedStore +from simcore_service_dynamic_sidecar.modules import long_running_tasks as sidecar_lrts from simcore_service_dynamic_sidecar.modules.inputs import enable_inputs_pulling from simcore_service_dynamic_sidecar.modules.outputs._context import OutputsContext -from simcore_service_dynamic_sidecar.modules.outputs._manager import ( - OutputsManager, - UploadPortsFailedError, +from simcore_service_dynamic_sidecar.modules.outputs._manager import OutputsManager +from tenacity import ( + AsyncRetrying, + retry_if_exception_type, + stop_after_delay, + wait_fixed, ) -FAST_STATUS_POLL: Final[float] = 0.1 -CREATE_SERVICE_CONTAINERS_TIMEOUT: Final[float] = 60 +pytest_simcore_core_services_selection = [ + "rabbit", +] + +_FAST_STATUS_POLL: Final[float] = 0.1 +_CREATE_SERVICE_CONTAINERS_TIMEOUT: Final[float] = 60 DEFAULT_COMMAND_TIMEOUT: Final[int] = 5 @@ -78,28 +93,38 @@ def mock_tasks(mocker: MockerFixture) -> Iterator[None]: async def _just_log_task(*args, **kwargs) -> None: print(f"Called mocked function with {args}, {kwargs}") - # searching by name since all start with _task - tasks_names = [ - x[0] - for x in getmembers(containers_long_running_tasks, isfunction) - if x[0].startswith("task") - ] - - for task_name in tasks_names: - mocker.patch.object( - containers_long_running_tasks, task_name, new=_just_log_task - ) + TaskRegistry.register(_just_log_task) + + for task_name in [ + sidecar_lrts.pull_user_services_images.__name__, + sidecar_lrts.create_user_services.__name__, + 
sidecar_lrts.remove_user_services.__name__, + sidecar_lrts.restore_user_services_state_paths.__name__, + sidecar_lrts.save_user_services_state_paths.__name__, + sidecar_lrts.pull_user_services_input_ports.__name__, + sidecar_lrts.pull_user_services_output_ports.__name__, + sidecar_lrts.push_user_services_output_ports.__name__, + sidecar_lrts.restart_user_services.__name__, + ]: + mocker.patch.object(sidecar_lrts, task_name, new=_just_log_task) yield None + TaskRegistry.unregister(_just_log_task) + @asynccontextmanager -async def auto_remove_task(client: Client, task_id: TaskId) -> AsyncIterator[None]: +async def auto_remove_task( + http_client: HttpClient, task_id: TaskId +) -> AsyncIterator[None]: """clenup pending tasks""" try: yield finally: - await client.cancel_and_delete_task(task_id, timeout=10) + await http_client.remove_task(task_id, timeout=10) + await assert_task_is_no_longer_present( + get_fastapi_long_running_manager(http_client.app), task_id, {} + ) async def _get_container_timestamps( @@ -164,17 +189,32 @@ def backend_url() -> AnyHttpUrl: @pytest.fixture -def mock_environment(mock_rabbitmq_envs: EnvVarsDict) -> EnvVarsDict: - return mock_rabbitmq_envs +def mock_environment( + monkeypatch: pytest.MonkeyPatch, + rabbit_service: RabbitSettings, + mock_environment: EnvVarsDict, +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + { + **mock_environment, + "RABBIT_SETTINGS": json.dumps( + model_dump_with_secrets(rabbit_service, show_secrets=True) + ), + }, + ) @pytest.fixture -async def app(app: FastAPI) -> AsyncIterable[FastAPI]: +async def app( + app: FastAPI, + fast_long_running_tasks_cancellation: None, +) -> AsyncIterable[FastAPI]: # add the client setup to the same application # this is only required for testing, in reality # this will be in a different process client_setup(app) - async with LifespanManager(app): + async with LifespanManager(app, startup_timeout=30, shutdown_timeout=30): _print_routes(app) yield app @@ -197,10 +237,12 @@ async def httpx_async_client( @pytest.fixture -def client( +def http_client( app: FastAPI, httpx_async_client: AsyncClient, backend_url: AnyHttpUrl -) -> Client: - return Client(app=app, async_client=httpx_async_client, base_url=f"{backend_url}") +) -> HttpClient: + return HttpClient( + app=app, async_client=httpx_async_client, base_url=f"{backend_url}" + ) @pytest.fixture @@ -245,17 +287,17 @@ def mock_nodeports(mocker: MockerFixture) -> None: ] ) async def mock_port_keys( - request: pytest.FixtureRequest, client: Client + request: pytest.FixtureRequest, http_client: HttpClient ) -> list[str] | None: - outputs_context: OutputsContext = client.app.state.outputs_context + outputs_context: OutputsContext = http_client.app.state.outputs_context if request.param is not None: await outputs_context.set_file_type_port_keys(request.param) return request.param @pytest.fixture -def outputs_manager(client: Client) -> OutputsManager: - return client.app.state.outputs_manager +def outputs_manager(http_client: HttpClient) -> OutputsManager: + return http_client.app.state.outputs_manager @pytest.fixture @@ -384,38 +426,78 @@ async def _debug_progress( print(f"{task_id} {percent} {message}") +async def _assert_progress_finished( + last_progress_message: tuple[ProgressMessage, ProgressPercent] | None, +) -> None: + async for attempt in AsyncRetrying( + stop=stop_after_delay(10), + wait=wait_fixed(0.1), + retry=retry_if_exception_type(AssertionError), + reraise=True, + ): + with attempt: + await asyncio.sleep(0) # yield control to the event loop + 
assert last_progress_message == ("finished", 1.0) + + +async def _perioduc_result_and_task_removed( + app: FastAPI, + http_client: HttpClient, + task_id: TaskId, + *, + progress_callback: ProgressCallback | None = None, +) -> Any | None: + try: + async with periodic_task_result( + client=http_client, + task_id=task_id, + task_timeout=_CREATE_SERVICE_CONTAINERS_TIMEOUT, + status_poll_interval=_FAST_STATUS_POLL, + progress_callback=progress_callback, + ) as result: + return result + finally: + await assert_task_is_no_longer_present( + get_fastapi_long_running_manager(app), task_id, {} + ) + + async def test_create_containers_task( httpx_async_client: AsyncClient, - client: Client, + http_client: HttpClient, + app: FastAPI, compose_spec: str, mock_stop_heart_beat_task: AsyncMock, mock_metrics_params: CreateServiceMetricsAdditionalParams, shared_store: SharedStore, ) -> None: - last_progress_message: tuple[str, float] | None = None + last_progress_message: tuple[ProgressMessage, ProgressPercent] | None = None - async def create_progress(message: str, percent: float, _: TaskId) -> None: + async def create_progress( + message: ProgressMessage, percent: ProgressPercent | None, _: TaskId + ) -> None: nonlocal last_progress_message + assert percent is not None last_progress_message = (message, percent) print(message, percent) - async with periodic_task_result( - client=client, - task_id=await _get_task_id_create_service_containers( + result = await _perioduc_result_and_task_removed( + app, + http_client, + await _get_task_id_create_service_containers( httpx_async_client, compose_spec, mock_metrics_params ), - task_timeout=CREATE_SERVICE_CONTAINERS_TIMEOUT, - status_poll_interval=FAST_STATUS_POLL, progress_callback=create_progress, - ) as result: - assert shared_store.container_names == result + ) + assert shared_store.container_names == result - assert last_progress_message == ("finished", 1.0) + await _assert_progress_finished(last_progress_message) async def test_pull_user_servcices_docker_images( httpx_async_client: AsyncClient, - client: Client, + http_client: HttpClient, + app: FastAPI, compose_spec: str, mock_stop_heart_beat_task: AsyncMock, mock_metrics_params: CreateServiceMetricsAdditionalParams, @@ -431,49 +513,44 @@ async def create_progress( last_progress_message = (message, percent) print(message, percent) - async with periodic_task_result( - client=client, - task_id=await _get_task_id_create_service_containers( + result = await _perioduc_result_and_task_removed( + app, + http_client, + await _get_task_id_create_service_containers( httpx_async_client, compose_spec, mock_metrics_params ), - task_timeout=CREATE_SERVICE_CONTAINERS_TIMEOUT, - status_poll_interval=FAST_STATUS_POLL, progress_callback=create_progress, - ) as result: - assert shared_store.container_names == result - - assert last_progress_message == ("finished", 1.0) + ) + assert shared_store.container_names == result + await _assert_progress_finished(last_progress_message) - async with periodic_task_result( - client=client, - task_id=await _get_task_id_pull_user_servcices_docker_images( + result = await _perioduc_result_and_task_removed( + app, + http_client, + await _get_task_id_pull_user_servcices_docker_images( httpx_async_client, compose_spec, mock_metrics_params ), - task_timeout=CREATE_SERVICE_CONTAINERS_TIMEOUT, - status_poll_interval=FAST_STATUS_POLL, - progress_callback=_debug_progress, - ) as result: - assert result is None - assert last_progress_message == ("finished", 1.0) + progress_callback=create_progress, + ) 
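# Editor's note: progress messages reach the client asynchronously, so the helper above polls
# until ("finished", 1.0) shows up instead of asserting once. Generic sketch of that retry
# idiom; the eventually() name and the 10s/0.1s budget are illustrative.
from collections.abc import Callable

from tenacity import AsyncRetrying, retry_if_exception_type, stop_after_delay, wait_fixed


async def eventually(assertion: Callable[[], None]) -> None:
    # re-run the assertion until it stops raising AssertionError or the deadline expires
    async for attempt in AsyncRetrying(
        stop=stop_after_delay(10),
        wait=wait_fixed(0.1),
        retry=retry_if_exception_type(AssertionError),
        reraise=True,
    ):
        with attempt:
            assertion()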
+ assert result is None + await _assert_progress_finished(last_progress_message) async def test_create_containers_task_invalid_yaml_spec( httpx_async_client: AsyncClient, - client: Client, + http_client: HttpClient, + app: FastAPI, mock_stop_heart_beat_task: AsyncMock, mock_metrics_params: CreateServiceMetricsAdditionalParams, ): with pytest.raises(InvalidComposeSpecError) as exec_info: - async with periodic_task_result( - client=client, - task_id=await _get_task_id_create_service_containers( + await _perioduc_result_and_task_removed( + app, + http_client, + await _get_task_id_create_service_containers( httpx_async_client, "", mock_metrics_params ), - task_timeout=CREATE_SERVICE_CONTAINERS_TIMEOUT, - status_poll_interval=FAST_STATUS_POLL, - progress_callback=_debug_progress, - ): - pass + ) assert "Provided yaml is not valid" in f"{exec_info.value}" @@ -493,7 +570,7 @@ async def test_create_containers_task_invalid_yaml_spec( ) async def test_same_task_id_is_returned_if_task_exists( httpx_async_client: AsyncClient, - client: Client, + http_client: HttpClient, mocker: MockerFixture, get_task_id_callable: Callable[..., Awaitable], mock_stop_heart_beat_task: AsyncMock, @@ -511,21 +588,24 @@ def _get_awaitable() -> Awaitable: with mock_tasks(mocker): task_id = await _get_awaitable() - async with auto_remove_task(client, task_id): + assert task_id.endswith("unique") + async with auto_remove_task(http_client, task_id): assert await _get_awaitable() == task_id # since the previous task was already removed it is again possible - # to create a task + # to create a task and it will share the same task_id new_task_id = await _get_awaitable() - assert new_task_id != task_id - async with auto_remove_task(client, task_id): + assert new_task_id.endswith("unique") + assert new_task_id == task_id + async with auto_remove_task(http_client, task_id): pass async def test_containers_down_after_starting( mock_ensure_read_permissions_on_user_service_data: None, httpx_async_client: AsyncClient, - client: Client, + http_client: HttpClient, + app: FastAPI, compose_spec: str, mock_stop_heart_beat_task: AsyncMock, mock_metrics_params: CreateServiceMetricsAdditionalParams, @@ -534,74 +614,76 @@ async def test_containers_down_after_starting( mocker: MockerFixture, ): # start containers - async with periodic_task_result( - client=client, - task_id=await _get_task_id_create_service_containers( + result = await _perioduc_result_and_task_removed( + app, + http_client, + await _get_task_id_create_service_containers( httpx_async_client, compose_spec, mock_metrics_params ), - task_timeout=CREATE_SERVICE_CONTAINERS_TIMEOUT, - status_poll_interval=FAST_STATUS_POLL, progress_callback=_debug_progress, - ) as result: - assert shared_store.container_names == result + ) + assert shared_store.container_names == result # put down containers - async with periodic_task_result( - client=client, - task_id=await _get_task_id_docker_compose_down(httpx_async_client), - task_timeout=CREATE_SERVICE_CONTAINERS_TIMEOUT, - status_poll_interval=FAST_STATUS_POLL, + result = await _perioduc_result_and_task_removed( + app, + http_client, + await _get_task_id_docker_compose_down(httpx_async_client), progress_callback=_debug_progress, - ) as result: - assert result is None + ) + assert result is None async def test_containers_down_missing_spec( httpx_async_client: AsyncClient, - client: Client, + http_client: HttpClient, + app: FastAPI, caplog_info_debug: pytest.LogCaptureFixture, ): - async with periodic_task_result( - client=client, - task_id=await 
_get_task_id_docker_compose_down(httpx_async_client), - task_timeout=CREATE_SERVICE_CONTAINERS_TIMEOUT, - status_poll_interval=FAST_STATUS_POLL, + result = await _perioduc_result_and_task_removed( + app, + http_client, + await _get_task_id_docker_compose_down(httpx_async_client), progress_callback=_debug_progress, - ) as result: - assert result is None + ) + assert result is None assert "No compose-spec was found" in caplog_info_debug.text async def test_container_restore_state( - httpx_async_client: AsyncClient, client: Client, mock_data_manager: None + httpx_async_client: AsyncClient, + http_client: HttpClient, + app: FastAPI, + mock_data_manager: None, ): - async with periodic_task_result( - client=client, - task_id=await _get_task_id_state_restore(httpx_async_client), - task_timeout=CREATE_SERVICE_CONTAINERS_TIMEOUT, - status_poll_interval=FAST_STATUS_POLL, + result = await _perioduc_result_and_task_removed( + app, + http_client, + await _get_task_id_state_restore(httpx_async_client), progress_callback=_debug_progress, - ) as result: - assert isinstance(result, int) + ) + assert isinstance(result, int) async def test_container_save_state( - httpx_async_client: AsyncClient, client: Client, mock_data_manager: None + httpx_async_client: AsyncClient, + http_client: HttpClient, + app: FastAPI, + mock_data_manager: None, ): - async with periodic_task_result( - client=client, - task_id=await _get_task_id_state_save(httpx_async_client), - task_timeout=CREATE_SERVICE_CONTAINERS_TIMEOUT, - status_poll_interval=FAST_STATUS_POLL, + result = await _perioduc_result_and_task_removed( + app, + http_client, + await _get_task_id_state_save(httpx_async_client), progress_callback=_debug_progress, - ) as result: - assert isinstance(result, int) + ) + assert isinstance(result, int) @pytest.mark.parametrize("inputs_pulling_enabled", [True, False]) async def test_container_pull_input_ports( httpx_async_client: AsyncClient, - client: Client, + http_client: HttpClient, inputs_pulling_enabled: bool, app: FastAPI, mock_port_keys: list[str] | None, @@ -610,57 +692,51 @@ async def test_container_pull_input_ports( if inputs_pulling_enabled: enable_inputs_pulling(app) - async with periodic_task_result( - client=client, - task_id=await _get_task_id_task_ports_inputs_pull( - httpx_async_client, mock_port_keys - ), - task_timeout=CREATE_SERVICE_CONTAINERS_TIMEOUT, - status_poll_interval=FAST_STATUS_POLL, + result = await _perioduc_result_and_task_removed( + app, + http_client, + await _get_task_id_task_ports_inputs_pull(httpx_async_client, mock_port_keys), progress_callback=_debug_progress, - ) as result: - assert result == (42 if inputs_pulling_enabled else 0) + ) + assert result == (42 if inputs_pulling_enabled else 0) async def test_container_pull_output_ports( httpx_async_client: AsyncClient, - client: Client, + http_client: HttpClient, + app: FastAPI, mock_port_keys: list[str] | None, mock_nodeports: None, ): - async with periodic_task_result( - client=client, - task_id=await _get_task_id_task_ports_outputs_pull( - httpx_async_client, mock_port_keys - ), - task_timeout=CREATE_SERVICE_CONTAINERS_TIMEOUT, - status_poll_interval=FAST_STATUS_POLL, + result = await _perioduc_result_and_task_removed( + app, + http_client, + await _get_task_id_task_ports_outputs_pull(httpx_async_client, mock_port_keys), progress_callback=_debug_progress, - ) as result: - assert result == 42 + ) + assert result == 42 async def test_container_push_output_ports( httpx_async_client: AsyncClient, - client: Client, + http_client: HttpClient, + 
app: FastAPI, mock_port_keys: list[str] | None, mock_nodeports: None, ): - async with periodic_task_result( - client=client, - task_id=await _get_task_id_task_ports_outputs_push( - httpx_async_client, mock_port_keys - ), - task_timeout=CREATE_SERVICE_CONTAINERS_TIMEOUT, - status_poll_interval=FAST_STATUS_POLL, + result = await _perioduc_result_and_task_removed( + app, + http_client, + await _get_task_id_task_ports_outputs_push(httpx_async_client, mock_port_keys), progress_callback=_debug_progress, - ) as result: - assert result is None + ) + assert result is None async def test_container_push_output_ports_missing_node( httpx_async_client: AsyncClient, - client: Client, + http_client: HttpClient, + app: FastAPI, mock_port_keys: list[str] | None, missing_node_uuid: str, mock_node_missing: None, @@ -670,58 +746,54 @@ async def test_container_push_output_ports_missing_node( await outputs_manager.port_key_content_changed(port_key) async def _test_code() -> None: - async with periodic_task_result( - client=client, - task_id=await _get_task_id_task_ports_outputs_push( + await _perioduc_result_and_task_removed( + app, + http_client, + await _get_task_id_task_ports_outputs_push( httpx_async_client, mock_port_keys ), - task_timeout=CREATE_SERVICE_CONTAINERS_TIMEOUT, - status_poll_interval=FAST_STATUS_POLL, progress_callback=_debug_progress, - ): - pass + ) if not mock_port_keys: await _test_code() else: - with pytest.raises(UploadPortsFailedError) as exec_info: + with pytest.raises(TaskExceptionError) as exec_info: await _test_code() assert f"the node id {missing_node_uuid} was not found" in f"{exec_info.value}" async def test_containers_restart( httpx_async_client: AsyncClient, - client: Client, + http_client: HttpClient, + app: FastAPI, compose_spec: str, mock_stop_heart_beat_task: AsyncMock, mock_metrics_params: CreateServiceMetricsAdditionalParams, shared_store: SharedStore, ): - async with periodic_task_result( - client=client, - task_id=await _get_task_id_create_service_containers( + container_names = await _perioduc_result_and_task_removed( + app, + http_client, + await _get_task_id_create_service_containers( httpx_async_client, compose_spec, mock_metrics_params ), - task_timeout=CREATE_SERVICE_CONTAINERS_TIMEOUT, - status_poll_interval=FAST_STATUS_POLL, progress_callback=_debug_progress, - ) as container_names: - assert shared_store.container_names == container_names - + ) + assert shared_store.container_names == container_names assert container_names container_timestamps_before = await _get_container_timestamps(container_names) - async with periodic_task_result( - client=client, - task_id=await _get_task_id_task_containers_restart( + result = await _perioduc_result_and_task_removed( + app, + http_client, + await _get_task_id_task_containers_restart( httpx_async_client, DEFAULT_COMMAND_TIMEOUT ), - task_timeout=CREATE_SERVICE_CONTAINERS_TIMEOUT, - status_poll_interval=FAST_STATUS_POLL, progress_callback=_debug_progress, - ) as result: - assert result is None + ) + assert result is None container_timestamps_after = await _get_container_timestamps(container_names) diff --git a/services/dynamic-sidecar/tests/unit/api/rest/test_disk.py b/services/dynamic-sidecar/tests/unit/api/rest/test_disk.py index 3d6bda8d8f1b..fa466827e0dd 100644 --- a/services/dynamic-sidecar/tests/unit/api/rest/test_disk.py +++ b/services/dynamic-sidecar/tests/unit/api/rest/test_disk.py @@ -1,5 +1,7 @@ # pylint:disable=unused-argument +from unittest.mock import AsyncMock + from async_asgi_testclient import TestClient from 
fastapi import status from simcore_service_dynamic_sidecar._meta import API_VTAG @@ -9,7 +11,9 @@ async def test_reserved_disk_space_freed( - cleanup_reserved_disk_space: None, test_client: TestClient + mock_core_rabbitmq: dict[str, AsyncMock], + cleanup_reserved_disk_space: None, + test_client: TestClient, ): assert _RESERVED_DISK_SPACE_NAME.exists() response = await test_client.post(f"/{API_VTAG}/disk/reserved:free") diff --git a/services/dynamic-sidecar/tests/unit/api/rest/test_volumes.py b/services/dynamic-sidecar/tests/unit/api/rest/test_volumes.py index 40eab12336a3..5bf729dfe0db 100644 --- a/services/dynamic-sidecar/tests/unit/api/rest/test_volumes.py +++ b/services/dynamic-sidecar/tests/unit/api/rest/test_volumes.py @@ -1,6 +1,7 @@ # pylint: disable=unused-argument from pathlib import Path +from unittest.mock import AsyncMock import pytest from async_asgi_testclient import TestClient @@ -20,6 +21,7 @@ ], ) async def test_volumes_state_saved_ok( + mock_core_rabbitmq: dict[str, AsyncMock], ensure_shared_store_dir: Path, test_client: TestClient, volume_category: VolumeCategory, @@ -46,6 +48,7 @@ async def test_volumes_state_saved_ok( @pytest.mark.parametrize("invalid_volume_category", ["outputs", "outputS"]) async def test_volumes_state_saved_error( + mock_core_rabbitmq: dict[str, AsyncMock], ensure_shared_store_dir: Path, test_client: TestClient, invalid_volume_category: VolumeCategory, diff --git a/services/dynamic-sidecar/tests/unit/api/rpc/test__container_extensions.py b/services/dynamic-sidecar/tests/unit/api/rpc/test__container_extensions.py new file mode 100644 index 000000000000..6ab049a121cd --- /dev/null +++ b/services/dynamic-sidecar/tests/unit/api/rpc/test__container_extensions.py @@ -0,0 +1,713 @@ +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name +# pylint:disable=protected-access +import asyncio +import json +from collections.abc import AsyncIterable +from inspect import signature +from typing import Any, Final +from unittest.mock import AsyncMock + +import aiodocker +import pytest +import yaml +from aiodocker.volumes import DockerVolume +from faker import Faker +from fastapi import FastAPI +from models_library.api_schemas_directorv2.dynamic_services import ( + ContainersComposeSpec, + ContainersCreate, +) +from models_library.api_schemas_dynamic_sidecar.containers import ActivityInfo +from models_library.projects_nodes_io import NodeID +from models_library.services_creation import CreateServiceMetricsAdditionalParams +from models_library.services_io import ServiceOutput +from pydantic import TypeAdapter +from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict +from servicelib.docker_constants import SUFFIX_EGRESS_PROXY_NAME +from servicelib.fastapi.long_running_tasks._manager import FastAPILongRunningManager +from servicelib.long_running_tasks.models import LRTNamespace +from servicelib.rabbitmq import RabbitMQRPCClient, RPCServerError +from servicelib.rabbitmq.rpc_interfaces.dynamic_sidecar import ( + container_extensions, + containers, + containers_long_running_tasks, +) +from simcore_service_dynamic_sidecar.core.application import AppState +from simcore_service_dynamic_sidecar.core.docker_compose_utils import ( + docker_compose_create, +) +from simcore_service_dynamic_sidecar.core.settings import ApplicationSettings +from simcore_service_dynamic_sidecar.core.utils import async_command +from simcore_service_dynamic_sidecar.core.validation import parse_compose_spec +from 
simcore_service_dynamic_sidecar.models.shared_store import SharedStore +from simcore_service_dynamic_sidecar.modules.inputs import InputsState +from simcore_service_dynamic_sidecar.modules.outputs._watcher import OutputsWatcher +from simcore_service_dynamic_sidecar.services.containers import _INACTIVE_FOR_LONG_TIME +from utils import get_lrt_result + +pytest_simcore_core_services_selection = [ + "rabbit", +] + +_WAIT_FOR_OUTPUTS_WATCHER: Final[float] = 0.1 + + +def _assert_inputs_pulling(app: FastAPI, is_enabled: bool) -> None: + inputs_state: InputsState = app.state.inputs_state + assert inputs_state.inputs_pulling_enabled is is_enabled + + +def _assert_outputs_event_propagation( + spy_output_watcher: dict[str, AsyncMock], is_enabled: bool +) -> None: + assert spy_output_watcher["disable_event_propagation"].call_count == ( + 1 if not is_enabled else 0 + ) + assert spy_output_watcher["enable_event_propagation"].call_count == ( + 1 if is_enabled else 0 + ) + + +@pytest.fixture +def spy_output_watcher(mocker: MockerFixture) -> dict[str, AsyncMock]: + return { + "disable_event_propagation": mocker.spy( + OutputsWatcher, "disable_event_propagation" + ), + "enable_event_propagation": mocker.spy( + OutputsWatcher, "enable_event_propagation" + ), + } + + +@pytest.mark.parametrize("enabled", [True, False]) +async def test_toggle_ports_io( + app: FastAPI, + rpc_client: RabbitMQRPCClient, + enabled: bool, + spy_output_watcher: dict[str, AsyncMock], +): + settings: ApplicationSettings = app.state.settings + + result = await container_extensions.toggle_ports_io( + rpc_client, + node_id=settings.DY_SIDECAR_NODE_ID, + enable_outputs=enabled, + enable_inputs=enabled, + ) + assert result is None + + _assert_inputs_pulling(app, enabled) + _assert_outputs_event_propagation(spy_output_watcher, enabled) + + +@pytest.fixture +def mock_outputs_labels() -> dict[str, ServiceOutput]: + return { + "output_port_1": TypeAdapter(ServiceOutput).validate_python( + ServiceOutput.model_json_schema()["examples"][3] + ), + "output_port_2": TypeAdapter(ServiceOutput).validate_python( + ServiceOutput.model_json_schema()["examples"][3] + ), + } + + +@pytest.fixture +def mock_event_filter_enqueue( + app: FastAPI, monkeypatch: pytest.MonkeyPatch +) -> AsyncMock: + mock = AsyncMock(return_value=None) + outputs_watcher: OutputsWatcher = app.state.outputs_watcher + monkeypatch.setattr(outputs_watcher._event_filter, "enqueue", mock) # noqa: SLF001 + return mock + + +async def test_container_create_outputs_dirs( + app: FastAPI, + rpc_client: RabbitMQRPCClient, + mock_outputs_labels: dict[str, ServiceOutput], + mock_event_filter_enqueue: AsyncMock, +): + app_state = AppState(app) + + # by default outputs-watcher it is disabled + result = await container_extensions.toggle_ports_io( + rpc_client, + node_id=app_state.settings.DY_SIDECAR_NODE_ID, + enable_outputs=True, + enable_inputs=True, + ) + assert result is None + await asyncio.sleep(_WAIT_FOR_OUTPUTS_WATCHER) + + assert mock_event_filter_enqueue.call_count == 0 + + result = await container_extensions.create_output_dirs( + rpc_client, + node_id=app_state.settings.DY_SIDECAR_NODE_ID, + outputs_labels=mock_outputs_labels, + ) + + for dir_name in mock_outputs_labels: + assert (app_state.mounted_volumes.disk_outputs_path / dir_name).is_dir() + + await asyncio.sleep(_WAIT_FOR_OUTPUTS_WATCHER) + EXPECT_EVENTS_WHEN_CREATING_OUTPUT_PORT_KEY_DIRS = 0 + assert ( + mock_event_filter_enqueue.call_count + == EXPECT_EVENTS_WHEN_CREATING_OUTPUT_PORT_KEY_DIRS + ) + + +@pytest.fixture +async def 
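# Editor's note: hedged caller-side sketch of the RPC interface exercised above. A remote
# service holding a RabbitMQRPCClient addresses one specific sidecar by its node_id (the
# value the sidecar reads from DY_SIDECAR_NODE_ID) to switch ports I/O off in one call;
# the disable_ports_io helper name is illustrative.
from models_library.projects_nodes_io import NodeID
from servicelib.rabbitmq import RabbitMQRPCClient
from servicelib.rabbitmq.rpc_interfaces.dynamic_sidecar import container_extensions


async def disable_ports_io(rpc_client: RabbitMQRPCClient, node_id: NodeID) -> None:
    # disables both output event propagation and input pulling on that sidecar
    await container_extensions.toggle_ports_io(
        rpc_client, node_id=node_id, enable_outputs=False, enable_inputs=False
    )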
attachable_networks_and_ids(faker: Faker) -> AsyncIterable[dict[str, str]]: + # generate some network names + unique_id = faker.uuid4() + network_names = {f"test_network_{i}_{unique_id}": "" for i in range(10)} + + # create networks + async with aiodocker.Docker() as client: + for network_name in network_names: + network_config = { + "Name": network_name, + "Driver": "overlay", + "Attachable": True, + "Internal": True, + } + network = await client.networks.create(network_config) + network_names[network_name] = network.id + + yield network_names + + # remove networks + async with aiodocker.Docker() as client: + for network_id in network_names.values(): + network = await client.networks.get(network_id) + assert await network.delete() is True + + +@pytest.fixture +def dynamic_sidecar_network_name() -> str: + return "entrypoint_container_network" + + +@pytest.fixture +def compose_spec(dynamic_sidecar_network_name: str) -> ContainersComposeSpec: + return ContainersComposeSpec( + docker_compose_yaml=yaml.dump( + { + "version": "3", + "services": { + "first-box": { + "image": "busybox:latest", + "networks": { + dynamic_sidecar_network_name: None, + }, + "labels": {"io.osparc.test-label": "mark-entrypoint"}, + }, + "second-box": {"image": "busybox:latest"}, + "egress": { + "image": "busybox:latest", + "networks": { + dynamic_sidecar_network_name: None, + }, + }, + }, + "networks": {dynamic_sidecar_network_name: None}, + } + ) + ) + + +@pytest.fixture +def compose_spec_single_service() -> ContainersComposeSpec: + return ContainersComposeSpec( + docker_compose_yaml=yaml.dump( + { + "version": "3", + "services": { + "solo-box": { + "image": "busybox:latest", + "labels": {"io.osparc.test-label": "mark-entrypoint"}, + }, + }, + } + ) + ) + + +@pytest.fixture(params=["compose_spec", "compose_spec_single_service"]) +def selected_spec( + request, compose_spec: str, compose_spec_single_service: str +) -> ContainersComposeSpec: + # check that fixture_name is present in this function's parameters + fixture_name = request.param + sig = signature(selected_spec) + assert fixture_name in sig.parameters, ( + f"Provided fixture name {fixture_name} was not found " + f"as a parameter in the signature {sig}" + ) + + # returns the parameter by name from the ones declared in the signature + result: ContainersComposeSpec = locals()[fixture_name] + return result + + +@pytest.fixture +def lrt_namespace(app: FastAPI) -> LRTNamespace: + long_running_manager: FastAPILongRunningManager = app.state.long_running_manager + return long_running_manager.lrt_namespace + + +_FAST_STATUS_POLL: Final[float] = 0.1 +_CREATE_SERVICE_CONTAINERS_TIMEOUT: Final[float] = 60 + + +async def _start_containers( + app: FastAPI, + rpc_client: RabbitMQRPCClient, + node_id: NodeID, + lrt_namespace: LRTNamespace, + compose_spec: ContainersComposeSpec, + mock_metrics_params: CreateServiceMetricsAdditionalParams, +) -> list[str]: + await containers.create_compose_spec( + rpc_client, node_id=node_id, containers_compose_spec=compose_spec + ) + + containers_create = ContainersCreate(metrics_params=mock_metrics_params) + task_id = await containers_long_running_tasks.create_user_services( + rpc_client, + node_id=node_id, + lrt_namespace=lrt_namespace, + containers_create=containers_create, + ) + + response_containers = await get_lrt_result( + rpc_client, + lrt_namespace, + task_id, + status_poll_interval=_FAST_STATUS_POLL, + task_timeout=_CREATE_SERVICE_CONTAINERS_TIMEOUT, + ) + + shared_store: SharedStore = app.state.shared_store + container_names = 
shared_store.container_names + assert response_containers == container_names + + return container_names + + +def _create_network_aliases(network_name: str) -> list[str]: + return [f"alias_{i}_{network_name}" for i in range(10)] + + +async def test_attach_detach_container_to_network( + ensure_external_volumes: tuple[DockerVolume], + docker_swarm: None, + app: FastAPI, + rpc_client: RabbitMQRPCClient, + lrt_namespace: LRTNamespace, + selected_spec: ContainersComposeSpec, + attachable_networks_and_ids: dict[str, str], + mock_metrics_params: CreateServiceMetricsAdditionalParams, +): + app_state = AppState(app) + + container_names = await _start_containers( + app, + rpc_client, + node_id=app_state.settings.DY_SIDECAR_NODE_ID, + lrt_namespace=lrt_namespace, + compose_spec=selected_spec, + mock_metrics_params=mock_metrics_params, + ) + + async with aiodocker.Docker() as docker: + for container_name in container_names: + for network_name, network_id in attachable_networks_and_ids.items(): + network_aliases = _create_network_aliases(network_name) + + # attach network to containers + for _ in range(2): # calling 2 times in a row + await container_extensions.attach_container_to_network( + rpc_client, + node_id=app_state.settings.DY_SIDECAR_NODE_ID, + container_id=container_name, + network_id=network_id, + network_aliases=network_aliases, + ) + + container = await docker.containers.get(container_name) + container_inspect = await container.show() + networks = container_inspect["NetworkSettings"]["Networks"] + assert network_id in networks + assert set(network_aliases).issubset( + set(networks[network_id]["Aliases"]) + ) + + # detach network from containers + for _ in range(2): # running twice in a row + await container_extensions.detach_container_from_network( + rpc_client, + node_id=app_state.settings.DY_SIDECAR_NODE_ID, + container_id=container_name, + network_id=network_id, + ) + + container = await docker.containers.get(container_name) + container_inspect = await container.show() + networks = container_inspect["NetworkSettings"]["Networks"] + assert network_id not in networks + + +async def _docker_ps_a_container_names() -> list[str]: + command = 'docker ps -a --format "{{.Names}}"' + success, stdout, *_ = await async_command(command=command, timeout=None) + + assert success is True, stdout + return stdout.split("\n") + + +async def _assert_compose_spec_pulled( + docker_compose_yaml: str, settings: ApplicationSettings +): + """ensures all containers inside compose_spec are pulled""" + + result = await docker_compose_create(docker_compose_yaml, settings) + + assert result.success is True, result.message + + dict_compose_spec = yaml.safe_load(docker_compose_yaml) + expected_services_count = len(dict_compose_spec["services"]) + + docker_ps_names = await _docker_ps_a_container_names() + started_containers = [ + x + for x in docker_ps_names + if x.startswith(settings.DYNAMIC_SIDECAR_COMPOSE_NAMESPACE) + ] + assert len(started_containers) == expected_services_count + + +@pytest.fixture +async def started_containers( + app: FastAPI, + rpc_client: RabbitMQRPCClient, + node_id: NodeID, + lrt_namespace: LRTNamespace, + compose_spec: ContainersComposeSpec, + mock_metrics_params: CreateServiceMetricsAdditionalParams, +) -> list[str]: + settings: ApplicationSettings = app.state.settings + await _assert_compose_spec_pulled(compose_spec.docker_compose_yaml, settings) + + return await _start_containers( + app, + rpc_client, + node_id=node_id, + lrt_namespace=lrt_namespace, + compose_spec=compose_spec, + 
mock_metrics_params=mock_metrics_params, + ) + + +async def test_containers_get( + ensure_external_volumes: tuple[DockerVolume], + app: FastAPI, + rpc_client: RabbitMQRPCClient, + lrt_namespace: LRTNamespace, + started_containers: list[str], +): + app_state = AppState(app) + containers_inspect = await containers.containers_docker_inspect( + rpc_client, node_id=app_state.settings.DY_SIDECAR_NODE_ID, only_status=False + ) + + assert set(containers_inspect) == set(started_containers) + for entry in containers_inspect.values(): + assert "Status" not in entry + assert "Error" not in entry + + +async def test_containers_get_status( + ensure_external_volumes: tuple[DockerVolume], + app: FastAPI, + rpc_client: RabbitMQRPCClient, + started_containers: list[str], +): + app_state = AppState(app) + containers_inspect = await containers.containers_docker_inspect( + rpc_client, node_id=app_state.settings.DY_SIDECAR_NODE_ID, only_status=True + ) + assert set(containers_inspect) == set(started_containers) + + def _assert_keys_exist(result: dict[str, Any]) -> bool: + for entry in result.values(): + assert "Status" in entry + assert "Error" in entry + return True + + assert _assert_keys_exist(containers_inspect) is True + + +@pytest.fixture +def mock_aiodocker_containers_get(mocker: MockerFixture, faker: Faker) -> int: + """raises a DockerError with a random HTTP status which is also returned""" + mock_status_code = faker.random_int(1, 999) + + async def mock_get(*args: str, **kwargs: Any) -> None: + raise aiodocker.exceptions.DockerError( + status=mock_status_code, data={"message": "aiodocker_mocked_error"} + ) + + mocker.patch("aiodocker.containers.DockerContainers.get", side_effect=mock_get) + + return mock_status_code + + +async def test_containers_docker_status_docker_error( + ensure_external_volumes: tuple[DockerVolume], + app: FastAPI, + rpc_client: RabbitMQRPCClient, + started_containers: list[str], + mock_aiodocker_containers_get: int, +): + app_state = AppState(app) + with pytest.raises( + RPCServerError, match=f"status_code={mock_aiodocker_containers_get}" + ): + await containers.containers_docker_inspect( + rpc_client, node_id=app_state.settings.DY_SIDECAR_NODE_ID, only_status=True + ) + + +@pytest.fixture +def define_inactivity_command( + mock_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch +) -> None: + setenvs_from_dict( + monkeypatch, + { + "DY_SIDECAR_CALLBACKS_MAPPING": json.dumps( + { + "inactivity": { + "service": "mock_container_name", + "command": "", + "timeout": 4, + } + } + ) + }, + ) + + +@pytest.fixture +def mock_shared_store(app: FastAPI) -> None: + shared_store: SharedStore = app.state.shared_store + shared_store.original_to_container_names["mock_container_name"] = ( + "mock_container_name" + ) + + +async def test_containers_activity_command_failed( + define_inactivity_command: None, + app: FastAPI, + rpc_client: RabbitMQRPCClient, + mock_shared_store: None, +): + app_state = AppState(app) + containers_activity = await containers.get_containers_activity( + rpc_client, node_id=app_state.settings.DY_SIDECAR_NODE_ID + ) + assert containers_activity == ActivityInfo(seconds_inactive=_INACTIVE_FOR_LONG_TIME) + + +async def test_containers_activity_no_inactivity_defined( + app: FastAPI, rpc_client: RabbitMQRPCClient, mock_shared_store: None +): + app_state = AppState(app) + containers_activity = await containers.get_containers_activity( + rpc_client, node_id=app_state.settings.DY_SIDECAR_NODE_ID + ) + assert containers_activity is None + + +@pytest.fixture +def 
activity_response() -> ActivityInfo: + return ActivityInfo(seconds_inactive=10) + + +@pytest.fixture +def mock_inactive_since_command_response( + mocker: MockerFixture, + activity_response: ActivityInfo, +) -> None: + mocker.patch( + "simcore_service_dynamic_sidecar.services.containers.run_command_in_container", + return_value=activity_response.model_dump_json(), + ) + + +async def test_containers_activity_inactive_since( + define_inactivity_command: None, + mock_inactive_since_command_response: None, + app: FastAPI, + rpc_client: RabbitMQRPCClient, + mock_shared_store: None, + activity_response: ActivityInfo, +): + app_state = AppState(app) + containers_activity = await containers.get_containers_activity( + rpc_client, node_id=app_state.settings.DY_SIDECAR_NODE_ID + ) + assert containers_activity == activity_response + + +@pytest.fixture +def mock_inactive_response_wrong_format(mocker: MockerFixture) -> None: + mocker.patch( + "simcore_service_dynamic_sidecar.services.containers.run_command_in_container", + return_value="This is an unparsable json response {}", + ) + + +async def test_containers_activity_unexpected_response( + define_inactivity_command: None, + mock_inactive_response_wrong_format: None, + app: FastAPI, + rpc_client: RabbitMQRPCClient, + mock_shared_store: None, +): + app_state = AppState(app) + containers_activity = await containers.get_containers_activity( + rpc_client, node_id=app_state.settings.DY_SIDECAR_NODE_ID + ) + assert containers_activity == ActivityInfo(seconds_inactive=_INACTIVE_FOR_LONG_TIME) + + +def _get_entrypoint_container_name(app: FastAPI) -> str: + parsed_spec = parse_compose_spec(app.state.shared_store.compose_spec) + container_name = None + for service_name, service_details in parsed_spec["services"].items(): + # a trick to fetch the expected container + if service_details.get("labels", None) is not None: + container_name = service_name + break + assert container_name is not None + return container_name + + +@pytest.mark.parametrize("include_exclude_filter_option", [True, False]) +async def test_containers_entrypoint_name_ok( + ensure_external_volumes: tuple[DockerVolume], + app: FastAPI, + rpc_client: RabbitMQRPCClient, + dynamic_sidecar_network_name: str, + started_containers: list[str], + include_exclude_filter_option: bool, +): + app_state = AppState(app) + + filters_dict = {"network": dynamic_sidecar_network_name} + if include_exclude_filter_option: + filters_dict["exclude"] = SUFFIX_EGRESS_PROXY_NAME + filters = json.dumps(filters_dict) + + container_name = await containers.get_containers_name( + rpc_client, node_id=app_state.settings.DY_SIDECAR_NODE_ID, filters=filters + ) + if include_exclude_filter_option: + assert container_name == _get_entrypoint_container_name(app) + assert SUFFIX_EGRESS_PROXY_NAME not in container_name + else: + assert SUFFIX_EGRESS_PROXY_NAME in container_name + + +@pytest.mark.parametrize("include_exclude_filter_option", [True, False]) +async def test_containers_entrypoint_name_containers_not_started( + ensure_external_volumes: tuple[DockerVolume], + app: FastAPI, + rpc_client: RabbitMQRPCClient, + dynamic_sidecar_network_name: str, + started_containers: list[str], + include_exclude_filter_option: bool, +): + app_state = AppState(app) + + entrypoint_container = _get_entrypoint_container_name(app) + + # remove the container from the spec + parsed_spec = parse_compose_spec(app.state.shared_store.compose_spec) + del parsed_spec["services"][entrypoint_container] + app.state.shared_store.compose_spec = 
yaml.safe_dump(parsed_spec) + + filters_dict = {"network": dynamic_sidecar_network_name} + if include_exclude_filter_option: + filters_dict["exclude"] = SUFFIX_EGRESS_PROXY_NAME + filters = json.dumps(filters_dict) + + if include_exclude_filter_option: + with pytest.raises( + RPCServerError, + match="No container found for network=entrypoint_container_network and exclude=egress", + ): + await containers.get_containers_name( + rpc_client, + node_id=app_state.settings.DY_SIDECAR_NODE_ID, + filters=filters, + ) + + else: + found_container = await containers.get_containers_name( + rpc_client, node_id=app_state.settings.DY_SIDECAR_NODE_ID, filters=filters + ) + assert found_container in started_containers + assert SUFFIX_EGRESS_PROXY_NAME in found_container + + +@pytest.fixture +def not_started_containers() -> list[str]: + return [f"missing-container-{i}" for i in range(5)] + + +async def test_container_missing_container( + app: FastAPI, rpc_client: RabbitMQRPCClient, not_started_containers: list[str] +): + app_state = AppState(app) + + for container in not_started_containers: + with pytest.raises( + RPCServerError, + match=rf"No container='{container}' was found in started_containers='\[\]'", + ): + await containers.inspect_container( + rpc_client, + node_id=app_state.settings.DY_SIDECAR_NODE_ID, + container_id=container, + ) + + +async def test_container_docker_error( + ensure_external_volumes: tuple[DockerVolume], + app: FastAPI, + rpc_client: RabbitMQRPCClient, + started_containers: list[str], + mock_aiodocker_containers_get: int, +): + app_state = AppState(app) + + for container in started_containers: + # inspect container + with pytest.raises( + RPCServerError, + match=f"An unexpected Docker error occurred status_code={mock_aiodocker_containers_get}, message=aiodocker_mocked_error", + ): + await containers.inspect_container( + rpc_client, + node_id=app_state.settings.DY_SIDECAR_NODE_ID, + container_id=container, + ) diff --git a/services/dynamic-sidecar/tests/unit/api/rpc/test__containers_long_running_tasks.py b/services/dynamic-sidecar/tests/unit/api/rpc/test__containers_long_running_tasks.py new file mode 100644 index 000000000000..039660eb009b --- /dev/null +++ b/services/dynamic-sidecar/tests/unit/api/rpc/test__containers_long_running_tasks.py @@ -0,0 +1,802 @@ +# pylint: disable=no-member +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument + +import asyncio +import json +from collections.abc import Awaitable, Callable, Iterator +from pathlib import Path +from typing import Any, Final, NamedTuple +from unittest.mock import AsyncMock + +import aiodocker +import faker +import pytest +from aiodocker.containers import DockerContainer +from aiodocker.volumes import DockerVolume +from common_library.serialization import model_dump_with_secrets +from fastapi import FastAPI +from models_library.api_schemas_directorv2.dynamic_services import ( + ContainersComposeSpec, + ContainersCreate, +) +from models_library.api_schemas_dynamic_sidecar.containers import DockerComposeYamlStr +from models_library.api_schemas_long_running_tasks.base import ( + ProgressMessage, + ProgressPercent, +) +from models_library.projects_nodes_io import NodeID +from models_library.services_creation import CreateServiceMetricsAdditionalParams +from pytest_mock.plugin import MockerFixture +from pytest_simcore.helpers.long_running_tasks import ( + assert_task_is_no_longer_present, + get_fastapi_long_running_manager, +) +from 
pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict +from servicelib.fastapi.long_running_tasks._manager import FastAPILongRunningManager +from servicelib.long_running_tasks import lrt_api +from servicelib.long_running_tasks.models import LRTNamespace, TaskId +from servicelib.long_running_tasks.task import TaskRegistry +from servicelib.rabbitmq import RabbitMQRPCClient +from servicelib.rabbitmq.rpc_interfaces.dynamic_sidecar import ( + containers, + containers_long_running_tasks, +) +from settings_library.rabbit import RabbitSettings +from simcore_sdk.node_ports_common.exceptions import NodeNotFound +from simcore_service_dynamic_sidecar.core.validation import InvalidComposeSpecError +from simcore_service_dynamic_sidecar.models.shared_store import SharedStore +from simcore_service_dynamic_sidecar.modules import long_running_tasks as sidecar_lrts +from simcore_service_dynamic_sidecar.modules.inputs import enable_inputs_pulling +from simcore_service_dynamic_sidecar.modules.outputs._context import OutputsContext +from simcore_service_dynamic_sidecar.modules.outputs._manager import ( + OutputsManager, + UploadPortsFailedError, +) +from tenacity import ( + AsyncRetrying, + retry_if_exception_type, + stop_after_delay, + wait_fixed, +) +from utils import get_lrt_result + +pytest_simcore_core_services_selection = [ + "rabbit", +] + +_FAST_STATUS_POLL: Final[float] = 0.1 +_CREATE_SERVICE_CONTAINERS_TIMEOUT: Final[float] = 60 + + +class ContainerTimes(NamedTuple): + created: Any + started_at: Any + finished_at: Any + + +async def _get_container_timestamps( + container_names: list[str], +) -> dict[str, ContainerTimes]: + container_timestamps: dict[str, ContainerTimes] = {} + async with aiodocker.Docker() as client: + for container_name in container_names: + container: DockerContainer = await client.containers.get(container_name) + container_inspect: dict[str, Any] = await container.show() + container_timestamps[container_name] = ContainerTimes( + created=container_inspect["Created"], + started_at=container_inspect["State"]["StartedAt"], + finished_at=container_inspect["State"]["FinishedAt"], + ) + + return container_timestamps + + +@pytest.fixture +def mock_sidecar_lrts(mocker: MockerFixture) -> Iterator[None]: + async def _just_log_task(*args, **kwargs) -> None: + print(f"Called mocked function with {args}, {kwargs}") + + TaskRegistry.register(_just_log_task) + + for task_name in [ + sidecar_lrts.pull_user_services_images.__name__, + sidecar_lrts.create_user_services.__name__, + sidecar_lrts.remove_user_services.__name__, + sidecar_lrts.restore_user_services_state_paths.__name__, + sidecar_lrts.save_user_services_state_paths.__name__, + sidecar_lrts.pull_user_services_input_ports.__name__, + sidecar_lrts.pull_user_services_output_ports.__name__, + sidecar_lrts.push_user_services_output_ports.__name__, + sidecar_lrts.restart_user_services.__name__, + ]: + mocker.patch.object(sidecar_lrts, task_name, new=_just_log_task) + + yield None + + TaskRegistry.unregister(_just_log_task) + + +@pytest.fixture +def dynamic_sidecar_network_name() -> str: + return "entrypoint_container_network" + + +@pytest.fixture( + params=[ + { + "version": "3", + "services": { + "first-box": { + "image": "alpine:latest", + "networks": { + "entrypoint_container_network": None, + }, + }, + "second-box": { + "image": "alpine:latest", + "command": ["sh", "-c", "sleep 100000"], + }, + }, + "networks": {"entrypoint_container_network": None}, + }, + { + "version": "3", + "services": { + "solo-box": { + 
"image": "alpine:latest", + "command": ["sh", "-c", "sleep 100000"], + }, + }, + }, + ] +) +def compose_spec(request: pytest.FixtureRequest) -> DockerComposeYamlStr: + spec_dict: dict[str, Any] = request.param # type: ignore + return json.dumps(spec_dict) + + +@pytest.fixture +def mock_environment( + monkeypatch: pytest.MonkeyPatch, + rabbit_service: RabbitSettings, + mock_environment: EnvVarsDict, +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + { + **mock_environment, + "RABBIT_SETTINGS": json.dumps( + model_dump_with_secrets(rabbit_service, show_secrets=True) + ), + }, + ) + + +@pytest.fixture +async def rpc_client( + rpc_client: RabbitMQRPCClient, + ensure_external_volumes: tuple[DockerVolume], + cleanup_containers: None, + ensure_shared_store_dir: Path, +) -> RabbitMQRPCClient: + # crete dir here + return rpc_client + + +@pytest.fixture +def lrt_namespace(app: FastAPI) -> LRTNamespace: + long_running_manager: FastAPILongRunningManager = app.state.long_running_manager + return long_running_manager.lrt_namespace + + +@pytest.fixture +def shared_store(app: FastAPI) -> SharedStore: + return app.state.shared_store + + +@pytest.fixture +def mock_data_manager(mocker: MockerFixture) -> None: + for function_name in ( + "_push_directory", + "_state_metadata_entry_exists", + "_pull_directory", + "_pull_legacy_archive", + ): + mocker.patch( + f"simcore_service_dynamic_sidecar.modules.long_running_tasks.data_manager.{function_name}", + autospec=True, + return_value=None, + ) + + +@pytest.fixture() +def mock_nodeports(mocker: MockerFixture) -> None: + mocker.patch( + "simcore_service_dynamic_sidecar.modules.outputs._manager.upload_outputs", + return_value=None, + ) + mocker.patch( + "simcore_service_dynamic_sidecar.modules.nodeports.download_target_ports", + return_value=42, + ) + + +@pytest.fixture( + params=[ + [], + None, + ["single_port"], + ["first_port", "second_port"], + ] +) +async def mock_port_keys( + request: pytest.FixtureRequest, app: FastAPI +) -> list[str] | None: + outputs_context: OutputsContext = app.state.outputs_context + if request.param is not None: + await outputs_context.set_file_type_port_keys(request.param) + return request.param + + +@pytest.fixture +def outputs_manager(app: FastAPI) -> OutputsManager: + return app.state.outputs_manager + + +@pytest.fixture +def missing_node_uuid(faker: faker.Faker) -> str: + return faker.uuid4() + + +@pytest.fixture +def mock_node_missing(mocker: MockerFixture, missing_node_uuid: str) -> None: + async def _mocked(*args, **kwargs) -> None: + raise NodeNotFound(missing_node_uuid) + + mocker.patch( + "simcore_service_dynamic_sidecar.modules.outputs._manager.upload_outputs", + side_effect=_mocked, + ) + + +async def _get_task_id_pull_user_services_docker_images_task( + rpc_client: RabbitMQRPCClient, + node_id: NodeID, + lrt_namespace: LRTNamespace, + *args, + **kwargs, +) -> TaskId: + return await containers_long_running_tasks.pull_user_services_images( + rpc_client, node_id=node_id, lrt_namespace=lrt_namespace + ) + + +async def _get_task_id_create_service_containers_task( + rpc_client: RabbitMQRPCClient, + node_id: NodeID, + lrt_namespace: LRTNamespace, + compose_spec: DockerComposeYamlStr, + mock_metrics_params: CreateServiceMetricsAdditionalParams, + *args, + **kwargs, +) -> TaskId: + containers_compose_spec = ContainersComposeSpec( + docker_compose_yaml=compose_spec, + ) + await containers.create_compose_spec( + rpc_client, node_id=node_id, containers_compose_spec=containers_compose_spec + ) + containers_create = 
ContainersCreate(metrics_params=mock_metrics_params) + return await containers_long_running_tasks.create_user_services( + rpc_client, + node_id=node_id, + lrt_namespace=lrt_namespace, + containers_create=containers_create, + ) + + +async def _get_task_id_runs_docker_compose_down_task( + rpc_client: RabbitMQRPCClient, + node_id: NodeID, + lrt_namespace: LRTNamespace, + *args, + **kwargs, +) -> TaskId: + return await containers_long_running_tasks.remove_user_services( + rpc_client, node_id=node_id, lrt_namespace=lrt_namespace + ) + + +async def _get_task_id_state_restore_task( + rpc_client: RabbitMQRPCClient, + node_id: NodeID, + lrt_namespace: LRTNamespace, + *args, + **kwargs, +) -> TaskId: + return await containers_long_running_tasks.restore_user_services_state_paths( + rpc_client, node_id=node_id, lrt_namespace=lrt_namespace + ) + + +async def _get_task_id_state_save_task( + rpc_client: RabbitMQRPCClient, + node_id: NodeID, + lrt_namespace: LRTNamespace, + *args, + **kwargs, +) -> TaskId: + return await containers_long_running_tasks.save_user_services_state_paths( + rpc_client, node_id=node_id, lrt_namespace=lrt_namespace + ) + + +async def _get_task_id_ports_inputs_pull_task( + rpc_client: RabbitMQRPCClient, + node_id: NodeID, + lrt_namespace: LRTNamespace, + port_keys: list[str] | None, + *args, + **kwargs, +) -> TaskId: + return await containers_long_running_tasks.pull_user_services_input_ports( + rpc_client, node_id=node_id, lrt_namespace=lrt_namespace, port_keys=port_keys + ) + + +async def _get_task_id_ports_outputs_pull_task( + rpc_client: RabbitMQRPCClient, + node_id: NodeID, + lrt_namespace: LRTNamespace, + port_keys: list[str] | None, + *args, + **kwargs, +) -> TaskId: + return await containers_long_running_tasks.pull_user_services_output_ports( + rpc_client, node_id=node_id, lrt_namespace=lrt_namespace, port_keys=port_keys + ) + + +async def _get_task_id_ports_outputs_push_task( + rpc_client: RabbitMQRPCClient, + node_id: NodeID, + lrt_namespace: LRTNamespace, + *args, + **kwargs, +) -> TaskId: + return await containers_long_running_tasks.push_user_services_output_ports( + rpc_client, node_id=node_id, lrt_namespace=lrt_namespace + ) + + +async def _get_task_id_task_containers_restart_task( + rpc_client: RabbitMQRPCClient, + node_id: NodeID, + lrt_namespace: LRTNamespace, + *args, + **kwargs, +) -> TaskId: + return await containers_long_running_tasks.restart_user_services( + rpc_client, + node_id=node_id, + lrt_namespace=lrt_namespace, + ) + + +async def _debug_progress( + message: ProgressMessage, percent: ProgressPercent | None, task_id: TaskId +) -> None: + print(f"{task_id} {percent} {message}") + + +class _LastProgressMessageTracker: + def __init__(self) -> None: + self.last_progress_message: tuple[ProgressMessage, ProgressPercent] | None = ( + None + ) + + async def __call__( + self, message: ProgressMessage, percent: ProgressPercent | None, _: TaskId + ) -> None: + assert percent is not None + self.last_progress_message = (message, percent) + print(message, percent) + + async def assert_progress_finished(self) -> None: + async for attempt in AsyncRetrying( + stop=stop_after_delay(10), + wait=wait_fixed(0.1), + retry=retry_if_exception_type(AssertionError), + reraise=True, + ): + with attempt: + await asyncio.sleep(0) # yield control to the event loop + assert self.last_progress_message == ("finished", 1.0) + + +async def test_create_containers_task( + rpc_client: RabbitMQRPCClient, + node_id: NodeID, + lrt_namespace: LRTNamespace, + compose_spec: str, + 
mock_stop_heart_beat_task: AsyncMock,
+    mock_metrics_params: CreateServiceMetricsAdditionalParams,
+    shared_store: SharedStore,
+) -> None:
+    last_progress_message_tracker = _LastProgressMessageTracker()
+
+    result = await get_lrt_result(
+        rpc_client,
+        lrt_namespace,
+        task_id=await _get_task_id_create_service_containers_task(
+            rpc_client, node_id, lrt_namespace, compose_spec, mock_metrics_params
+        ),
+        task_timeout=_CREATE_SERVICE_CONTAINERS_TIMEOUT,
+        status_poll_interval=_FAST_STATUS_POLL,
+        progress_callback=last_progress_message_tracker,
+    )
+    assert shared_store.container_names == result
+
+    await last_progress_message_tracker.assert_progress_finished()
+
+
+async def test_pull_user_services_docker_images(
+    rpc_client: RabbitMQRPCClient,
+    node_id: NodeID,
+    lrt_namespace: LRTNamespace,
+    compose_spec: str,
+    mock_stop_heart_beat_task: AsyncMock,
+    mock_metrics_params: CreateServiceMetricsAdditionalParams,
+    shared_store: SharedStore,
+) -> None:
+    last_progress_message_tracker1 = _LastProgressMessageTracker()
+
+    result = await get_lrt_result(
+        rpc_client,
+        lrt_namespace,
+        task_id=await _get_task_id_create_service_containers_task(
+            rpc_client, node_id, lrt_namespace, compose_spec, mock_metrics_params
+        ),
+        task_timeout=_CREATE_SERVICE_CONTAINERS_TIMEOUT,
+        status_poll_interval=_FAST_STATUS_POLL,
+        progress_callback=last_progress_message_tracker1,
+    )
+    assert shared_store.container_names == result
+
+    await last_progress_message_tracker1.assert_progress_finished()
+
+    last_progress_message_tracker2 = _LastProgressMessageTracker()
+    result = await get_lrt_result(
+        rpc_client,
+        lrt_namespace,
+        task_id=await _get_task_id_pull_user_services_docker_images_task(
+            rpc_client, node_id, lrt_namespace, compose_spec, mock_metrics_params
+        ),
+        task_timeout=_CREATE_SERVICE_CONTAINERS_TIMEOUT,
+        status_poll_interval=_FAST_STATUS_POLL,
+        progress_callback=last_progress_message_tracker2,
+    )
+    assert result is None
+    await last_progress_message_tracker2.assert_progress_finished()
+
+
+async def test_create_containers_task_invalid_yaml_spec(
+    rpc_client: RabbitMQRPCClient,
+    node_id: NodeID,
+    lrt_namespace: LRTNamespace,
+    mock_stop_heart_beat_task: AsyncMock,
+    mock_metrics_params: CreateServiceMetricsAdditionalParams,
+):
+    with pytest.raises(InvalidComposeSpecError) as exec_info:
+        await get_lrt_result(
+            rpc_client,
+            lrt_namespace,
+            task_id=await _get_task_id_create_service_containers_task(
+                rpc_client, node_id, lrt_namespace, "", mock_metrics_params
+            ),
+            task_timeout=_CREATE_SERVICE_CONTAINERS_TIMEOUT,
+            status_poll_interval=_FAST_STATUS_POLL,
+            progress_callback=_debug_progress,
+        )
+    assert "Provided yaml is not valid" in f"{exec_info.value}"
+
+
+@pytest.mark.parametrize(
+    "get_task_id_callable",
+    [
+        _get_task_id_pull_user_services_docker_images_task,
+        _get_task_id_create_service_containers_task,
+        _get_task_id_runs_docker_compose_down_task,
+        _get_task_id_state_restore_task,
+        _get_task_id_state_save_task,
+        _get_task_id_ports_inputs_pull_task,
+        _get_task_id_ports_outputs_pull_task,
+        _get_task_id_ports_outputs_push_task,
+        _get_task_id_task_containers_restart_task,
+    ],
+)
+async def test_same_task_id_is_returned_if_task_exists(
+    mock_sidecar_lrts: None,
+    rpc_client: RabbitMQRPCClient,
+    app: FastAPI,
+    node_id: NodeID,
+    lrt_namespace: LRTNamespace,
+    mocker: MockerFixture,
+    get_task_id_callable: Callable[..., Awaitable[TaskId]],
+    mock_stop_heart_beat_task: AsyncMock,
+    mock_metrics_params: CreateServiceMetricsAdditionalParams,
+    compose_spec: str,
+) -> 
None: + def _get_awaitable() -> Awaitable[TaskId]: + return get_task_id_callable( + rpc_client=rpc_client, + node_id=node_id, + lrt_namespace=lrt_namespace, + compose_spec=compose_spec, + mock_metrics_params=mock_metrics_params, + port_keys=None, + ) + + async def _assert_task_removed(task_id: TaskId) -> None: + await lrt_api.remove_task(rpc_client, lrt_namespace, {}, task_id) + await assert_task_is_no_longer_present( + get_fastapi_long_running_manager(app), task_id, {} + ) + + task_id = await _get_awaitable() + assert task_id.endswith("unique") + assert await _get_awaitable() == task_id + + await _assert_task_removed(task_id) + + # since the previous task was already removed it is again possible + # to create a task and it will share the same task_id + new_task_id = await _get_awaitable() + assert new_task_id.endswith("unique") + assert new_task_id == task_id + + await _assert_task_removed(task_id) + + +async def test_containers_down_after_starting( + mock_ensure_read_permissions_on_user_service_data: None, + rpc_client: RabbitMQRPCClient, + node_id: NodeID, + lrt_namespace: LRTNamespace, + compose_spec: str, + mock_stop_heart_beat_task: AsyncMock, + mock_metrics_params: CreateServiceMetricsAdditionalParams, + shared_store: SharedStore, + mock_core_rabbitmq: dict[str, AsyncMock], + mocker: MockerFixture, +): + # start containers + result = await get_lrt_result( + rpc_client, + lrt_namespace, + task_id=await _get_task_id_create_service_containers_task( + rpc_client, node_id, lrt_namespace, compose_spec, mock_metrics_params + ), + task_timeout=_CREATE_SERVICE_CONTAINERS_TIMEOUT, + status_poll_interval=_FAST_STATUS_POLL, + progress_callback=_debug_progress, + ) + assert shared_store.container_names == result + + # put down containers + result = await get_lrt_result( + rpc_client, + lrt_namespace, + task_id=await _get_task_id_runs_docker_compose_down_task( + rpc_client, node_id, lrt_namespace + ), + task_timeout=_CREATE_SERVICE_CONTAINERS_TIMEOUT, + status_poll_interval=_FAST_STATUS_POLL, + progress_callback=_debug_progress, + ) + assert result is None + + +async def test_containers_down_missing_spec( + rpc_client: RabbitMQRPCClient, + node_id: NodeID, + lrt_namespace: LRTNamespace, + caplog_info_debug: pytest.LogCaptureFixture, +): + result = await get_lrt_result( + rpc_client, + lrt_namespace, + task_id=await _get_task_id_runs_docker_compose_down_task( + rpc_client, node_id, lrt_namespace + ), + task_timeout=_CREATE_SERVICE_CONTAINERS_TIMEOUT, + status_poll_interval=_FAST_STATUS_POLL, + progress_callback=_debug_progress, + ) + assert result is None + assert "No compose-spec was found" in caplog_info_debug.text + + +async def test_container_restore_state( + rpc_client: RabbitMQRPCClient, + node_id: NodeID, + lrt_namespace: LRTNamespace, + mock_data_manager: None, +): + result = await get_lrt_result( + rpc_client, + lrt_namespace, + task_id=await _get_task_id_state_restore_task( + rpc_client, node_id, lrt_namespace + ), + task_timeout=_CREATE_SERVICE_CONTAINERS_TIMEOUT, + status_poll_interval=_FAST_STATUS_POLL, + progress_callback=_debug_progress, + ) + assert isinstance(result, int) + + +async def test_container_save_state( + rpc_client: RabbitMQRPCClient, + node_id: NodeID, + lrt_namespace: LRTNamespace, + mock_data_manager: None, +): + result = await get_lrt_result( + rpc_client, + lrt_namespace, + task_id=await _get_task_id_state_save_task(rpc_client, node_id, lrt_namespace), + task_timeout=_CREATE_SERVICE_CONTAINERS_TIMEOUT, + status_poll_interval=_FAST_STATUS_POLL, + 
progress_callback=_debug_progress, + ) + assert isinstance(result, int) + + +@pytest.mark.parametrize("inputs_pulling_enabled", [True, False]) +async def test_container_pull_input_ports( + rpc_client: RabbitMQRPCClient, + node_id: NodeID, + lrt_namespace: LRTNamespace, + inputs_pulling_enabled: bool, + app: FastAPI, + mock_port_keys: list[str] | None, + mock_nodeports: None, +): + if inputs_pulling_enabled: + enable_inputs_pulling(app) + + result = await get_lrt_result( + rpc_client, + lrt_namespace, + task_id=await _get_task_id_ports_inputs_pull_task( + rpc_client, node_id, lrt_namespace, mock_port_keys + ), + task_timeout=_CREATE_SERVICE_CONTAINERS_TIMEOUT, + status_poll_interval=_FAST_STATUS_POLL, + progress_callback=_debug_progress, + ) + assert result == (42 if inputs_pulling_enabled else 0) + + +async def test_container_pull_output_ports( + rpc_client: RabbitMQRPCClient, + node_id: NodeID, + lrt_namespace: LRTNamespace, + mock_port_keys: list[str] | None, + mock_nodeports: None, +): + result = await get_lrt_result( + rpc_client, + lrt_namespace, + task_id=await _get_task_id_ports_outputs_pull_task( + rpc_client, node_id, lrt_namespace, mock_port_keys + ), + task_timeout=_CREATE_SERVICE_CONTAINERS_TIMEOUT, + status_poll_interval=_FAST_STATUS_POLL, + progress_callback=_debug_progress, + ) + assert result == 42 + + +async def test_container_push_output_ports( + rpc_client: RabbitMQRPCClient, + node_id: NodeID, + lrt_namespace: LRTNamespace, + mock_port_keys: list[str] | None, + mock_nodeports: None, +): + result = await get_lrt_result( + rpc_client, + lrt_namespace, + task_id=await _get_task_id_ports_outputs_push_task( + rpc_client, node_id, lrt_namespace, mock_port_keys + ), + task_timeout=_CREATE_SERVICE_CONTAINERS_TIMEOUT, + status_poll_interval=_FAST_STATUS_POLL, + progress_callback=_debug_progress, + ) + assert result is None + + +async def test_container_push_output_ports_missing_node( + rpc_client: RabbitMQRPCClient, + node_id: NodeID, + lrt_namespace: LRTNamespace, + mock_port_keys: list[str] | None, + missing_node_uuid: str, + mock_node_missing: None, + outputs_manager: OutputsManager, +): + for port_key in mock_port_keys if mock_port_keys else []: + await outputs_manager.port_key_content_changed(port_key) + + async def _test_code() -> None: + await get_lrt_result( + rpc_client, + lrt_namespace, + task_id=await _get_task_id_ports_outputs_push_task( + rpc_client, node_id, lrt_namespace, mock_port_keys + ), + task_timeout=_CREATE_SERVICE_CONTAINERS_TIMEOUT, + status_poll_interval=_FAST_STATUS_POLL, + progress_callback=_debug_progress, + ) + + if not mock_port_keys: + await _test_code() + else: + with pytest.raises(UploadPortsFailedError) as exec_info: + await _test_code() + assert f"the node id {missing_node_uuid} was not found" in f"{exec_info.value}" + + +async def test_containers_restart( + rpc_client: RabbitMQRPCClient, + node_id: NodeID, + lrt_namespace: LRTNamespace, + compose_spec: str, + mock_stop_heart_beat_task: AsyncMock, + mock_metrics_params: CreateServiceMetricsAdditionalParams, + shared_store: SharedStore, +): + container_names = await get_lrt_result( + rpc_client, + lrt_namespace, + task_id=await _get_task_id_create_service_containers_task( + rpc_client, node_id, lrt_namespace, compose_spec, mock_metrics_params + ), + task_timeout=_CREATE_SERVICE_CONTAINERS_TIMEOUT, + status_poll_interval=_FAST_STATUS_POLL, + progress_callback=_debug_progress, + ) + assert shared_store.container_names == container_names + + assert container_names + + container_timestamps_before = 
await _get_container_timestamps(container_names) + + result = await get_lrt_result( + rpc_client, + lrt_namespace, + task_id=await _get_task_id_task_containers_restart_task( + rpc_client, node_id, lrt_namespace + ), + task_timeout=_CREATE_SERVICE_CONTAINERS_TIMEOUT, + status_poll_interval=_FAST_STATUS_POLL, + progress_callback=_debug_progress, + ) + assert result is None + + container_timestamps_after = await _get_container_timestamps(container_names) + + for container_name in container_names: + before: ContainerTimes = container_timestamps_before[container_name] + after: ContainerTimes = container_timestamps_after[container_name] + + assert before.created == after.created + assert before.started_at < after.started_at + assert before.finished_at < after.finished_at diff --git a/services/dynamic-sidecar/tests/unit/api/rpc/test__volumes.py b/services/dynamic-sidecar/tests/unit/api/rpc/test__volumes.py index e19b50916c1e..d23705f95493 100644 --- a/services/dynamic-sidecar/tests/unit/api/rpc/test__volumes.py +++ b/services/dynamic-sidecar/tests/unit/api/rpc/test__volumes.py @@ -42,7 +42,7 @@ async def test_volumes_state_saved_ok( status=initial_expected_status ) - await volumes.save_volume_state( + await volumes.update_volume_status( rpc_client, node_id=settings.DY_SIDECAR_NODE_ID, status=VolumeStatus.CONTENT_WAS_SAVED, @@ -66,7 +66,7 @@ async def test_volumes_state_saved_error( settings: ApplicationSettings = app.state.settings with pytest.raises(RPCServerError, match="ValidationError"): - await volumes.save_volume_state( + await volumes.update_volume_status( rpc_client, node_id=settings.DY_SIDECAR_NODE_ID, status=VolumeStatus.CONTENT_WAS_SAVED, diff --git a/services/dynamic-sidecar/tests/unit/api/rpc/utils.py b/services/dynamic-sidecar/tests/unit/api/rpc/utils.py new file mode 100644 index 000000000000..c41256305d79 --- /dev/null +++ b/services/dynamic-sidecar/tests/unit/api/rpc/utils.py @@ -0,0 +1,47 @@ +from typing import Any + +from servicelib.long_running_tasks import lrt_api +from servicelib.long_running_tasks.models import LRTNamespace, ProgressCallback, TaskId +from servicelib.rabbitmq import RabbitMQRPCClient +from tenacity import ( + AsyncRetrying, + retry_if_exception_type, + stop_after_delay, + wait_fixed, +) + + +async def get_lrt_result( + rpc_client: RabbitMQRPCClient, + lrt_namespace: LRTNamespace, + task_id: TaskId, + status_poll_interval: float, + task_timeout: float, + progress_callback: ProgressCallback | None = None, +) -> Any: + async for attempt in AsyncRetrying( + stop=stop_after_delay(task_timeout), + wait=wait_fixed(status_poll_interval), + retry=retry_if_exception_type(AssertionError), + reraise=True, + ): + with attempt: + status = await lrt_api.get_task_status( + rpc_client, + lrt_namespace=lrt_namespace, + task_context={}, + task_id=task_id, + ) + + if progress_callback: + await progress_callback( + status.task_progress.message, status.task_progress.percent, task_id + ) + assert status.done is True + + return await lrt_api.get_task_result( + rpc_client, + lrt_namespace=lrt_namespace, + task_context={}, + task_id=task_id, + ) diff --git a/services/dynamic-sidecar/tests/unit/conftest.py b/services/dynamic-sidecar/tests/unit/conftest.py index 75b9d316c103..fc113f1c674a 100644 --- a/services/dynamic-sidecar/tests/unit/conftest.py +++ b/services/dynamic-sidecar/tests/unit/conftest.py @@ -12,7 +12,7 @@ from async_asgi_testclient import TestClient from fastapi import FastAPI from pytest_mock.plugin import MockerFixture -from pytest_simcore.helpers.monkeypatch_envs 
import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from simcore_service_dynamic_sidecar.core.application import AppState, create_app from simcore_service_dynamic_sidecar.core.docker_compose_utils import ( docker_compose_down, @@ -40,11 +40,7 @@ @pytest.fixture -def app( - mock_environment: EnvVarsDict, - mock_registry_service: AsyncMock, - mock_core_rabbitmq: dict[str, AsyncMock], -) -> FastAPI: +def app(mock_environment: EnvVarsDict, mock_registry_service: AsyncMock) -> FastAPI: """creates app with registry and rabbitMQ services mocked""" return create_app() @@ -131,24 +127,6 @@ async def cleanup_containers(app: FastAPI) -> AsyncIterator[None]: await docker_compose_down(app_state.compose_spec, app_state.settings) -@pytest.fixture -def mock_rabbitmq_envs( - mock_core_rabbitmq: dict[str, AsyncMock], - monkeypatch: pytest.MonkeyPatch, - mock_environment: EnvVarsDict, -) -> EnvVarsDict: - setenvs_from_dict( - monkeypatch, - { - "RABBIT_HOST": "mocked_host", - "RABBIT_SECURE": "false", - "RABBIT_USER": "mocked_user", - "RABBIT_PASSWORD": "mocked_password", - }, - ) - return mock_environment - - @pytest.fixture def port_notifier(app: FastAPI) -> PortNotifier: settings: ApplicationSettings = app.state.settings diff --git a/services/dynamic-sidecar/tests/unit/test_api_rest_health.py b/services/dynamic-sidecar/tests/unit/test_api_rest_health.py index 987ddbf1e636..a5542917b117 100644 --- a/services/dynamic-sidecar/tests/unit/test_api_rest_health.py +++ b/services/dynamic-sidecar/tests/unit/test_api_rest_health.py @@ -1,6 +1,8 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument +from unittest.mock import AsyncMock + from async_asgi_testclient import TestClient from fastapi import status from simcore_service_dynamic_sidecar.models.schemas.application_health import ( @@ -8,14 +10,18 @@ ) -async def test_is_healthy(test_client: TestClient) -> None: +async def test_is_healthy( + mock_core_rabbitmq: dict[str, AsyncMock], test_client: TestClient +) -> None: test_client.application.state.application_health.is_healthy = True response = await test_client.get("/health") assert response.status_code == status.HTTP_200_OK, response assert response.json() == ApplicationHealth(is_healthy=True).model_dump() -async def test_is_unhealthy(test_client: TestClient) -> None: +async def test_is_unhealthy( + mock_core_rabbitmq: dict[str, AsyncMock], test_client: TestClient +) -> None: test_client.application.state.application_health.is_healthy = False response = await test_client.get("/health") assert response.status_code == status.HTTP_503_SERVICE_UNAVAILABLE, response @@ -24,7 +30,9 @@ async def test_is_unhealthy(test_client: TestClient) -> None: } -async def test_is_unhealthy_via_rabbitmq(test_client: TestClient) -> None: +async def test_is_unhealthy_via_rabbitmq( + mock_core_rabbitmq: dict[str, AsyncMock], test_client: TestClient +) -> None: # pylint: disable=protected-access test_client.application.state.rabbitmq_client._healthy_state = False # noqa: SLF001 response = await test_client.get("/health") diff --git a/services/dynamic-sidecar/tests/unit/test_api_rest_prometheus_metrics.py b/services/dynamic-sidecar/tests/unit/test_api_rest_prometheus_metrics.py index 7d4454b7e0af..37961a149573 100644 --- a/services/dynamic-sidecar/tests/unit/test_api_rest_prometheus_metrics.py +++ b/services/dynamic-sidecar/tests/unit/test_api_rest_prometheus_metrics.py @@ -5,38 +5,60 @@ import json from collections.abc import AsyncIterable from typing import 
Final -from unittest.mock import AsyncMock import pytest from aiodocker.volumes import DockerVolume from asgi_lifespan import LifespanManager +from common_library.serialization import model_dump_with_secrets from fastapi import FastAPI, status from httpx import ASGITransport, AsyncClient +from models_library.api_schemas_directorv2.dynamic_services import ( + ContainersComposeSpec, + ContainersCreate, +) from models_library.api_schemas_dynamic_sidecar.containers import DockerComposeYamlStr from models_library.callbacks_mapping import CallbacksMapping from models_library.services_creation import CreateServiceMetricsAdditionalParams from pydantic import AnyHttpUrl, TypeAdapter from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.fastapi.long_running_tasks.client import ( - Client, - TaskId, + HttpClient, periodic_task_result, ) from servicelib.fastapi.long_running_tasks.client import setup as client_setup +from servicelib.long_running_tasks.models import TaskId +from settings_library.rabbit import RabbitSettings from simcore_service_dynamic_sidecar._meta import API_VTAG -from simcore_service_dynamic_sidecar.models.schemas.containers import ( - ContainersComposeSpec, - ContainersCreate, -) from simcore_service_dynamic_sidecar.modules.prometheus_metrics import ( _USER_SERVICES_NOT_STARTED, UserServicesMetrics, ) +pytest_simcore_core_services_selection = [ + "rabbit", +] + _FAST_STATUS_POLL: Final[float] = 0.1 _CREATE_SERVICE_CONTAINERS_TIMEOUT: Final[float] = 60 +@pytest.fixture +def mock_environment( + monkeypatch: pytest.MonkeyPatch, + rabbit_service: RabbitSettings, + mock_environment: EnvVarsDict, +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + { + **mock_environment, + "RABBIT_SETTINGS": json.dumps( + model_dump_with_secrets(rabbit_service, show_secrets=True) + ), + }, + ) + + @pytest.fixture async def enable_prometheus_metrics( monkeypatch: pytest.MonkeyPatch, mock_environment: EnvVarsDict @@ -45,14 +67,14 @@ async def enable_prometheus_metrics( monkeypatch, { "DY_SIDECAR_CALLBACKS_MAPPING": json.dumps( - CallbacksMapping.model_config["json_schema_extra"]["examples"][2] - ) + CallbacksMapping.model_json_schema()["examples"][2] + ), }, ) @pytest.fixture -async def app(mock_rabbitmq_envs: EnvVarsDict, app: FastAPI) -> AsyncIterable[FastAPI]: +async def app(app: FastAPI) -> AsyncIterable[FastAPI]: client_setup(app) async with LifespanManager(app): yield app @@ -79,10 +101,12 @@ async def httpx_async_client( @pytest.fixture -def client( +def http_client( app: FastAPI, httpx_async_client: AsyncClient, backend_url: AnyHttpUrl -) -> Client: - return Client(app=app, async_client=httpx_async_client, base_url=f"{backend_url}") +) -> HttpClient: + return HttpClient( + app=app, async_client=httpx_async_client, base_url=f"{backend_url}" + ) @pytest.fixture @@ -121,17 +145,13 @@ async def _get_task_id_create_service_containers( return task_id -async def test_metrics_disabled( - mock_core_rabbitmq: dict[str, AsyncMock], httpx_async_client: AsyncClient -) -> None: +async def test_metrics_disabled(httpx_async_client: AsyncClient) -> None: response = await httpx_async_client.get("/metrics") assert response.status_code == status.HTTP_404_NOT_FOUND, response async def test_metrics_enabled_no_containers_running( - enable_prometheus_metrics: None, - mock_core_rabbitmq: dict[str, AsyncMock], - httpx_async_client: AsyncClient, + enable_prometheus_metrics: None, httpx_async_client: AsyncClient ) -> None: response = await 
httpx_async_client.get("/metrics") assert response.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR, response @@ -140,10 +160,9 @@ async def test_metrics_enabled_no_containers_running( async def test_metrics_enabled_containers_will_start( enable_prometheus_metrics: None, - mock_core_rabbitmq: dict[str, AsyncMock], app: FastAPI, httpx_async_client: AsyncClient, - client: Client, + http_client: HttpClient, compose_spec: str, mock_metrics_params: CreateServiceMetricsAdditionalParams, ): @@ -153,7 +172,7 @@ async def test_metrics_enabled_containers_will_start( assert _USER_SERVICES_NOT_STARTED in response.text async with periodic_task_result( - client=client, + client=http_client, task_id=await _get_task_id_create_service_containers( httpx_async_client, compose_spec, mock_metrics_params ), diff --git a/services/dynamic-sidecar/tests/unit/test_api_rest_workflow_service_metrics.py b/services/dynamic-sidecar/tests/unit/test_api_rest_workflow_service_metrics.py index b276f00cb2c7..6786f99bfcf7 100644 --- a/services/dynamic-sidecar/tests/unit/test_api_rest_workflow_service_metrics.py +++ b/services/dynamic-sidecar/tests/unit/test_api_rest_workflow_service_metrics.py @@ -16,8 +16,13 @@ from aiodocker.utils import clean_filters from aiodocker.volumes import DockerVolume from asgi_lifespan import LifespanManager +from common_library.serialization import model_dump_with_secrets from fastapi import FastAPI from httpx import ASGITransport, AsyncClient +from models_library.api_schemas_directorv2.dynamic_services import ( + ContainersComposeSpec, + ContainersCreate, +) from models_library.api_schemas_dynamic_sidecar.containers import DockerComposeYamlStr from models_library.generated_models.docker_rest_api import ContainerState from models_library.generated_models.docker_rest_api import Status2 as ContainerStatus @@ -31,24 +36,31 @@ from models_library.services_creation import CreateServiceMetricsAdditionalParams from pydantic import AnyHttpUrl, TypeAdapter from pytest_mock import MockerFixture +from pytest_simcore.helpers.long_running_tasks import ( + assert_task_is_no_longer_present, + get_fastapi_long_running_manager, +) from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from servicelib.fastapi.long_running_tasks.client import ( - Client, - TaskId, + HttpClient, periodic_task_result, ) from servicelib.fastapi.long_running_tasks.client import setup as client_setup +from servicelib.long_running_tasks.errors import TaskExceptionError +from servicelib.long_running_tasks.models import TaskId +from settings_library.rabbit import RabbitSettings from simcore_service_dynamic_sidecar._meta import API_VTAG +from simcore_service_dynamic_sidecar.core.application import create_app from simcore_service_dynamic_sidecar.core.docker_utils import get_container_states -from simcore_service_dynamic_sidecar.models.schemas.containers import ( - ContainersComposeSpec, - ContainersCreate, -) from simcore_service_dynamic_sidecar.models.shared_store import SharedStore from tenacity import AsyncRetrying, TryAgain from tenacity.stop import stop_after_delay from tenacity.wait import wait_fixed +pytest_simcore_core_services_selection = [ + "rabbit", +] + _FAST_STATUS_POLL: Final[float] = 0.1 _CREATE_SERVICE_CONTAINERS_TIMEOUT: Final[float] = 60 _BASE_HEART_BEAT_INTERVAL: Final[float] = 0.1 @@ -83,24 +95,36 @@ def backend_url() -> AnyHttpUrl: @pytest.fixture -def mock_environment( - monkeypatch: pytest.MonkeyPatch, mock_rabbitmq_envs: EnvVarsDict +async def mock_environment( + 
fast_long_running_tasks_cancellation: None, + mock_postgres_check: None, + mock_registry_service: AsyncMock, + mock_environment: EnvVarsDict, + monkeypatch: pytest.MonkeyPatch, + rabbit_service: RabbitSettings, ) -> EnvVarsDict: - setenvs_from_dict( + return setenvs_from_dict( monkeypatch, - {"RESOURCE_TRACKING_HEARTBEAT_INTERVAL": f"{_BASE_HEART_BEAT_INTERVAL}"}, + { + **mock_environment, + "RESOURCE_TRACKING_HEARTBEAT_INTERVAL": f"{_BASE_HEART_BEAT_INTERVAL}", + "RABBIT_SETTINGS": json.dumps( + model_dump_with_secrets(rabbit_service, show_secrets=True) + ), + }, ) - return mock_rabbitmq_envs @pytest.fixture -async def app(app: FastAPI) -> AsyncIterable[FastAPI]: +async def app(mock_environment: EnvVarsDict) -> AsyncIterable[FastAPI]: + local_app = create_app() # add the client setup to the same application # this is only required for testing, in reality # this will be in a different process - client_setup(app) - async with LifespanManager(app): - yield app + client_setup(local_app) + + async with LifespanManager(local_app): + yield local_app @pytest.fixture @@ -122,10 +146,12 @@ async def httpx_async_client( @pytest.fixture -def client( +async def http_client( app: FastAPI, httpx_async_client: AsyncClient, backend_url: AnyHttpUrl -) -> Client: - return Client(app=app, async_client=httpx_async_client, base_url=f"{backend_url}") +) -> HttpClient: + return HttpClient( + app=app, async_client=httpx_async_client, base_url=f"{backend_url}" + ) @pytest.fixture @@ -144,6 +170,15 @@ def mock_user_services_fail_to_stop(mocker: MockerFixture) -> None: ) +@pytest.fixture +def mock_post_rabbit_message(mocker: MockerFixture) -> AsyncMock: + return mocker.patch( + "simcore_service_dynamic_sidecar.core.rabbitmq._post_rabbit_message", + return_value=None, + autospec=True, + ) + + async def _get_task_id_create_service_containers( httpx_async_client: AsyncClient, compose_spec: DockerComposeYamlStr, @@ -173,11 +208,11 @@ async def _get_task_id_docker_compose_down(httpx_async_client: AsyncClient) -> T def _get_resource_tracking_messages( - mock_core_rabbitmq: dict[str, AsyncMock], + mock_post_rabbit_message: AsyncMock, ) -> list[RabbitResourceTrackingMessages]: return [ x[0][1] - for x in mock_core_rabbitmq["post_rabbit_message"].call_args_list + for x in mock_post_rabbit_message.call_args_list if isinstance(x[0][1], RabbitResourceTrackingMessages) ] @@ -201,16 +236,16 @@ async def _wait_for_containers_to_be_running(app: FastAPI) -> None: async def test_service_starts_and_closes_as_expected( - mock_core_rabbitmq: dict[str, AsyncMock], + mock_post_rabbit_message: AsyncMock, app: FastAPI, httpx_async_client: AsyncClient, - client: Client, + http_client: HttpClient, compose_spec: str, container_names: list[str], mock_metrics_params: CreateServiceMetricsAdditionalParams, ): async with periodic_task_result( - client=client, + client=http_client, task_id=await _get_task_id_create_service_containers( httpx_async_client, compose_spec, mock_metrics_params ), @@ -223,7 +258,7 @@ async def test_service_starts_and_closes_as_expected( await _wait_for_containers_to_be_running(app) async with periodic_task_result( - client=client, + client=http_client, task_id=await _get_task_id_docker_compose_down(httpx_async_client), task_timeout=_CREATE_SERVICE_CONTAINERS_TIMEOUT, status_poll_interval=_FAST_STATUS_POLL, @@ -235,7 +270,9 @@ async def test_service_starts_and_closes_as_expected( await asyncio.sleep(_BASE_HEART_BEAT_INTERVAL * 10) # Ensure messages arrive in the expected order - resource_tracking_messages = 
_get_resource_tracking_messages(mock_core_rabbitmq) + resource_tracking_messages = _get_resource_tracking_messages( + mock_post_rabbit_message + ) assert len(resource_tracking_messages) >= 3 start_message = resource_tracking_messages[0] @@ -252,18 +289,18 @@ async def test_service_starts_and_closes_as_expected( @pytest.mark.parametrize("with_compose_down", [True, False]) async def test_user_services_fail_to_start( - mock_core_rabbitmq: dict[str, AsyncMock], + mock_post_rabbit_message: AsyncMock, app: FastAPI, httpx_async_client: AsyncClient, - client: Client, + http_client: HttpClient, compose_spec: str, mock_metrics_params: CreateServiceMetricsAdditionalParams, with_compose_down: bool, mock_user_services_fail_to_start: None, ): - with pytest.raises(RuntimeError): + with pytest.raises(TaskExceptionError): async with periodic_task_result( - client=client, + client=http_client, task_id=await _get_task_id_create_service_containers( httpx_async_client, compose_spec, mock_metrics_params ), @@ -276,7 +313,7 @@ async def test_user_services_fail_to_start( if with_compose_down: async with periodic_task_result( - client=client, + client=http_client, task_id=await _get_task_id_docker_compose_down(httpx_async_client), task_timeout=_CREATE_SERVICE_CONTAINERS_TIMEOUT, status_poll_interval=_FAST_STATUS_POLL, @@ -284,22 +321,24 @@ async def test_user_services_fail_to_start( assert result is None # no messages were sent - resource_tracking_messages = _get_resource_tracking_messages(mock_core_rabbitmq) + resource_tracking_messages = _get_resource_tracking_messages( + mock_post_rabbit_message + ) assert len(resource_tracking_messages) == 0 async def test_user_services_fail_to_stop_or_save_data( - mock_core_rabbitmq: dict[str, AsyncMock], + mock_post_rabbit_message: AsyncMock, app: FastAPI, httpx_async_client: AsyncClient, - client: Client, + http_client: HttpClient, compose_spec: str, container_names: list[str], mock_metrics_params: CreateServiceMetricsAdditionalParams, mock_user_services_fail_to_stop: None, ): async with periodic_task_result( - client=client, + client=http_client, task_id=await _get_task_id_create_service_containers( httpx_async_client, compose_spec, mock_metrics_params ), @@ -317,17 +356,23 @@ async def test_user_services_fail_to_stop_or_save_data( # in case of manual intervention multiple stops will be sent _EXPECTED_STOP_MESSAGES = 4 for _ in range(_EXPECTED_STOP_MESSAGES): - with pytest.raises(RuntimeError): + task_id = await _get_task_id_docker_compose_down(httpx_async_client) + with pytest.raises(TaskExceptionError): async with periodic_task_result( - client=client, - task_id=await _get_task_id_docker_compose_down(httpx_async_client), + client=http_client, + task_id=task_id, task_timeout=_CREATE_SERVICE_CONTAINERS_TIMEOUT, status_poll_interval=_FAST_STATUS_POLL, ): ... 
+ await assert_task_is_no_longer_present( + get_fastapi_long_running_manager(app), task_id, {} + ) # Ensure messages arrive in the expected order - resource_tracking_messages = _get_resource_tracking_messages(mock_core_rabbitmq) + resource_tracking_messages = _get_resource_tracking_messages( + mock_post_rabbit_message + ) assert len(resource_tracking_messages) >= 3 start_message = resource_tracking_messages[0] @@ -384,10 +429,10 @@ async def _mocked_get_container_states( @pytest.mark.parametrize("expected_platform_state", SimcorePlatformStatus) async def test_user_services_crash_when_running( - mock_core_rabbitmq: dict[str, AsyncMock], + mock_post_rabbit_message: AsyncMock, app: FastAPI, httpx_async_client: AsyncClient, - client: Client, + http_client: HttpClient, compose_spec: str, container_names: list[str], mock_metrics_params: CreateServiceMetricsAdditionalParams, @@ -395,7 +440,7 @@ async def test_user_services_crash_when_running( expected_platform_state: SimcorePlatformStatus, ): async with periodic_task_result( - client=client, + client=http_client, task_id=await _get_task_id_create_service_containers( httpx_async_client, compose_spec, mock_metrics_params ), @@ -419,7 +464,9 @@ async def test_user_services_crash_when_running( await _simulate_container_crash(container_names) # check only start and heartbeats are present - resource_tracking_messages = _get_resource_tracking_messages(mock_core_rabbitmq) + resource_tracking_messages = _get_resource_tracking_messages( + mock_post_rabbit_message + ) assert len(resource_tracking_messages) >= 2 start_message = resource_tracking_messages[0] @@ -431,26 +478,34 @@ async def test_user_services_crash_when_running( # reset mock await asyncio.sleep(_BASE_HEART_BEAT_INTERVAL * 2) - mock_core_rabbitmq["post_rabbit_message"].reset_mock() + mock_post_rabbit_message.reset_mock() # wait a bit more and check no further heartbeats are sent await asyncio.sleep(_BASE_HEART_BEAT_INTERVAL * 2) - new_resource_tracking_messages = _get_resource_tracking_messages(mock_core_rabbitmq) + new_resource_tracking_messages = _get_resource_tracking_messages( + mock_post_rabbit_message + ) assert len(new_resource_tracking_messages) == 0 # sending stop events, and since there was an issue multiple stops # will be sent due to manual intervention _EXPECTED_STOP_MESSAGES = 4 for _ in range(_EXPECTED_STOP_MESSAGES): + task_id = await _get_task_id_docker_compose_down(httpx_async_client) async with periodic_task_result( - client=client, - task_id=await _get_task_id_docker_compose_down(httpx_async_client), + client=http_client, + task_id=task_id, task_timeout=_CREATE_SERVICE_CONTAINERS_TIMEOUT, status_poll_interval=_FAST_STATUS_POLL, ) as result: assert result is None + await assert_task_is_no_longer_present( + get_fastapi_long_running_manager(app), task_id, {} + ) - resource_tracking_messages = _get_resource_tracking_messages(mock_core_rabbitmq) + resource_tracking_messages = _get_resource_tracking_messages( + mock_post_rabbit_message + ) # NOTE: only 1 stop event arrives here since the stopping of the containers # was successful assert len(resource_tracking_messages) == 1 diff --git a/services/dynamic-sidecar/tests/unit/test_cli.py b/services/dynamic-sidecar/tests/unit/test_cli.py index 9caf23163473..855c21497d52 100644 --- a/services/dynamic-sidecar/tests/unit/test_cli.py +++ b/services/dynamic-sidecar/tests/unit/test_cli.py @@ -1,20 +1,41 @@ # pylint: disable=unused-argument # pylint: disable=redefined-outer-name - +import json import os import traceback +from pprint import 
pprint import pytest from click.testing import Result +from common_library.serialization import model_dump_with_secrets from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings from simcore_service_dynamic_sidecar.cli import main from typer.testing import CliRunner +pytest_simcore_core_services_selection = [ + "redis", + "rabbit", +] + @pytest.fixture -def cli_runner(mock_environment: EnvVarsDict) -> CliRunner: - return CliRunner() +def cli_runner( + rabbit_service: RabbitSettings, + redis_service: RedisSettings, + mock_environment: EnvVarsDict, +) -> CliRunner: + mock_environment["REDIS_SETTINGS"] = json.dumps( + model_dump_with_secrets(redis_service, show_secrets=True) + ) + mock_environment["RABBIT_SETTINGS"] = json.dumps( + model_dump_with_secrets(rabbit_service, show_secrets=True) + ) + + pprint(mock_environment) + return CliRunner(env=mock_environment) @pytest.fixture @@ -50,12 +71,12 @@ def test_list_state_dirs(cli_runner: CliRunner, mock_data_manager: None): def test_outputs_push_interface(cli_runner: CliRunner, mock_data_manager: None): result = cli_runner.invoke(main, ["state-save"]) assert result.exit_code == os.EX_OK, _format_cli_error(result) - assert result.stdout == "state save finished successfully\n" + assert "state save finished successfully\n" in result.stdout print(result) def test_state_save_interface(cli_runner: CliRunner, mock_nodeports: None): result = cli_runner.invoke(main, ["outputs-push"]) assert result.exit_code == os.EX_OK, _format_cli_error(result) - assert result.stdout == "output ports push finished successfully\n" + assert "output ports push finished successfully\n" in result.stdout print(result) diff --git a/services/dynamic-sidecar/tests/unit/test_core_docker_utils.py b/services/dynamic-sidecar/tests/unit/test_core_docker_utils.py index e39c908dbc90..6ee4f2003d1b 100644 --- a/services/dynamic-sidecar/tests/unit/test_core_docker_utils.py +++ b/services/dynamic-sidecar/tests/unit/test_core_docker_utils.py @@ -2,7 +2,6 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable from collections.abc import AsyncIterable, AsyncIterator -from contextlib import suppress import aiodocker import pytest @@ -73,9 +72,7 @@ async def started_services(container_names: list[str]) -> AsyncIterator[None]: yield for container in started_containers: - with suppress(aiodocker.DockerError): - await container.kill() - await container.delete() + await container.delete(force=True) async def test_volume_with_label( diff --git a/services/dynamic-sidecar/tests/unit/test_core_reserved_space.py b/services/dynamic-sidecar/tests/unit/test_core_reserved_space.py index c78b800ce5a3..c5042c8346e9 100644 --- a/services/dynamic-sidecar/tests/unit/test_core_reserved_space.py +++ b/services/dynamic-sidecar/tests/unit/test_core_reserved_space.py @@ -7,7 +7,7 @@ from simcore_service_dynamic_sidecar.core.application import create_base_app from simcore_service_dynamic_sidecar.core.reserved_space import ( _RESERVED_DISK_SPACE_NAME, - remove_reserved_disk_space, + free_reserved_disk_space, ) @@ -22,5 +22,5 @@ def test_reserved_disk_space_workflow( ByteSize ).validate_python("10MiB") - remove_reserved_disk_space() + free_reserved_disk_space() assert not _RESERVED_DISK_SPACE_NAME.exists() diff --git a/services/dynamic-sidecar/tests/unit/test_models_shared_store.py b/services/dynamic-sidecar/tests/unit/test_models_shared_store.py index 
2c2b474a0290..7ecf24a2d33f 100644 --- a/services/dynamic-sidecar/tests/unit/test_models_shared_store.py +++ b/services/dynamic-sidecar/tests/unit/test_models_shared_store.py @@ -5,6 +5,7 @@ from copy import deepcopy from pathlib import Path from typing import Any +from unittest.mock import AsyncMock import arrow import pytest @@ -23,6 +24,7 @@ @pytest.fixture def trigger_setup_shutdown_events( + mock_core_rabbitmq: dict[str, AsyncMock], shared_store_dir: Path, app: FastAPI, test_client: TestClient, diff --git a/services/efs-guardian/Dockerfile b/services/efs-guardian/Dockerfile index 85dcd3feaf66..3b73b0946984 100644 --- a/services/efs-guardian/Dockerfile +++ b/services/efs-guardian/Dockerfile @@ -2,7 +2,7 @@ # Define arguments in the global scope ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build FROM python:${PYTHON_VERSION}-slim-bookworm AS base-arm64 @@ -34,6 +34,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=private \ set -eux; \ apt-get update; \ apt-get install -y --no-install-recommends \ + fd-find \ gosu \ ca-certificates \ curl \ @@ -126,10 +127,7 @@ RUN uv venv "${VIRTUAL_ENV}" -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools + WORKDIR /build @@ -146,6 +144,9 @@ WORKDIR /build FROM build AS prod-only-deps ENV SC_BUILD_TARGET=prod-only-deps +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy WORKDIR /build/services/efs-guardian @@ -171,8 +172,6 @@ ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production ENV PYTHONOPTIMIZE=TRUE -# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode -ENV UV_COMPILE_BYTECODE=1 WORKDIR /home/efs diff --git a/services/efs-guardian/docker/boot.sh b/services/efs-guardian/docker/boot.sh index 862a3456b266..904aace5c918 100755 --- a/services/efs-guardian/docker/boot.sh +++ b/services/efs-guardian/docker/boot.sh @@ -24,7 +24,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/efs-guardian - uv pip --quiet sync requirements/dev.txt + uv pip --quiet sync --link-mode=copy requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -33,7 +33,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy if command -v uv >/dev/null 2>&1; then - uv pip install debugpy + uv pip install --link-mode=copy debugpy else pip install debugpy fi @@ -48,19 +48,22 @@ SERVER_LOG_LEVEL=$(echo "${APP_LOG_LEVEL}" | tr '[:upper:]' '[:lower:]') echo "$INFO" "Log-level app/server: $APP_LOG_LEVEL/$SERVER_LOG_LEVEL" if [ "${SC_BOOT_MODE}" = "debug" ]; then - reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path "*/src/*" ! -path "*.*" -exec echo '--reload-dir {} \' \;) + reload_dir_packages=$(fdfind src /devel/packages --exec echo '--reload-dir {} ' | tr '\n' ' ') exec sh -c " cd services/efs-guardian/src/simcore_service_efs_guardian && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${EFS_GUARDIAN_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${EFS_GUARDIAN_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ - $reload_dir_packages + $reload_dir_packages \ --reload-dir . 
\ --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_efs_guardian.main:the_app \ + exec uvicorn \ + --factory simcore_service_efs_guardian.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/efs-guardian/docker/entrypoint.sh b/services/efs-guardian/docker/entrypoint.sh index d8ddf1c826ad..b3083ac06cde 100755 --- a/services/efs-guardian/docker/entrypoint.sh +++ b/services/efs-guardian/docker/entrypoint.sh @@ -26,6 +26,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE @@ -63,14 +64,12 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$EFS_USER_NAME" echo "$INFO" "Changing group properties of files around from $EFS_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -group "$EFS_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; - # change user property of files already around + fdfind --owner ":$EFS_USER_ID" --exclude proc --exec-batch chgrp --no-dereference "$CONT_GROUPNAME" . '/' echo "$INFO" "Changing ownership properties of files around from $EFS_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -user "$EFS_USER_ID" -exec chown --no-dereference "$EFS_USER_NAME" {} \; + fdfind --owner "$EFS_USER_ID:" --exclude proc --exec-batch chown --no-dereference "$EFS_USER_NAME" . '/' fi fi - # Appends docker group if socket is mounted DOCKER_MOUNT=/var/run/docker.sock if stat $DOCKER_MOUNT >/dev/null 2>&1; then diff --git a/services/efs-guardian/requirements/_base.txt b/services/efs-guardian/requirements/_base.txt index 94fdbf1ee794..0f4d53b322b1 100644 --- a/services/efs-guardian/requirements/_base.txt +++ b/services/efs-guardian/requirements/_base.txt @@ -1,10 +1,10 @@ -aio-pika==9.4.3 +aio-pika==9.5.5 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -aioboto3==14.3.0 +aioboto3==15.0.0 # via -r requirements/../../../packages/aws-library/requirements/_base.in -aiobotocore==2.22.0 +aiobotocore==2.23.0 # via aioboto3 aiocache==0.12.3 # via @@ -15,7 +15,7 @@ aiodebug==2.3.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -aiodocker==0.23.0 +aiodocker==0.24.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in @@ -26,7 +26,7 @@ aiofiles==24.1.0 # aioboto3 aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.13 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -60,13 +60,13 @@ aioitertools==0.12.0 # via aiobotocore aiormq==6.8.1 # via aio-pika -aiosignal==1.3.1 +aiosignal==1.3.2 # via aiohttp -alembic==1.13.3 +alembic==1.16.2 # via -r requirements/../../../packages/postgres-database/requirements/_base.in annotated-types==0.7.0 # via pydantic 
-anyio==4.6.2.post1 +anyio==4.9.0 # via # fast-depends # faststream @@ -82,27 +82,27 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi -async-timeout==4.0.3 - # via asyncpg -asyncpg==0.29.0 +asyncpg==0.30.0 # via sqlalchemy -attrs==24.2.0 +attrs==25.3.0 # via # aiohttp # jsonschema # referencing -boto3==1.37.3 +boto3==1.38.27 # via aiobotocore -botocore==1.37.3 +botocore==1.38.27 # via # aiobotocore # boto3 # s3transfer -botocore-stubs==1.35.43 +botocore-stubs==1.38.46 # via types-aiobotocore -certifi==2024.8.30 +certifi==2025.6.15 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -133,52 +133,51 @@ certifi==2024.8.30 # httpcore # httpx # requests -charset-normalizer==3.4.0 + # sentry-sdk +charset-normalizer==3.4.2 # via requests -click==8.1.7 +click==8.2.1 # via # rich-toolkit # typer # uvicorn -deprecated==1.2.14 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions dnspython==2.7.0 # via email-validator email-validator==2.2.0 # via # fastapi # pydantic +exceptiongroup==1.3.0 + # via aio-pika fast-depends==2.4.12 # via faststream -fastapi==0.115.12 +fastapi==0.116.1 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi-lifespan-manager -fastapi-cli==0.0.7 +fastapi-cli==0.0.8 # via fastapi +fastapi-cloud-cli==0.1.5 + # via fastapi-cli fastapi-lifespan-manager==0.1.4 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -faststream==0.5.31 +faststream==0.5.43 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -frozenlist==1.4.1 +frozenlist==1.7.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.65.0 +googleapis-common-protos==1.70.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -greenlet==3.1.1 +greenlet==3.2.3 # via sqlalchemy -grpcio==1.67.0 +grpcio==1.73.1 # via opentelemetry-exporter-otlp-proto-grpc -h11==0.14.0 +h11==0.16.0 # via # httpcore # uvicorn @@ -186,11 +185,11 @@ h2==4.2.0 # via httpx hpack==4.1.0 # via h2 -httpcore==1.0.6 +httpcore==1.0.9 # via httpx httptools==0.6.4 # via uvicorn -httpx==0.27.2 +httpx==0.28.1 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -220,6 +219,7 @@ httpx==0.27.2 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi + # fastapi-cloud-cli 
hyperframe==6.1.0 # via h2 idna==3.10 @@ -229,7 +229,7 @@ idna==3.10 # httpx # requests # yarl -importlib-metadata==8.4.0 +importlib-metadata==8.7.0 # via opentelemetry-api jinja2==3.1.6 # via @@ -265,15 +265,21 @@ jmespath==1.0.1 # aiobotocore # boto3 # botocore -jsonschema==4.23.0 +jsonref==1.1.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in -jsonschema-specifications==2023.7.1 +jsonschema==4.24.0 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in +jsonschema-specifications==2025.4.1 # via jsonschema -mako==1.3.5 +mako==1.3.10 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -310,12 +316,12 @@ markupsafe==3.0.2 # mako mdurl==0.1.2 # via markdown-it-py -multidict==6.1.0 +multidict==6.6.2 # via # aiobotocore # aiohttp # yarl -opentelemetry-api==1.27.0 +opentelemetry-api==1.34.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in @@ -334,19 +340,19 @@ opentelemetry-api==1.27.0 # opentelemetry-propagator-aws-xray # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.27.0 +opentelemetry-exporter-otlp==1.34.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.27.0 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.27.0 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.27.0 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.48b0 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi @@ -357,47 +363,50 @@ opentelemetry-instrumentation==0.48b0 # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.48b0 +opentelemetry-instrumentation-aio-pika==0.55b1 # via # -r 
requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-asgi==0.48b0 +opentelemetry-instrumentation-asgi==0.55b1 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-asyncpg==0.48b0 - # via -r requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-instrumentation-botocore==0.48b0 +opentelemetry-instrumentation-asyncpg==0.55b1 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-botocore==0.55b1 # via -r requirements/../../../packages/aws-library/requirements/_base.in -opentelemetry-instrumentation-fastapi==0.48b0 +opentelemetry-instrumentation-fastapi==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.48b0 +opentelemetry-instrumentation-httpx==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-logging==0.48b0 +opentelemetry-instrumentation-logging==0.55b1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.48b0 +opentelemetry-instrumentation-redis==0.55b1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.48b0 +opentelemetry-instrumentation-requests==0.55b1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-propagator-aws-xray==1.0.1 +opentelemetry-propagator-aws-xray==1.0.2 # via opentelemetry-instrumentation-botocore -opentelemetry-proto==1.27.0 +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.27.0 +opentelemetry-sdk==1.34.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.48b0 +opentelemetry-semantic-conventions==0.55b1 # via + # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-botocore @@ -406,13 +415,13 @@ opentelemetry-semantic-conventions==0.48b0 # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.48b0 +opentelemetry-util-http==0.55b1 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-requests -orjson==3.10.7 +orjson==3.10.18 # via # -c 
requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -457,29 +466,31 @@ orjson==3.10.7 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -packaging==24.1 - # via -r requirements/_base.in +packaging==25.0 + # via + # -r requirements/_base.in + # opentelemetry-instrumentation pamqp==3.3.0 # via aiormq -prometheus-client==0.21.0 +prometheus-client==0.22.1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -propcache==0.3.1 +propcache==0.3.2 # via # aiohttp # yarl -protobuf==4.25.5 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto -psutil==6.1.0 +psutil==7.0.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in psycopg2-binary==2.9.10 # via sqlalchemy -pycryptodome==3.21.0 +pycryptodome==3.23.0 # via stream-zip -pydantic==2.10.2 +pydantic==2.11.7 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -534,11 +545,12 @@ pydantic==2.10.2 # -r requirements/../../../packages/settings-library/requirements/_base.in # fast-depends # fastapi + # fastapi-cloud-cli # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.1 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.9.0 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -557,7 +569,7 @@ pydantic-extra-types==2.9.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -pydantic-settings==2.6.1 +pydantic-settings==2.7.0 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -593,9 +605,9 @@ pydantic-settings==2.6.1 # -r 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in -pygments==2.18.0 +pygments==2.19.2 # via rich -pyinstrument==5.0.0 +pyinstrument==5.0.2 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in @@ -604,7 +616,7 @@ python-dateutil==2.9.0.post0 # aiobotocore # arrow # botocore -python-dotenv==1.0.1 +python-dotenv==1.1.1 # via # pydantic-settings # uvicorn @@ -641,7 +653,7 @@ pyyaml==6.0.2 # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # uvicorn -redis==5.2.1 +redis==6.2.0 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -671,7 +683,7 @@ redis==5.2.1 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -referencing==0.29.3 +referencing==0.35.1 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -701,9 +713,9 @@ referencing==0.29.3 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via opentelemetry-exporter-otlp-proto-http -rich==13.9.2 +rich==14.1.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -711,26 +723,30 @@ rich==13.9.2 # -r requirements/../../../packages/settings-library/requirements/_base.in # rich-toolkit # typer -rich-toolkit==0.14.7 - # via fastapi-cli -rpds-py==0.20.0 +rich-toolkit==0.15.0 + # via + # fastapi-cli + # fastapi-cloud-cli +rignore==0.6.4 + # via fastapi-cloud-cli +rpds-py==0.25.1 # via # jsonschema # referencing -s3transfer==0.11.3 +s3transfer==0.13.0 # via boto3 -setuptools==75.2.0 - # via opentelemetry-instrumentation -sh==2.1.0 +sentry-sdk==2.35.0 + # via fastapi-cloud-cli +sh==2.2.2 # via -r requirements/../../../packages/aws-library/requirements/_base.in shellingham==1.5.4 # via typer -six==1.16.0 +six==1.17.0 # via python-dateutil sniffio==1.3.1 # via # anyio - # httpx + # asgi-lifespan sqlalchemy==1.4.54 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -761,7 +777,7 @@ sqlalchemy==1.4.54 # -c 
requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/postgres-database/requirements/_base.in # alembic -starlette==0.41.2 +starlette==0.47.2 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -794,7 +810,7 @@ stream-zip==0.0.83 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -tenacity==9.0.0 +tenacity==9.1.2 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in @@ -802,45 +818,57 @@ toolz==1.0.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -tqdm==4.66.5 +tqdm==4.67.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.12.5 +typer==0.16.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # fastapi-cli -types-aiobotocore==2.19.0 + # fastapi-cloud-cli +types-aiobotocore==2.23.0 # via -r requirements/../../../packages/aws-library/requirements/_base.in -types-aiobotocore-ec2==2.19.0 +types-aiobotocore-ec2==2.23.0 # via types-aiobotocore -types-aiobotocore-s3==2.19.0 +types-aiobotocore-s3==2.23.0 # via types-aiobotocore -types-aiobotocore-ssm==2.19.0 +types-aiobotocore-ssm==2.23.0 # via types-aiobotocore -types-awscrt==0.22.0 +types-awscrt==0.27.4 # via botocore-stubs -types-python-dateutil==2.9.0.20241003 +types-python-dateutil==2.9.0.20250516 # via arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # aiodebug # alembic + # anyio + # exceptiongroup # fastapi # faststream + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pydantic # pydantic-core + # pydantic-extra-types # rich-toolkit + # starlette # typer # types-aiobotocore # types-aiobotocore-ec2 # types-aiobotocore-s3 # types-aiobotocore-ssm -urllib3==2.2.3 + # typing-inspection +typing-inspection==0.4.1 + # via pydantic +urllib3==2.5.0 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -870,24 +898,26 @@ urllib3==2.2.3 # -c requirements/../../../requirements/constraints.txt # botocore 
# requests -uvicorn==0.34.2 + # sentry-sdk +uvicorn==0.35.0 # via # fastapi # fastapi-cli + # fastapi-cloud-cli uvloop==0.21.0 # via uvicorn -watchfiles==1.0.5 +watchfiles==1.1.0 # via uvicorn websockets==15.0.1 # via uvicorn -wrapt==1.16.0 +wrapt==1.17.2 # via # aiobotocore - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika + # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis -yarl==1.20.0 +yarl==1.20.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/postgres-database/requirements/_base.in @@ -895,5 +925,5 @@ yarl==1.20.0 # aio-pika # aiohttp # aiormq -zipp==3.20.2 +zipp==3.23.0 # via importlib-metadata diff --git a/services/efs-guardian/requirements/_test.txt b/services/efs-guardian/requirements/_test.txt index 6a6bfc1ff9c8..0b5839cd0610 100644 --- a/services/efs-guardian/requirements/_test.txt +++ b/services/efs-guardian/requirements/_test.txt @@ -1,4 +1,4 @@ -aiodocker==0.23.0 +aiodocker==0.24.0 # via # -c requirements/_base.txt # -r requirements/_test.in @@ -6,12 +6,12 @@ aiohappyeyeballs==2.6.1 # via # -c requirements/_base.txt # aiohttp -aiohttp==3.11.18 +aiohttp==3.12.13 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aiodocker -aiosignal==1.3.1 +aiosignal==1.3.2 # via # -c requirements/_base.txt # aiohttp @@ -21,37 +21,39 @@ annotated-types==0.7.0 # pydantic antlr4-python3-runtime==4.13.2 # via moto -anyio==4.6.2.post1 +anyio==4.9.0 # via # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in -attrs==24.2.0 + # via + # -c requirements/_base.txt + # -r requirements/_test.in +attrs==25.3.0 # via # -c requirements/_base.txt # aiohttp # jsonschema # referencing -aws-sam-translator==1.95.0 +aws-sam-translator==1.99.0 # via cfn-lint aws-xray-sdk==2.14.0 # via moto blinker==1.9.0 # via flask -boto3==1.37.3 +boto3==1.38.27 # via # -c requirements/_base.txt # aws-sam-translator # moto -botocore==1.37.3 +botocore==1.38.27 # via # -c requirements/_base.txt # aws-xray-sdk # boto3 # moto # s3transfer -certifi==2024.8.30 +certifi==2025.6.15 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -60,59 +62,59 @@ certifi==2024.8.30 # requests cffi==1.17.1 # via cryptography -cfn-lint==1.27.0 +cfn-lint==1.36.1 # via moto -charset-normalizer==3.4.0 +charset-normalizer==3.4.2 # via # -c requirements/_base.txt # requests -click==8.1.7 +click==8.2.1 # via # -c requirements/_base.txt # flask -coverage==7.6.12 +coverage==7.9.1 # via # -r requirements/_test.in # pytest-cov -cryptography==44.0.2 +cryptography==45.0.4 # via # -c requirements/../../../requirements/constraints.txt # joserfc # moto -debugpy==1.8.12 +debugpy==1.8.14 # via -r requirements/_test.in -deepdiff==8.2.0 +deepdiff==8.5.0 # via -r requirements/_test.in docker==7.1.0 # via # -r requirements/_test.in # moto -faker==36.1.1 +faker==37.4.0 # via -r requirements/_test.in -fakeredis==2.27.0 +fakeredis==2.30.1 # via -r requirements/_test.in -flask==3.1.0 +flask==3.1.1 # via # flask-cors # moto -flask-cors==5.0.1 +flask-cors==6.0.1 # via moto -frozenlist==1.4.1 +frozenlist==1.7.0 # via # -c requirements/_base.txt # aiohttp # aiosignal graphql-core==3.2.6 # via moto -h11==0.14.0 +h11==0.16.0 # via # -c requirements/_base.txt # httpcore -httpcore==1.0.6 +httpcore==1.0.9 # via # -c requirements/_base.txt # httpx -httpx==0.27.2 +httpx==0.28.1 # via # -c 
requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -125,7 +127,7 @@ idna==3.10 # httpx # requests # yarl -iniconfig==2.0.0 +iniconfig==2.1.0 # via pytest itsdangerous==2.2.0 # via flask @@ -140,7 +142,7 @@ jmespath==1.0.1 # -c requirements/_base.txt # boto3 # botocore -joserfc==1.0.4 +joserfc==1.1.0 # via moto jsonpatch==1.33 # via cfn-lint @@ -148,7 +150,7 @@ jsonpath-ng==1.7.0 # via moto jsonpointer==3.0.0 # via jsonpatch -jsonschema==4.23.0 +jsonschema==4.24.0 # via # -c requirements/_base.txt # aws-sam-translator @@ -156,38 +158,39 @@ jsonschema==4.23.0 # openapi-spec-validator jsonschema-path==0.3.4 # via openapi-spec-validator -jsonschema-specifications==2023.7.1 +jsonschema-specifications==2025.4.1 # via # -c requirements/_base.txt # jsonschema # openapi-schema-validator -lazy-object-proxy==1.10.0 +lazy-object-proxy==1.11.0 # via openapi-spec-validator -lupa==2.4 +lupa==2.5 # via fakeredis markupsafe==3.0.2 # via # -c requirements/_base.txt + # flask # jinja2 # werkzeug -moto==5.1.4 +moto==5.1.6 # via -r requirements/_test.in mpmath==1.3.0 # via sympy -multidict==6.1.0 +multidict==6.6.2 # via # -c requirements/_base.txt # aiohttp # yarl -networkx==3.4.2 +networkx==3.5 # via cfn-lint openapi-schema-validator==0.6.3 # via openapi-spec-validator -openapi-spec-validator==0.7.1 +openapi-spec-validator==0.7.2 # via moto -orderly-set==5.3.0 +orderly-set==5.4.1 # via deepdiff -packaging==24.1 +packaging==25.0 # via # -c requirements/_base.txt # pytest @@ -195,16 +198,18 @@ parse==1.20.2 # via -r requirements/_test.in pathable==0.4.4 # via jsonschema-path -pluggy==1.5.0 - # via pytest +pluggy==1.6.0 + # via + # pytest + # pytest-cov ply==3.11 # via jsonpath-ng -propcache==0.3.1 +propcache==0.3.2 # via # -c requirements/_base.txt # aiohttp # yarl -psutil==6.1.0 +psutil==7.0.0 # via # -c requirements/_base.txt # -r requirements/_test.in @@ -212,28 +217,32 @@ py-partiql-parser==0.6.1 # via moto pycparser==2.22 # via cffi -pydantic==2.10.2 +pydantic==2.11.7 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator -pydantic-core==2.27.1 +pydantic-core==2.33.2 # via # -c requirements/_base.txt # pydantic -pyparsing==3.2.1 +pygments==2.19.2 + # via + # -c requirements/_base.txt + # pytest +pyparsing==3.2.3 # via moto -pytest==8.3.5 +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio # pytest-cov # pytest-mock -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -242,7 +251,7 @@ python-dateutil==2.9.0.post0 # -c requirements/_base.txt # botocore # moto -python-dotenv==1.0.1 +python-dotenv==1.1.1 # via # -c requirements/_base.txt # -r requirements/_test.in @@ -254,12 +263,12 @@ pyyaml==6.0.2 # jsonschema-path # moto # responses -redis==5.2.1 +redis==6.2.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # fakeredis -referencing==0.29.3 +referencing==0.35.1 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -268,33 +277,31 @@ referencing==0.29.3 # jsonschema-specifications regex==2024.11.6 # via cfn-lint -requests==2.32.3 +requests==2.32.4 # via # -c requirements/_base.txt # docker # jsonschema-path # moto # responses -responses==0.25.6 +responses==0.25.7 # via moto respx==0.22.0 # via -r 
requirements/_test.in rfc3339-validator==0.1.4 # via openapi-schema-validator -rpds-py==0.20.0 +rpds-py==0.25.1 # via # -c requirements/_base.txt # jsonschema # referencing -s3transfer==0.11.3 +s3transfer==0.13.0 # via # -c requirements/_base.txt # boto3 -setuptools==75.2.0 - # via - # -c requirements/_base.txt - # moto -six==1.16.0 +setuptools==80.9.0 + # via moto +six==1.17.0 # via # -c requirements/_base.txt # python-dateutil @@ -304,21 +311,26 @@ sniffio==1.3.1 # -c requirements/_base.txt # anyio # asgi-lifespan - # httpx sortedcontainers==2.4.0 # via fakeredis -sympy==1.13.3 +sympy==1.14.0 # via cfn-lint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt + # anyio # aws-sam-translator # cfn-lint # pydantic # pydantic-core -tzdata==2025.1 + # typing-inspection +typing-inspection==0.4.1 + # via + # -c requirements/_base.txt + # pydantic +tzdata==2025.2 # via faker -urllib3==2.2.3 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -331,13 +343,13 @@ werkzeug==3.1.3 # flask # flask-cors # moto -wrapt==1.16.0 +wrapt==1.17.2 # via # -c requirements/_base.txt # aws-xray-sdk xmltodict==0.14.2 # via moto -yarl==1.20.0 +yarl==1.20.1 # via # -c requirements/_base.txt # aiohttp diff --git a/services/efs-guardian/requirements/_tools.txt b/services/efs-guardian/requirements/_tools.txt index bd233d726413..33117faabeb5 100644 --- a/services/efs-guardian/requirements/_tools.txt +++ b/services/efs-guardian/requirements/_tools.txt @@ -1,4 +1,4 @@ -astroid==3.3.8 +astroid==3.3.10 # via pylint black==25.1.0 # via -r requirements/../../../requirements/devenv.txt @@ -8,19 +8,19 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.7 +click==8.2.1 # via # -c requirements/_base.txt # -c requirements/_test.txt # black # pip-tools -dill==0.3.9 +dill==0.4.0 # via pylint distlib==0.3.9 # via virtualenv -filelock==3.17.0 +filelock==3.18.0 # via virtualenv -identify==2.6.8 +identify==2.6.12 # via pre-commit isort==6.0.1 # via @@ -28,34 +28,36 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # black # mypy nodeenv==1.9.1 # via pre-commit -packaging==24.1 +packaging==25.0 # via # -c requirements/_base.txt # -c requirements/_test.txt # black # build pathspec==0.12.1 - # via black -pip==25.0.1 + # via + # black + # mypy +pip==25.1.1 # via pip-tools pip-tools==7.4.1 # via -r requirements/../../../requirements/devenv.txt -platformdirs==4.3.6 +platformdirs==4.3.8 # via # black # pylint # virtualenv -pre-commit==4.1.0 +pre-commit==4.2.0 # via -r requirements/../../../requirements/devenv.txt -pylint==3.3.4 +pylint==3.3.7 # via -r requirements/../../../requirements/devenv.txt pyproject-hooks==1.2.0 # via @@ -68,21 +70,20 @@ pyyaml==6.0.2 # -c requirements/_test.txt # pre-commit # watchdog -ruff==0.9.9 +ruff==0.12.1 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.2.0 +setuptools==80.9.0 # via - # -c requirements/_base.txt # -c requirements/_test.txt # pip-tools -tomlkit==0.13.2 +tomlkit==0.13.3 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt # mypy -virtualenv==20.29.2 +virtualenv==20.31.2 # via pre-commit watchdog==6.0.0 # via -r requirements/_tools.in diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/cli.py 
b/services/efs-guardian/src/simcore_service_efs_guardian/cli.py index 77d18015ec01..3b64a7663ef7 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/cli.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/cli.py @@ -19,6 +19,6 @@ def run(): """Runs application""" typer.secho("Sorry, this entrypoint is intentionally disabled. Use instead") typer.secho( - "$ uvicorn simcore_service_efs_guardian.main:the_app", + "$ uvicorn --factory simcore_service_efs_guardian.main:app_factory", fg=typer.colors.BLUE, ) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py b/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py index 7c68ba3f0e45..d44dea344147 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/core/application.py @@ -31,8 +31,6 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI: app_settings = settings or ApplicationSettings.create_from_envs() - logger.info("app settings: %s", app_settings.model_dump_json(indent=1)) - app = FastAPI( debug=app_settings.EFS_GUARDIAN_DEBUG, title=APP_NAME, diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py b/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py index ab5377a82d36..7894e4343a4c 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/core/settings.py @@ -3,10 +3,11 @@ from typing import Annotated, Final, cast from common_library.basic_types import DEFAULT_FACTORY +from common_library.logging.logging_utils_filtering import LoggerName, MessageSubstring from fastapi import FastAPI from models_library.basic_types import LogLevel, VersionTag from pydantic import AliasChoices, ByteSize, Field, TypeAdapter, field_validator -from servicelib.logging_utils_filtering import LoggerName, MessageSubstring +from servicelib.logging_utils import LogLevelInt from settings_library.application import BaseApplicationSettings from settings_library.efs import AwsEfsSettings from settings_library.postgres import PostgresSettings @@ -116,8 +117,8 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ] @cached_property - def LOG_LEVEL(self) -> LogLevel: # noqa: N802 - return self.EFS_GUARDIAN_LOGLEVEL + def log_level(self) -> LogLevelInt: + return cast(LogLevelInt, self.EFS_GUARDIAN_LOGLEVEL) @field_validator("EFS_GUARDIAN_LOGLEVEL", mode="before") @classmethod diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/main.py b/services/efs-guardian/src/simcore_service_efs_guardian/main.py index 711d32d83ee4..230016b548c2 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/main.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/main.py @@ -1,22 +1,39 @@ -"""Main application to be deployed by uvicorn (or equivalent) server - -""" +"""Main application to be deployed by uvicorn (or equivalent) server""" import logging +from typing import Final +from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.logging_utils import config_all_loggers +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from simcore_service_efs_guardian.core.application import create_app from simcore_service_efs_guardian.core.settings import ApplicationSettings -the_settings = ApplicationSettings.create_from_envs() 
-logging.basicConfig(level=the_settings.log_level) -logging.root.setLevel(the_settings.log_level) -config_all_loggers( - log_format_local_dev_enabled=the_settings.EFS_GUARDIAN_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=the_settings.EFS_GUARDIAN_LOG_FILTER_MAPPING, - tracing_settings=the_settings.EFS_GUARDIAN_TRACING, +_logger = logging.getLogger(__name__) + +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aiobotocore", + "aio_pika", + "aiormq", + "botocore", + "werkzeug", ) -# SINGLETON FastAPI app -the_app: FastAPI = create_app(the_settings) + +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = create_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.EFS_GUARDIAN_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.EFS_GUARDIAN_LOG_FILTER_MAPPING, + tracing_settings=app_settings.EFS_GUARDIAN_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) + + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/background_tasks_setup.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/background_tasks_setup.py index e1480f84b205..ae88f0fdb845 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/services/background_tasks_setup.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/background_tasks_setup.py @@ -3,8 +3,8 @@ from collections.abc import Awaitable, Callable from datetime import timedelta +from common_library.async_tools import cancel_wait_task from fastapi import FastAPI -from servicelib.async_utils import cancel_wait_task from servicelib.background_task_utils import exclusive_periodic from servicelib.logging_utils import log_catch, log_context diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/fire_and_forget_setup.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/fire_and_forget_setup.py index a38411f56a11..5ca03f7bd9ec 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/services/fire_and_forget_setup.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/fire_and_forget_setup.py @@ -1,8 +1,8 @@ import logging from collections.abc import Awaitable, Callable +from common_library.async_tools import cancel_wait_task from fastapi import FastAPI -from servicelib.async_utils import cancel_wait_task from servicelib.logging_utils import log_catch, log_context _logger = logging.getLogger(__name__) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/db.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/db.py index f5d5970216e0..e11e0edbfd6f 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/db.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/db.py @@ -1,10 +1,14 @@ from fastapi import FastAPI from servicelib.fastapi.db_asyncpg_engine import close_db_connection, connect_to_db +from ..._meta import APP_NAME + def setup(app: FastAPI): async def on_startup() -> None: - await connect_to_db(app, app.state.settings.EFS_GUARDIAN_POSTGRES) + await connect_to_db( + app, app.state.settings.EFS_GUARDIAN_POSTGRES, application_name=APP_NAME + ) async def on_shutdown() -> None: await 
close_db_connection(app) diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/redis.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/redis.py index 78d1462378a5..74cf65b320e4 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/redis.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/redis.py @@ -18,6 +18,7 @@ async def on_startup() -> None: app.state.redis_lock_client_sdk = RedisClientSDK( redis_locks_dsn, client_name=APP_NAME ) + await app.state.redis_lock_client_sdk.setup() async def on_shutdown() -> None: redis_lock_client_sdk: None | RedisClientSDK = app.state.redis_lock_client_sdk diff --git a/services/efs-guardian/tests/conftest.py b/services/efs-guardian/tests/conftest.py index 96585f4c87b5..8a3e48d23258 100644 --- a/services/efs-guardian/tests/conftest.py +++ b/services/efs-guardian/tests/conftest.py @@ -14,6 +14,7 @@ from pytest_simcore.helpers.typing_env import EnvVarsDict pytest_plugins = [ + "pytest_simcore.asyncio_event_loops", "pytest_simcore.cli_runner", "pytest_simcore.docker_compose", "pytest_simcore.docker_registry", @@ -23,6 +24,7 @@ "pytest_simcore.faker_users_data", "pytest_simcore.faker_products_data", "pytest_simcore.faker_projects_data", + "pytest_simcore.logging", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", "pytest_simcore.rabbit_service", diff --git a/services/efs-guardian/tests/unit/test_core_settings.py b/services/efs-guardian/tests/unit/test_core_settings.py index 0d72653a8e48..ba1d011d68b7 100644 --- a/services/efs-guardian/tests/unit/test_core_settings.py +++ b/services/efs-guardian/tests/unit/test_core_settings.py @@ -1,20 +1,21 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument # pylint: disable=unused-variable +""" +We can validate actual .env files (also referred to as `repo.config` files) by passing them via the CLI +$ ln -s /path/to/osparc-config/deployments/mydeploy.com/repo.config .secrets +$ pytest --external-envfile=.secrets --pdb tests/unit/test_core_settings.py + +""" from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict from simcore_service_efs_guardian.core.settings import ApplicationSettings -def test_settings(app_environment: EnvVarsDict): - """ - We validate actual envfiles (e.g.
repo.config files) by passing them via the CLI - - $ ln -s /path/to/osparc-config/deployments/mydeploy.com/repo.config .secrets - $ pytest --external-envfile=.secrets --pdb tests/unit/test_core_settings.py +def test_valid_application_settings(app_environment: EnvVarsDict): + assert app_environment - """ settings = ApplicationSettings() # type: ignore assert settings diff --git a/services/efs-guardian/tests/unit/test_efs_removal_policy_task.py b/services/efs-guardian/tests/unit/test_efs_removal_policy_task.py index 4000fab0c886..29673ef668d3 100644 --- a/services/efs-guardian/tests/unit/test_efs_removal_policy_task.py +++ b/services/efs-guardian/tests/unit/test_efs_removal_policy_task.py @@ -17,9 +17,11 @@ from models_library.users import UserID from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.postgres_tools import insert_and_get_row_lifespan +from pytest_simcore.helpers.postgres_users import ( + insert_and_get_user_and_secrets_lifespan, +) from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_postgres_database.models.projects import projects -from simcore_postgres_database.models.users import users from simcore_postgres_database.utils_repos import transaction_context from simcore_service_efs_guardian.core.settings import ( ApplicationSettings, @@ -71,12 +73,9 @@ async def user_in_db( injects a user in db """ assert user_id == user["id"] - async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup + async with insert_and_get_user_and_secrets_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup app.state.engine, - table=users, - values=user, - pk_col=users.c.id, - pk_value=user["id"], + **user, ) as row: yield row diff --git a/services/efs-guardian/tests/unit/test_main.py b/services/efs-guardian/tests/unit/test_main.py index bbdb41096c85..26b0fa6af396 100644 --- a/services/efs-guardian/tests/unit/test_main.py +++ b/services/efs-guardian/tests/unit/test_main.py @@ -7,6 +7,6 @@ def test_main_app(app_environment: EnvVarsDict): - from simcore_service_efs_guardian.main import the_app, the_settings + from simcore_service_efs_guardian.main import app_factory - assert the_app.state.settings == the_settings + app_factory() diff --git a/services/invitations/Dockerfile b/services/invitations/Dockerfile index 2961dfed8017..33a9251daaf3 100644 --- a/services/invitations/Dockerfile +++ b/services/invitations/Dockerfile @@ -2,7 +2,7 @@ # Define arguments in the global scope ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build FROM python:${PYTHON_VERSION}-slim-bookworm AS base-arm64 @@ -31,6 +31,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=private \ set -eux && \ apt-get update && \ apt-get install -y --no-install-recommends \ + fd-find \ gosu \ && apt-get clean -y \ && rm -rf /var/lib/apt/lists/* \ @@ -89,10 +90,7 @@ RUN uv venv "${VIRTUAL_ENV}" -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools + WORKDIR /build @@ -110,6 +108,9 @@ WORKDIR /build FROM build AS prod-only-deps ENV SC_BUILD_TARGET=prod-only-deps +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy WORKDIR /build/services/invitations @@ -135,8 +136,6 @@ ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production ENV PYTHONOPTIMIZE=TRUE -# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode -ENV 
UV_COMPILE_BYTECODE=1 WORKDIR /home/scu diff --git a/services/invitations/Makefile b/services/invitations/Makefile index f7a9b88fe722..7550b36a91a7 100644 --- a/services/invitations/Makefile +++ b/services/invitations/Makefile @@ -17,7 +17,7 @@ openapi.json: .env-ignore ## produces openapi.json @set -o allexport; \ source $<; \ set +o allexport; \ - python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(the_app.openapi(), indent=2) )" > $@ + python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app_factory().openapi(), indent=2) )" > $@ # diff --git a/services/invitations/docker/boot.sh b/services/invitations/docker/boot.sh index 0616dc4c2b73..fb1e26875869 100755 --- a/services/invitations/docker/boot.sh +++ b/services/invitations/docker/boot.sh @@ -24,7 +24,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/invitations - uv pip --quiet sync requirements/dev.txt + uv pip --quiet sync --link-mode=copy requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -33,7 +33,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy if command -v uv >/dev/null 2>&1; then - uv pip install debugpy + uv pip install --link-mode=copy debugpy else pip install debugpy fi @@ -48,19 +48,22 @@ SERVER_LOG_LEVEL=$(echo "${APP_LOG_LEVEL}" | tr '[:upper:]' '[:lower:]') echo "$INFO" "Log-level app/server: $APP_LOG_LEVEL/$SERVER_LOG_LEVEL" if [ "${SC_BOOT_MODE}" = "debug" ]; then - reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path "*/src/*" ! -path "*.*" -exec echo '--reload-dir {} \' \;) + reload_dir_packages=$(fdfind src /devel/packages --exec echo '--reload-dir {} ' | tr '\n' ' ') exec sh -c " cd services/invitations/src/simcore_service_invitations && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${INVITATIONS_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${INVITATIONS_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ - $reload_dir_packages + $reload_dir_packages \ --reload-dir . \ --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_invitations.main:the_app \ + exec uvicorn \ + --factory simcore_service_invitations.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/invitations/docker/entrypoint.sh b/services/invitations/docker/entrypoint.sh index 25153a6b2a2a..357d8b604d98 100755 --- a/services/invitations/docker/entrypoint.sh +++ b/services/invitations/docker/entrypoint.sh @@ -19,6 +19,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE @@ -56,10 +57,9 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; - # change user property of files already around + fdfind --owner ":$SC_USER_ID" --exclude proc --exec-batch chgrp --no-dereference "$CONT_GROUPNAME" . 
'/' echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fdfind --owner "$SC_USER_ID:" --exclude proc --exec-batch chown --no-dereference "$SC_USER_NAME" . '/' fi fi diff --git a/services/invitations/requirements/_base.in b/services/invitations/requirements/_base.in index f67a4ccf1d80..36c0d6a35c3e 100644 --- a/services/invitations/requirements/_base.in +++ b/services/invitations/requirements/_base.in @@ -16,4 +16,4 @@ cryptography packaging -typer[all] +typer diff --git a/services/invitations/requirements/_base.txt b/services/invitations/requirements/_base.txt index 1a3c71b6e497..c973ebd37e4c 100644 --- a/services/invitations/requirements/_base.txt +++ b/services/invitations/requirements/_base.txt @@ -10,7 +10,7 @@ aiofiles==24.1.0 # via -r requirements/../../../packages/service-library/requirements/_base.in aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -43,6 +43,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi attrs==25.2.0 @@ -67,11 +69,12 @@ certifi==2025.1.31 # httpcore # httpx # requests + # sentry-sdk cffi==1.17.1 # via cryptography charset-normalizer==3.4.1 # via requests -click==8.1.8 +click==8.2.1 # via # rich-toolkit # typer @@ -91,12 +94,6 @@ cryptography==44.0.2 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in -deprecated==1.2.18 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions dnspython==2.7.0 # via email-validator email-validator==2.2.0 @@ -107,12 +104,14 @@ exceptiongroup==1.2.2 # via aio-pika fast-depends==2.4.12 # via faststream -fastapi==0.115.12 +fastapi==0.116.1 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi-lifespan-manager -fastapi-cli==0.0.7 +fastapi-cli==0.0.8 # via fastapi +fastapi-cloud-cli==0.1.5 + # via fastapi-cli fastapi-lifespan-manager==0.1.4 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in faststream==0.5.35 @@ -121,13 +120,13 @@ frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.69.1 +googleapis-common-protos==1.70.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http grpcio==1.71.0 # via opentelemetry-exporter-otlp-proto-grpc -h11==0.14.0 +h11==0.16.0 # via # httpcore # uvicorn @@ -135,7 +134,7 @@ h2==4.2.0 # via httpx hpack==4.1.0 # via h2 -httpcore==1.0.7 +httpcore==1.0.9 # via httpx httptools==0.6.4 # via uvicorn @@ -155,6 +154,7 @@ httpx==0.28.1 # -c requirements/../../../requirements/constraints.txt # -r 
requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi + # fastapi-cloud-cli hyperframe==6.1.0 # via h2 idna==3.10 @@ -181,6 +181,10 @@ jinja2==3.1.6 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # fastapi +jsonref==1.1.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.23.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in @@ -197,7 +201,7 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.31.0 +opentelemetry-api==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -205,6 +209,7 @@ opentelemetry-api==1.31.0 # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-logging @@ -212,59 +217,63 @@ opentelemetry-api==1.31.0 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.31.0 +opentelemetry-exporter-otlp==1.34.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.31.0 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.31.0 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.31.0 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.52b0 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.52b0 +opentelemetry-instrumentation-aio-pika==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-asgi==0.52b0 +opentelemetry-instrumentation-asgi==0.55b1 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-fastapi==0.52b0 +opentelemetry-instrumentation-asyncpg==0.55b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-fastapi==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.52b0 +opentelemetry-instrumentation-httpx==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-logging==0.52b0 +opentelemetry-instrumentation-logging==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.52b0 +opentelemetry-instrumentation-redis==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.52b0 
+opentelemetry-instrumentation-requests==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.31.0 +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.31.0 +opentelemetry-sdk==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.52b0 +opentelemetry-semantic-conventions==0.55b1 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi + # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-fastapi # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.52b0 +opentelemetry-util-http==0.55b1 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi @@ -304,7 +313,7 @@ propcache==0.3.0 # via # aiohttp # yarl -protobuf==5.29.3 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto @@ -314,7 +323,7 @@ pycparser==2.22 # via cffi pycryptodome==3.21.0 # via stream-zip -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -341,11 +350,12 @@ pydantic==2.10.6 # -r requirements/../../../packages/settings-library/requirements/_base.in # fast-depends # fastapi + # fastapi-cloud-cli # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.3 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -432,27 +442,35 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via opentelemetry-exporter-otlp-proto-http -rich==13.9.4 +rich==14.1.0 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # rich-toolkit # typer -rich-toolkit==0.14.7 - # via fastapi-cli +rich-toolkit==0.15.0 + # via + # fastapi-cli + # fastapi-cloud-cli +rignore==0.6.4 + # via fastapi-cloud-cli rpds-py==0.23.1 # via # jsonschema # referencing +sentry-sdk==2.35.0 + # via fastapi-cloud-cli shellingham==1.5.4 # via typer six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via anyio -starlette==0.46.1 + # via + # anyio + # asgi-lifespan +starlette==0.47.2 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -475,27 +493,36 @@ toolz==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.15.2 +typer==0.16.1 # via # -r 
requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fastapi-cli + # fastapi-cloud-cli types-python-dateutil==2.9.0.20241206 # via arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # aiodebug # anyio # fastapi # faststream + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pydantic # pydantic-core # pydantic-extra-types # rich-toolkit + # starlette # typer -urllib3==2.3.0 + # typing-inspection +typing-inspection==0.4.1 + # via pydantic +urllib3==2.5.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -510,10 +537,12 @@ urllib3==2.3.0 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # requests + # sentry-sdk uvicorn==0.34.2 # via # fastapi # fastapi-cli + # fastapi-cloud-cli uvloop==0.21.0 # via uvicorn watchfiles==1.0.4 @@ -522,7 +551,6 @@ websockets==15.0.1 # via uvicorn wrapt==1.17.2 # via - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-httpx diff --git a/services/invitations/requirements/_test.in b/services/invitations/requirements/_test.in index 040fb5659da1..5fdd0bcf70b4 100644 --- a/services/invitations/requirements/_test.in +++ b/services/invitations/requirements/_test.in @@ -18,6 +18,7 @@ hypothesis pytest pytest-asyncio pytest-cov +pytest-mock pytest-runner pytest-sugar python-dotenv diff --git a/services/invitations/requirements/_test.txt b/services/invitations/requirements/_test.txt index 0b1bfff0ba68..518d2664fdb0 100644 --- a/services/invitations/requirements/_test.txt +++ b/services/invitations/requirements/_test.txt @@ -18,11 +18,11 @@ coverage==7.6.12 # pytest-cov faker==37.0.0 # via -r requirements/_test.in -h11==0.14.0 +h11==0.16.0 # via # -c requirements/_base.txt # httpcore -httpcore==1.0.7 +httpcore==1.0.9 # via # -c requirements/_base.txt # httpx @@ -46,16 +46,25 @@ packaging==24.2 # pytest # pytest-sugar pluggy==1.5.0 - # via pytest -pytest==8.3.5 + # via + # pytest + # pytest-cov +pygments==2.19.1 + # via + # -c requirements/_base.txt + # pytest +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio # pytest-cov + # pytest-mock # pytest-sugar -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 + # via -r requirements/_test.in +pytest-cov==6.2.1 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -73,7 +82,7 @@ sortedcontainers==2.4.0 # via hypothesis termcolor==2.5.0 # via pytest-sugar -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # anyio diff --git a/services/invitations/requirements/_tools.txt b/services/invitations/requirements/_tools.txt index a2cf1e419247..2f83ca50f902 100644 --- a/services/invitations/requirements/_tools.txt +++ b/services/invitations/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c 
requirements/_base.txt # black @@ -27,9 +27,9 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # black # mypy @@ -42,7 +42,9 @@ packaging==24.2 # black # build pathspec==0.12.1 - # via black + # via + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -68,11 +70,11 @@ pyyaml==6.0.2 # watchdog ruff==0.9.10 # via -r requirements/../../../requirements/devenv.txt -setuptools==76.0.0 +setuptools==80.9.0 # via pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/services/invitations/src/simcore_service_invitations/_meta.py b/services/invitations/src/simcore_service_invitations/_meta.py index c7b955a4db6d..58e1455bbafc 100644 --- a/services/invitations/src/simcore_service_invitations/_meta.py +++ b/services/invitations/src/simcore_service_invitations/_meta.py @@ -1,6 +1,4 @@ -""" Application's metadata - -""" +"""Application's metadata""" from typing import Final @@ -16,7 +14,7 @@ PROJECT_NAME: Final[str] = info.project_name VERSION: Final[Version] = info.version API_VERSION: Final[VersionStr] = info.__version__ -APP_NAME = PROJECT_NAME +APP_NAME: Final[str] = info.app_name API_VTAG: Final[VersionTag] = VersionTag(info.api_prefix_path_tag) SUMMARY: Final[str] = info.get_summary() diff --git a/services/invitations/src/simcore_service_invitations/cli.py b/services/invitations/src/simcore_service_invitations/cli.py index 67838b046155..f186db081ed1 100644 --- a/services/invitations/src/simcore_service_invitations/cli.py +++ b/services/invitations/src/simcore_service_invitations/cli.py @@ -1,5 +1,6 @@ import getpass import logging +import os import typer from cryptography.fernet import Fernet @@ -14,7 +15,6 @@ print_as_envfile, ) -from . 
import web_server from ._meta import PROJECT_NAME, __version__ from .core.settings import ApplicationSettings, MinimalApplicationSettings from .services.invitations import ( @@ -50,7 +50,7 @@ def generate_key( export INVITATIONS_SECRET_KEY=$(invitations-maker generate-key) """ assert ctx # nosec - print(Fernet.generate_key().decode()) # noqa: T201 + typer.echo(Fernet.generate_key().decode()) @main.command() @@ -106,6 +106,10 @@ def invite( None, help=InvitationInputs.model_fields["trial_account_days"].description, ), + extra_credits_in_usd: int = typer.Option( + None, + help=InvitationInputs.model_fields["extra_credits_in_usd"].description, + ), product: str = typer.Option( None, help=InvitationInputs.model_fields["product"].description, @@ -119,7 +123,7 @@ def invite( issuer=issuer, guest=TypeAdapter(EmailStr).validate_python(email), trial_account_days=trial_account_days, - extra_credits_in_usd=None, + extra_credits_in_usd=extra_credits_in_usd, product=product, ) @@ -129,7 +133,7 @@ def invite( base_url=settings.INVITATIONS_OSPARC_URL, default_product=settings.INVITATIONS_DEFAULT_PRODUCT, ) - print(invitation_link) # noqa: T201 + typer.echo(invitation_link) @main.command() @@ -149,18 +153,8 @@ def extract(ctx: typer.Context, invitation_url: str): ) assert invitation.product is not None # nosec - print(invitation.model_dump_json(indent=1)) # noqa: T201 - - except (InvalidInvitationCodeError, ValidationError): - _err_console.print("[bold red]Invalid code[/bold red]") - + typer.echo(invitation.model_dump_json(indent=1)) -@main.command() -def serve( - ctx: typer.Context, - *, - reload: bool = False, -): - """Starts server with http API""" - assert ctx # nosec - web_server.start(log_level="info", reload=reload) + except (InvalidInvitationCodeError, ValidationError) as err: + typer.secho("Invalid code", fg=typer.colors.RED, bold=True, err=True) + raise typer.Exit(os.EX_DATAERR) from err diff --git a/services/invitations/src/simcore_service_invitations/core/exceptions_handlers.py b/services/invitations/src/simcore_service_invitations/core/exceptions_handlers.py index 47c72be56a84..ba46db5b96cd 100644 --- a/services/invitations/src/simcore_service_invitations/core/exceptions_handlers.py +++ b/services/invitations/src/simcore_service_invitations/core/exceptions_handlers.py @@ -1,8 +1,8 @@ import logging +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from fastapi import FastAPI, Request, status from fastapi.responses import JSONResponse -from servicelib.logging_errors import create_troubleshotting_log_kwargs from ..services.invitations import InvalidInvitationCodeError @@ -16,7 +16,7 @@ def handle_invalid_invitation_code_error(request: Request, exception: Exception) user_msg = INVALID_INVITATION_URL_MSG _logger.warning( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( user_msg, error=exception, error_context={ diff --git a/services/invitations/src/simcore_service_invitations/core/settings.py b/services/invitations/src/simcore_service_invitations/core/settings.py index 2df105279290..ba4141cb7a7b 100644 --- a/services/invitations/src/simcore_service_invitations/core/settings.py +++ b/services/invitations/src/simcore_service_invitations/core/settings.py @@ -1,10 +1,11 @@ from functools import cached_property -from typing import Annotated +from typing import Annotated, cast from common_library.basic_types import DEFAULT_FACTORY +from common_library.logging.logging_utils_filtering import LoggerName, MessageSubstring from 
models_library.products import ProductName from pydantic import AliasChoices, Field, HttpUrl, SecretStr, field_validator -from servicelib.logging_utils_filtering import LoggerName, MessageSubstring +from servicelib.logging_utils import LogLevelInt from settings_library.application import BaseApplicationSettings from settings_library.basic_types import LogLevel, VersionTag from settings_library.tracing import TracingSettings @@ -55,8 +56,8 @@ class _BaseApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ] = DEFAULT_FACTORY @cached_property - def LOG_LEVEL(self): - return self.INVITATIONS_LOGLEVEL + def log_level(self) -> LogLevelInt: + return cast(LogLevelInt, self.INVITATIONS_LOGLEVEL) @field_validator("INVITATIONS_LOGLEVEL", mode="before") @classmethod diff --git a/services/invitations/src/simcore_service_invitations/main.py b/services/invitations/src/simcore_service_invitations/main.py index 4a21e994b31e..d59e54918a07 100644 --- a/services/invitations/src/simcore_service_invitations/main.py +++ b/services/invitations/src/simcore_service_invitations/main.py @@ -1,24 +1,36 @@ -"""Main application to be deployed by uvicorn (or equivalent) server - -""" +"""Main application to be deployed by uvicorn (or equivalent) server""" import logging +from typing import Final +from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.logging_utils import config_all_loggers +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from simcore_service_invitations.core.application import create_app from simcore_service_invitations.core.settings import ApplicationSettings -the_settings = ApplicationSettings.create_from_envs() +_logger = logging.getLogger(__name__) -# SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 -logging.basicConfig(level=the_settings.log_level) # NOSONAR -logging.root.setLevel(the_settings.log_level) -config_all_loggers( - log_format_local_dev_enabled=the_settings.INVITATIONS_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=the_settings.INVITATIONS_LOG_FILTER_MAPPING, - tracing_settings=the_settings.INVITATIONS_TRACING, +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aio_pika", + "aiormq", ) -# SINGLETON FastAPI app -the_app: FastAPI = create_app(the_settings) + +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = create_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.INVITATIONS_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.INVITATIONS_LOG_FILTER_MAPPING, + tracing_settings=app_settings.INVITATIONS_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) + + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app diff --git a/services/invitations/src/simcore_service_invitations/web_server.py b/services/invitations/src/simcore_service_invitations/web_server.py index 92015153841c..55c291e78253 100644 --- a/services/invitations/src/simcore_service_invitations/web_server.py +++ b/services/invitations/src/simcore_service_invitations/web_server.py @@ -7,9 +7,10 @@ def start( log_level: Literal["info", "debug", "warning", "error"], *, reload: bool = False ): uvicorn.run( - "simcore_service_invitations.web_main:the_app", + "simcore_service_invitations.web_main:app_factory", host="0.0.0.0", # nosec 
port=8000, log_level=log_level, reload=reload, + factory=True, ) diff --git a/services/invitations/tests/unit/api/test_api_invitations.py b/services/invitations/tests/unit/api/test_api_invitations.py index 84f97fb45fe3..1949a38a9662 100644 --- a/services/invitations/tests/unit/api/test_api_invitations.py +++ b/services/invitations/tests/unit/api/test_api_invitations.py @@ -40,6 +40,7 @@ def test_create_invitation( assert invitation.issuer == invitation_input.issuer assert invitation.guest == invitation_input.guest assert invitation.trial_account_days == invitation_input.trial_account_days + assert invitation.extra_credits_in_usd == invitation_input.extra_credits_in_usd # checks issue with `//` reported in https://github.com/ITISFoundation/osparc-simcore/issues/7055 assert invitation.invitation_url @@ -61,6 +62,7 @@ def test_check_invitation( "issuer": invitation_data.issuer, "guest": invitation_data.guest, "trial_account_days": invitation_data.trial_account_days, + "extra_credits_in_usd": invitation_data.extra_credits_in_usd, }, auth=basic_auth, ) @@ -85,6 +87,7 @@ def test_check_invitation( assert invitation.issuer == invitation_data.issuer assert invitation.guest == invitation_data.guest assert invitation.trial_account_days == invitation_data.trial_account_days + assert invitation.extra_credits_in_usd == invitation_data.extra_credits_in_usd def test_check_valid_invitation( diff --git a/services/invitations/tests/unit/conftest.py b/services/invitations/tests/unit/conftest.py index 1bed48254480..414062b9f944 100644 --- a/services/invitations/tests/unit/conftest.py +++ b/services/invitations/tests/unit/conftest.py @@ -10,11 +10,14 @@ from cryptography.fernet import Fernet from faker import Faker from models_library.products import ProductName +from pydantic import PositiveInt, TypeAdapter from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_invitations.services.invitations import InvitationInputs pytest_plugins = [ + "pytest_simcore.asyncio_event_loops", + "pytest_simcore.logging", "pytest_simcore.cli_runner", "pytest_simcore.repository_paths", ] @@ -85,6 +88,11 @@ def is_trial_account(request: pytest.FixtureRequest) -> bool: return request.param +@pytest.fixture +def extra_credits_in_usd(is_trial_account: bool) -> PositiveInt | None: + return TypeAdapter(PositiveInt).validate_python(123) if is_trial_account else None + + @pytest.fixture def default_product() -> ProductName: return "s4llite" @@ -98,7 +106,10 @@ def product(request: pytest.FixtureRequest) -> ProductName | None: @pytest.fixture def invitation_data( - is_trial_account: bool, faker: Faker, product: ProductName | None + is_trial_account: bool, + faker: Faker, + product: ProductName | None, + extra_credits_in_usd: PositiveInt | None, ) -> InvitationInputs: # first version kwargs = { @@ -110,4 +121,7 @@ def invitation_data( if product: kwargs["product"] = product + if extra_credits_in_usd is not None: + kwargs["extra_credits_in_usd"] = extra_credits_in_usd + return InvitationInputs.model_validate(kwargs) diff --git a/services/invitations/tests/unit/test_cli.py b/services/invitations/tests/unit/test_cli.py index 0c4bf15c7a8b..7afa83c08bc9 100644 --- a/services/invitations/tests/unit/test_cli.py +++ b/services/invitations/tests/unit/test_cli.py @@ -45,19 +45,24 @@ def test_invite_user_and_check_invitation( "INVITATIONS_DEFAULT_PRODUCT": default_product, } - expected = { + expected_invitation = { 
**invitation_data.model_dump(exclude={"product"}), "product": environs["INVITATIONS_DEFAULT_PRODUCT"], } # invitations-maker invite guest@email.com --issuer=me --trial-account-days=3 - trial_account = "" + other_options = "" if invitation_data.trial_account_days: - trial_account = f"--trial-account-days={invitation_data.trial_account_days}" + other_options = f"--trial-account-days={invitation_data.trial_account_days}" + + if invitation_data.extra_credits_in_usd: + other_options += ( + f" --extra-credits-in-usd={invitation_data.extra_credits_in_usd}" + ) result = cli_runner.invoke( main, - f"invite {invitation_data.guest} --issuer={invitation_data.issuer} {trial_account}", + f"invite {invitation_data.guest} --issuer={invitation_data.issuer} {other_options}", env=environs, ) assert result.exit_code == os.EX_OK, result.output @@ -73,7 +78,7 @@ def test_invite_user_and_check_invitation( ) assert result.exit_code == os.EX_OK, result.output assert ( - expected + expected_invitation == TypeAdapter(InvitationInputs).validate_json(result.stdout).model_dump() ) @@ -99,3 +104,24 @@ def test_list_settings(cli_runner: CliRunner, app_environment: EnvVarsDict): print(result.output) settings = ApplicationSettings.model_validate_json(result.output) assert settings == ApplicationSettings.create_from_envs() + + +def test_extract_invalid_invitation_code( + cli_runner: CliRunner, faker: Faker, app_environment: EnvVarsDict +): + """Test that extract command handles invalid invitation codes properly""" + # Create an invalid invitation URL + invalid_invitation_url = f"{faker.url()}#invitation=invalid_code_123" + + # Run extract command with invalid invitation URL + result = cli_runner.invoke( + main, + f'extract "{invalid_invitation_url}"', + env=app_environment, + ) + + # Verify command exits with correct error code + assert result.exit_code == os.EX_DATAERR + + # Verify error message is displayed via stderr + assert "Invalid code" in result.stderr diff --git a/services/invitations/tests/unit/test_core_settings.py b/services/invitations/tests/unit/test_core_settings.py index 7c68e809eda7..150f904ab09f 100644 --- a/services/invitations/tests/unit/test_core_settings.py +++ b/services/invitations/tests/unit/test_core_settings.py @@ -2,7 +2,13 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable # pylint: disable=too-many-arguments +""" +We can validate actual .env files (also refered as `repo.config` files) by passing them via the CLI +$ ln -s /path/to/osparc-config/deployments/mydeploy.com/repo.config .secrets +$ pytest --external-envfile=.secrets --pdb tests/unit/test_core_settings.py + +""" import pytest from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict @@ -29,6 +35,12 @@ def test_valid_cli_application_settings( assert settings -def test_valid_web_application_settings(app_environment: EnvVarsDict): - settings = ApplicationSettings.create_from_envs() +def test_valid_application_settings(app_environment: EnvVarsDict): + assert app_environment + + settings = ApplicationSettings() # type: ignore assert settings + + assert settings == ApplicationSettings.create_from_envs() + + assert settings.INVITATIONS_LOGLEVEL == "INFO" diff --git a/services/migration/Dockerfile b/services/migration/Dockerfile index 01c428a8654c..1465e5664b20 100644 --- a/services/migration/Dockerfile +++ b/services/migration/Dockerfile @@ -2,7 +2,7 @@ # Define arguments in the global scope ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS 
uv_build FROM python:${PYTHON_VERSION}-slim-bookworm AS base-arm64 @@ -60,12 +60,13 @@ COPY --from=uv_build /uv /uvx /bin/ # NOTE: python virtualenv is used here such that installed packages may be moved to production image easily by copying the venv RUN uv venv "${VIRTUAL_ENV}" +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy + + -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools WORKDIR /build/packages/postgres-database @@ -82,17 +83,8 @@ RUN \ FROM base AS production ENV PYTHONOPTIMIZE=TRUE -# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode -ENV UV_COMPILE_BYTECODE=1 - -# testing defaults -ENV POSTGRES_USER=scu \ - POSTGRES_PASSWORD=adminadmin \ - POSTGRES_HOST=postgres \ - POSTGRES_PORT=5432 \ - POSTGRES_DB=simcoredb \ - SC_DONE_MARK_FILE=migration.done +ENV SC_DONE_MARK_FILE=migration.done WORKDIR /home/scu diff --git a/services/migration/docker/entrypoint.sh b/services/migration/docker/entrypoint.sh index 3b639e936ed0..05671ccd2c7c 100755 --- a/services/migration/docker/entrypoint.sh +++ b/services/migration/docker/entrypoint.sh @@ -12,6 +12,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" echo "$INFO ${SC_USER_NAME} rights : $(id "$SC_USER_NAME")" echo "$INFO local dir : $(ls -al)" diff --git a/services/migration/requirements/_test.txt b/services/migration/requirements/_test.txt index f807b504dae9..9f50d1b52334 100644 --- a/services/migration/requirements/_test.txt +++ b/services/migration/requirements/_test.txt @@ -23,28 +23,34 @@ jsonschema==4.23.0 # via -r requirements/_test.in jsonschema-specifications==2024.10.1 # via jsonschema -mypy==1.15.0 +mypy==1.16.1 # via sqlalchemy -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via mypy packaging==24.2 # via pytest +pathspec==0.12.1 + # via mypy pluggy==1.5.0 + # via + # pytest + # pytest-cov +pygments==2.19.2 # via pytest -pytest==8.3.5 +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio # pytest-cov # pytest-docker # pytest-mock -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in -pytest-docker==3.2.0 +pytest-docker==3.2.3 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -59,7 +65,7 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via docker rpds-py==0.23.1 # via @@ -73,11 +79,11 @@ sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy tenacity==9.0.0 # via -r requirements/_test.in -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # mypy # sqlalchemy2-stubs -urllib3==2.3.0 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # docker diff --git a/services/migration/requirements/_tools.txt b/services/migration/requirements/_tools.txt index 19e0de206774..285ee19cda92 100644 --- a/services/migration/requirements/_tools.txt +++ b/services/migration/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # black # pip-tools @@ -26,11 +26,11 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via 
pylint -mypy==1.15.0 +mypy==1.16.1 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # -c requirements/_test.txt # black @@ -43,7 +43,10 @@ packaging==24.2 # black # build pathspec==0.12.1 - # via black + # via + # -c requirements/_test.txt + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -69,11 +72,11 @@ pyyaml==6.0.2 # watchdog ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.2 +setuptools==80.9.0 # via pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_test.txt # mypy diff --git a/services/migration/tests/conftest.py b/services/migration/tests/conftest.py index 2ad21378f678..eb8c5b1c743f 100644 --- a/services/migration/tests/conftest.py +++ b/services/migration/tests/conftest.py @@ -2,5 +2,6 @@ "pytest_simcore.docker_compose", "pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", + "pytest_simcore.logging", "pytest_simcore.repository_paths", ] diff --git a/services/notifications/Dockerfile b/services/notifications/Dockerfile index 1843710d03a9..f01d12c4f41b 100644 --- a/services/notifications/Dockerfile +++ b/services/notifications/Dockerfile @@ -2,7 +2,7 @@ # Define arguments in the global scope ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build FROM python:${PYTHON_VERSION}-slim-bookworm AS base-arm64 @@ -31,6 +31,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=private \ set -eux && \ apt-get update && \ apt-get install -y --no-install-recommends \ + fd-find \ gosu \ && apt-get clean -y \ # verify that the binary works @@ -86,10 +87,7 @@ RUN uv venv "${VIRTUAL_ENV}" -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools + WORKDIR /build @@ -107,6 +105,10 @@ FROM build AS prod-only-deps ENV SC_BUILD_TARGET=prod-only-deps +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy + WORKDIR /build/services/notifications RUN \ @@ -131,8 +133,6 @@ ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production ENV PYTHONOPTIMIZE=TRUE -# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode -ENV UV_COMPILE_BYTECODE=1 WORKDIR /home/scu diff --git a/services/notifications/Makefile b/services/notifications/Makefile index bc14e6354c18..13474cf27018 100644 --- a/services/notifications/Makefile +++ b/services/notifications/Makefile @@ -15,4 +15,4 @@ openapi.json: .env-ignore ## produces openapi.json @set -o allexport; \ source $<; \ set +o allexport; \ - python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(the_app.openapi(), indent=2) )" > $@ + python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app_factory().openapi(), indent=2) )" > $@ diff --git a/services/notifications/docker/boot.sh b/services/notifications/docker/boot.sh index 8d079d9bc1be..dbae76238cd8 100755 --- a/services/notifications/docker/boot.sh +++ b/services/notifications/docker/boot.sh @@ -24,7 +24,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/notifications - uv pip --quiet sync requirements/dev.txt + uv pip --quiet sync --link-mode=copy requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -33,7 +33,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs 
debugpy if command -v uv >/dev/null 2>&1; then - uv pip install debugpy + uv pip install --link-mode=copy debugpy else pip install debugpy fi @@ -48,20 +48,23 @@ SERVER_LOG_LEVEL=$(echo "${APP_LOG_LEVEL}" | tr '[:upper:]' '[:lower:]') echo "$INFO" "Log-level app/server: $APP_LOG_LEVEL/$SERVER_LOG_LEVEL" if [ "${SC_BOOT_MODE}" = "debug" ]; then - reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path "*/src/*" ! -path "*.*" -exec echo '--reload-dir {} \' \;) + reload_dir_packages=$(fdfind src /devel/packages --exec echo '--reload-dir {} ' | tr '\n' ' ') exec sh -c " cd services/notifications/src/simcore_service_notifications && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${NOTIFICATIONS_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${NOTIFICATIONS_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:app_factory \ --host 0.0.0.0 \ --port 8000 \ --reload \ - $reload_dir_packages + $reload_dir_packages \ --reload-dir . \ --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_notifications.main:the_app \ + exec uvicorn \ + --factory simcore_service_notifications.main:app_factory \ --host 0.0.0.0 \ --port 8000 \ --log-level "${SERVER_LOG_LEVEL}" \ diff --git a/services/notifications/docker/entrypoint.sh b/services/notifications/docker/entrypoint.sh index 1568d6affdce..357d8b604d98 100755 --- a/services/notifications/docker/entrypoint.sh +++ b/services/notifications/docker/entrypoint.sh @@ -19,6 +19,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE @@ -56,14 +57,12 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; - # change user property of files already around + fdfind --owner ":$SC_USER_ID" --exclude proc --exec-batch chgrp --no-dereference "$CONT_GROUPNAME" . '/' echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fdfind --owner "$SC_USER_ID:" --exclude proc --exec-batch chown --no-dereference "$SC_USER_NAME" . '/' fi fi - echo "$INFO Starting $* ..." 
echo " $SC_USER_NAME rights : $(id "$SC_USER_NAME")" echo " local dir : $(ls -al)" diff --git a/services/notifications/requirements/_base.txt b/services/notifications/requirements/_base.txt index bb08727f0d81..20228c9ecd33 100644 --- a/services/notifications/requirements/_base.txt +++ b/services/notifications/requirements/_base.txt @@ -10,7 +10,7 @@ aiofiles==24.1.0 # via -r requirements/../../../packages/service-library/requirements/_base.in aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -47,6 +47,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi asyncpg==0.30.0 @@ -75,19 +77,14 @@ certifi==2025.1.31 # httpcore # httpx # requests + # sentry-sdk charset-normalizer==3.4.1 # via requests -click==8.1.8 +click==8.2.1 # via # rich-toolkit # typer # uvicorn -deprecated==1.2.18 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions dnspython==2.7.0 # via email-validator email-validator==2.2.0 @@ -98,12 +95,14 @@ exceptiongroup==1.2.2 # via aio-pika fast-depends==2.4.12 # via faststream -fastapi==0.115.12 +fastapi==0.116.1 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi-lifespan-manager -fastapi-cli==0.0.7 +fastapi-cli==0.0.8 # via fastapi +fastapi-cloud-cli==0.1.5 + # via fastapi-cli fastapi-lifespan-manager==0.1.4 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in faststream==0.5.37 @@ -112,7 +111,7 @@ frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.69.2 +googleapis-common-protos==1.70.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -120,7 +119,7 @@ greenlet==3.1.1 # via sqlalchemy grpcio==1.71.0 # via opentelemetry-exporter-otlp-proto-grpc -h11==0.14.0 +h11==0.16.0 # via # httpcore # uvicorn @@ -128,7 +127,7 @@ h2==4.2.0 # via httpx hpack==4.1.0 # via h2 -httpcore==1.0.7 +httpcore==1.0.9 # via httpx httptools==0.6.4 # via uvicorn @@ -150,6 +149,7 @@ httpx==0.28.1 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi + # fastapi-cloud-cli hyperframe==6.1.0 # via h2 idna==3.10 @@ -178,13 +178,17 @@ jinja2==3.1.6 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # fastapi +jsonref==1.1.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.23.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in # -r 
requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema-specifications==2024.10.1 # via jsonschema -mako==1.3.9 +mako==1.3.10 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -213,7 +217,7 @@ multidict==6.2.0 # via # aiohttp # yarl -opentelemetry-api==1.31.1 +opentelemetry-api==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -229,17 +233,17 @@ opentelemetry-api==1.31.1 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.31.1 +opentelemetry-exporter-otlp==1.34.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.31.1 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.31.1 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.31.1 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.52b1 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi @@ -249,33 +253,33 @@ opentelemetry-instrumentation==0.52b1 # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.52b1 +opentelemetry-instrumentation-aio-pika==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-asgi==0.52b1 +opentelemetry-instrumentation-asgi==0.55b1 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-asyncpg==0.52b1 - # via -r requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-instrumentation-fastapi==0.52b1 +opentelemetry-instrumentation-asyncpg==0.55b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-fastapi==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.52b1 +opentelemetry-instrumentation-httpx==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-logging==0.52b1 +opentelemetry-instrumentation-logging==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.52b1 +opentelemetry-instrumentation-redis==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.52b1 +opentelemetry-instrumentation-requests==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.31.1 +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.31.1 +opentelemetry-sdk==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # 
opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.52b1 +opentelemetry-semantic-conventions==0.55b1 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi @@ -285,7 +289,7 @@ opentelemetry-semantic-conventions==0.52b1 # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.52b1 +opentelemetry-util-http==0.55b1 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi @@ -328,7 +332,7 @@ propcache==0.3.1 # via # aiohttp # yarl -protobuf==5.29.4 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto @@ -338,7 +342,7 @@ psycopg2-binary==2.9.10 # via sqlalchemy pycryptodome==3.22.0 # via stream-zip -pydantic==2.11.0 +pydantic==2.11.7 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -370,11 +374,12 @@ pydantic==2.11.0 # -r requirements/_base.in # fast-depends # fastapi + # fastapi-cloud-cli # pydantic-extra-types # pydantic-settings -pydantic-core==2.33.0 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.3 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -470,26 +475,34 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via opentelemetry-exporter-otlp-proto-http -rich==13.9.4 +rich==14.1.0 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # rich-toolkit # typer -rich-toolkit==0.14.7 - # via fastapi-cli +rich-toolkit==0.15.0 + # via + # fastapi-cli + # fastapi-cloud-cli +rignore==0.6.4 + # via fastapi-cloud-cli rpds-py==0.24.0 # via # jsonschema # referencing +sentry-sdk==2.35.0 + # via fastapi-cloud-cli shellingham==1.5.4 # via typer six==1.17.0 # via python-dateutil sniffio==1.3.1 - # via anyio + # via + # anyio + # asgi-lifespan sqlalchemy==1.4.54 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -508,7 +521,7 @@ sqlalchemy==1.4.54 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/postgres-database/requirements/_base.in # alembic -starlette==0.46.1 +starlette==0.47.2 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -533,30 +546,36 @@ toolz==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.15.2 +typer==0.16.1 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # fastapi-cli + # fastapi-cloud-cli types-python-dateutil==2.9.0.20241206 # via arrow 
-typing-extensions==4.13.0 +typing-extensions==4.14.1 # via # aiodebug # alembic # anyio # fastapi # faststream + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pydantic # pydantic-core # pydantic-extra-types # rich-toolkit + # starlette # typer # typing-inspection typing-inspection==0.4.0 # via pydantic -urllib3==2.3.0 +urllib3==2.5.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -573,10 +592,12 @@ urllib3==2.3.0 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # requests + # sentry-sdk uvicorn==0.34.2 # via # fastapi # fastapi-cli + # fastapi-cloud-cli uvloop==0.21.0 # via uvicorn watchfiles==1.0.5 @@ -585,7 +606,6 @@ websockets==15.0.1 # via uvicorn wrapt==1.17.2 # via - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-httpx diff --git a/services/notifications/requirements/_test.txt b/services/notifications/requirements/_test.txt index 483fca1f9a3a..4e8422dc614a 100644 --- a/services/notifications/requirements/_test.txt +++ b/services/notifications/requirements/_test.txt @@ -3,7 +3,9 @@ anyio==4.9.0 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in certifi==2025.1.31 # via # -c requirements/../../../requirements/constraints.txt @@ -23,11 +25,11 @@ docker==7.1.0 # via -r requirements/_test.in faker==37.1.0 # via -r requirements/_test.in -h11==0.14.0 +h11==0.16.0 # via # -c requirements/_base.txt # httpcore -httpcore==1.0.7 +httpcore==1.0.9 # via # -c requirements/_base.txt # httpx @@ -49,18 +51,24 @@ packaging==24.2 # -c requirements/_base.txt # pytest pluggy==1.5.0 - # via pytest -pytest==8.3.5 + # via + # pytest + # pytest-cov +pygments==2.19.1 + # via + # -c requirements/_base.txt + # pytest +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio # pytest-cov # pytest-mock -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -68,7 +76,7 @@ python-dotenv==1.1.0 # via # -c requirements/_base.txt # -r requirements/_test.in -requests==2.32.3 +requests==2.32.4 # via # -c requirements/_base.txt # docker @@ -77,13 +85,13 @@ sniffio==1.3.1 # -c requirements/_base.txt # anyio # asgi-lifespan -typing-extensions==4.13.0 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # anyio tzdata==2025.2 # via faker -urllib3==2.3.0 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/services/notifications/requirements/_tools.txt b/services/notifications/requirements/_tools.txt index 4deff3bbf27d..44b443088762 100644 --- a/services/notifications/requirements/_tools.txt +++ b/services/notifications/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c 
requirements/_base.txt # black @@ -27,9 +27,9 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # black # mypy @@ -42,7 +42,9 @@ packaging==24.2 # black # build pathspec==0.12.1 - # via black + # via + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -67,11 +69,11 @@ pyyaml==6.0.2 # pre-commit ruff==0.11.2 # via -r requirements/../../../requirements/devenv.txt -setuptools==78.1.0 +setuptools==80.9.0 # via pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.13.0 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/services/notifications/src/simcore_service_notifications/clients/postgres/_liveness.py b/services/notifications/src/simcore_service_notifications/clients/postgres/_liveness.py index 57bc7a400768..6d26fd83e939 100644 --- a/services/notifications/src/simcore_service_notifications/clients/postgres/_liveness.py +++ b/services/notifications/src/simcore_service_notifications/clients/postgres/_liveness.py @@ -3,9 +3,9 @@ from datetime import timedelta from typing import Final +from common_library.async_tools import cancel_wait_task from fastapi import FastAPI from models_library.healthchecks import IsResponsive, LivenessResult -from servicelib.async_utils import cancel_wait_task from servicelib.background_task import create_periodic_task from servicelib.db_asyncpg_utils import check_postgres_liveness from servicelib.fastapi.db_asyncpg_engine import get_engine diff --git a/services/notifications/src/simcore_service_notifications/core/application.py b/services/notifications/src/simcore_service_notifications/core/application.py index 5f3245d9d522..63517b52d5b4 100644 --- a/services/notifications/src/simcore_service_notifications/core/application.py +++ b/services/notifications/src/simcore_service_notifications/core/application.py @@ -1,6 +1,7 @@ import logging from fastapi import FastAPI +from servicelib.fastapi.lifespan_utils import Lifespan from servicelib.fastapi.monitoring import ( initialize_prometheus_instrumentation, ) @@ -12,7 +13,6 @@ initialize_fastapi_app_tracing, setup_tracing, ) -from servicelib.logging_utils import config_all_loggers from .._meta import API_VTAG, APP_NAME, SUMMARY, VERSION from ..api.rest.routing import initialize_rest_api @@ -22,22 +22,11 @@ _logger = logging.getLogger(__name__) -def _initialise_logger(settings: ApplicationSettings): - # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 - logging.basicConfig(level=settings.LOG_LEVEL.value) # NOSONAR - logging.root.setLevel(settings.LOG_LEVEL.value) - config_all_loggers( - log_format_local_dev_enabled=settings.NOTIFICATIONS_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=settings.NOTIFICATIONS_VOLUMES_LOG_FILTER_MAPPING, - tracing_settings=settings.NOTIFICATIONS_TRACING, - ) - - -def create_app() -> FastAPI: - settings = ApplicationSettings.create_from_envs() - _logger.debug(settings.model_dump_json(indent=2)) - - _initialise_logger(settings) +def create_app( + settings: ApplicationSettings | None = None, + logging_lifespan: Lifespan | None = None, +) -> FastAPI: + settings = settings or ApplicationSettings.create_from_envs() assert settings.SC_BOOT_MODE # nosec app = FastAPI( @@ -46,7 +35,7 @@ def create_app() -> FastAPI: description=SUMMARY, version=f"{VERSION}", openapi_url=f"/api/{API_VTAG}/openapi.json", - lifespan=events.create_app_lifespan(), + 
lifespan=events.create_app_lifespan(logging_lifespan=logging_lifespan), **get_common_oas_options(is_devel_mode=settings.SC_BOOT_MODE.is_devel_mode()), ) override_fastapi_openapi_method(app) diff --git a/services/notifications/src/simcore_service_notifications/core/events.py b/services/notifications/src/simcore_service_notifications/core/events.py index 879582575c0e..2660e2f426ca 100644 --- a/services/notifications/src/simcore_service_notifications/core/events.py +++ b/services/notifications/src/simcore_service_notifications/core/events.py @@ -2,6 +2,7 @@ from fastapi import FastAPI from fastapi_lifespan_manager import LifespanManager, State +from servicelib.fastapi.lifespan_utils import Lifespan from servicelib.fastapi.monitoring import ( create_prometheus_instrumentationmain_input_state, prometheus_instrumentation_lifespan, @@ -35,9 +36,13 @@ async def _settings_lifespan(app: FastAPI) -> AsyncIterator[State]: } -def create_app_lifespan(): +def create_app_lifespan( + logging_lifespan: Lifespan | None = None, +) -> LifespanManager[FastAPI]: # WARNING: order matters app_lifespan = LifespanManager() + if logging_lifespan: + app_lifespan.add(logging_lifespan) app_lifespan.add(_settings_lifespan) # - postgres diff --git a/services/notifications/src/simcore_service_notifications/core/settings.py b/services/notifications/src/simcore_service_notifications/core/settings.py index 6f7e13a546e3..89d66154b042 100644 --- a/services/notifications/src/simcore_service_notifications/core/settings.py +++ b/services/notifications/src/simcore_service_notifications/core/settings.py @@ -1,9 +1,9 @@ from typing import Annotated from common_library.basic_types import DEFAULT_FACTORY +from common_library.logging.logging_utils_filtering import LoggerName, MessageSubstring from models_library.basic_types import BootModeEnum, LogLevel from pydantic import AliasChoices, Field, field_validator -from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.base import BaseCustomSettings from settings_library.postgres import PostgresSettings from settings_library.rabbit import RabbitSettings diff --git a/services/notifications/src/simcore_service_notifications/main.py b/services/notifications/src/simcore_service_notifications/main.py index 8b2e0ed31966..cda95f9dd9f5 100644 --- a/services/notifications/src/simcore_service_notifications/main.py +++ b/services/notifications/src/simcore_service_notifications/main.py @@ -1,3 +1,34 @@ +import logging +from typing import Final + +from common_library.json_serialization import json_dumps +from fastapi import FastAPI +from servicelib.fastapi.logging_lifespan import create_logging_lifespan from simcore_service_notifications.core.application import create_app +from simcore_service_notifications.core.settings import ( + ApplicationSettings, +) + +_logger = logging.getLogger(__name__) + +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aio_pika", + "aiormq", +) + + +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_lifespan = create_logging_lifespan( + log_format_local_dev_enabled=app_settings.NOTIFICATIONS_VOLUMES_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.NOTIFICATIONS_VOLUMES_LOG_FILTER_MAPPING, + tracing_settings=app_settings.NOTIFICATIONS_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) -the_app = create_app() + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + return 
create_app(settings=app_settings, logging_lifespan=logging_lifespan) diff --git a/services/notifications/tests/conftest.py b/services/notifications/tests/conftest.py index a310c11b5d51..6091f50b9de0 100644 --- a/services/notifications/tests/conftest.py +++ b/services/notifications/tests/conftest.py @@ -2,24 +2,42 @@ # pylint: disable=unused-argument +from pathlib import Path + import pytest from models_library.basic_types import BootModeEnum from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict pytest_plugins = [ + "pytest_simcore.asyncio_event_loops", "pytest_simcore.docker_compose", "pytest_simcore.docker_swarm", + "pytest_simcore.environment_configs", + "pytest_simcore.logging", "pytest_simcore.postgres_service", "pytest_simcore.rabbit_service", "pytest_simcore.repository_paths", ] +@pytest.fixture(scope="session") +def project_slug_dir(osparc_simcore_root_dir: Path) -> Path: + # fixtures in pytest_simcore.environs + service_folder = osparc_simcore_root_dir / "services" / "notifications" + assert service_folder.exists() + assert any(service_folder.glob("src/simcore_service_notifications")) + return service_folder + + @pytest.fixture -def mock_environment(monkeypatch: pytest.MonkeyPatch) -> EnvVarsDict: +def mock_environment( + monkeypatch: pytest.MonkeyPatch, + docker_compose_service_environment_dict: EnvVarsDict, +) -> EnvVarsDict: return setenvs_from_dict( monkeypatch, { + **docker_compose_service_environment_dict, "LOGLEVEL": "DEBUG", "SC_BOOT_MODE": BootModeEnum.DEBUG, }, diff --git a/services/notifications/tests/unit/conftest.py b/services/notifications/tests/unit/conftest.py index e1f57c7c5c76..5f785451f12a 100644 --- a/services/notifications/tests/unit/conftest.py +++ b/services/notifications/tests/unit/conftest.py @@ -14,11 +14,11 @@ @pytest.fixture -def service_env( +def app_environment( monkeypatch: pytest.MonkeyPatch, mock_environment: EnvVarsDict, rabbit_service: RabbitSettings, - postgres_db: sa.engine.Engine, + postgres_db: sa.engine.Engine, # waiting for postgres service to start postgres_env_vars_dict: EnvVarsDict, ) -> EnvVarsDict: return setenvs_from_dict( @@ -36,7 +36,7 @@ def service_env( @pytest.fixture -async def initialized_app(service_env: EnvVarsDict) -> AsyncIterator[FastAPI]: +async def initialized_app(app_environment: EnvVarsDict) -> AsyncIterator[FastAPI]: app: FastAPI = create_app() async with LifespanManager(app, startup_timeout=30, shutdown_timeout=30): diff --git a/services/notifications/tests/unit/test_cli.py b/services/notifications/tests/unit/test_cli.py index bcfc7925b61c..6caedb8ddede 100644 --- a/services/notifications/tests/unit/test_cli.py +++ b/services/notifications/tests/unit/test_cli.py @@ -21,7 +21,7 @@ @pytest.fixture -def cli_runner(service_env: EnvVarsDict) -> CliRunner: +def cli_runner(app_environment: EnvVarsDict) -> CliRunner: return CliRunner() diff --git a/services/notifications/tests/unit/test_core_settings.py b/services/notifications/tests/unit/test_core_settings.py new file mode 100644 index 000000000000..92ac7f3dc9d4 --- /dev/null +++ b/services/notifications/tests/unit/test_core_settings.py @@ -0,0 +1,18 @@ +# pylint: disable=unused-variable +# pylint: disable=unused-argument +# pylint: disable=redefined-outer-name + + +from pytest_simcore.helpers.monkeypatch_envs import ( + EnvVarsDict, +) +from simcore_service_notifications.core.settings import ApplicationSettings + + +def test_valid_application_settings(mock_environment: EnvVarsDict): + assert mock_environment + + settings = ApplicationSettings() 
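Note on the factory-based entrypoints introduced in this change: replacing the module-level `the_app` singleton with an `app_factory()` callable defers settings parsing and logging setup until the server actually builds the application. A minimal sketch of how such a factory can be run outside the docker boot scripts, assuming only the `app_factory` shown above (port and log level here are illustrative, not taken from the service settings):

import uvicorn

if __name__ == "__main__":
    # mirrors the `uvicorn --factory <package>.main:app_factory` invocation used in
    # the services' docker/boot.sh; factory=True makes uvicorn call app_factory()
    # itself instead of importing a pre-built FastAPI instance
    uvicorn.run(
        "simcore_service_notifications.main:app_factory",
        factory=True,
        host="0.0.0.0",
        port=8000,  # illustrative port
        log_level="info",
    )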
# type: ignore + assert settings + + assert settings == ApplicationSettings.create_from_envs() diff --git a/services/payments/Dockerfile b/services/payments/Dockerfile index 78e6758868ef..23d1793eb339 100644 --- a/services/payments/Dockerfile +++ b/services/payments/Dockerfile @@ -2,7 +2,7 @@ # Define arguments in the global scope ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build FROM python:${PYTHON_VERSION}-slim-bookworm AS base-arm64 @@ -31,6 +31,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=private \ set -eux && \ apt-get update && \ apt-get install -y --no-install-recommends \ + fd-find \ gosu \ && apt-get clean -y \ && rm -rf /var/lib/apt/lists/* \ @@ -89,10 +90,7 @@ RUN uv venv "${VIRTUAL_ENV}" -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools + WORKDIR /build @@ -110,6 +108,9 @@ WORKDIR /build FROM build AS prod-only-deps ENV SC_BUILD_TARGET=prod-only-deps +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy WORKDIR /build/services/payments @@ -135,8 +136,6 @@ ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production ENV PYTHONOPTIMIZE=TRUE -# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode -ENV UV_COMPILE_BYTECODE=1 WORKDIR /home/scu diff --git a/services/payments/Makefile b/services/payments/Makefile index cf361c3c10eb..a5d4d241e1f4 100644 --- a/services/payments/Makefile +++ b/services/payments/Makefile @@ -15,7 +15,7 @@ openapi.json: .env-ignore ## produces openapi.json @set -o allexport; \ source $<; \ set +o allexport; \ - python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(the_app.openapi(), indent=2) )" > $@ + python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app_factory().openapi(), indent=2) )" > $@ # NOTE: Create using `ln -s path/to/osparc-config/repo.config .env-secret` diff --git a/services/payments/docker/boot.sh b/services/payments/docker/boot.sh index 1cc69d836653..e2d5b5f7d218 100755 --- a/services/payments/docker/boot.sh +++ b/services/payments/docker/boot.sh @@ -24,7 +24,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/payments - uv pip --quiet sync requirements/dev.txt + uv pip --quiet sync --link-mode=copy requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -33,7 +33,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy if command -v uv >/dev/null 2>&1; then - uv pip install debugpy + uv pip install --link-mode=copy debugpy else pip install debugpy fi @@ -48,19 +48,22 @@ SERVER_LOG_LEVEL=$(echo "${APP_LOG_LEVEL}" | tr '[:upper:]' '[:lower:]') echo "$INFO" "Log-level app/server: $APP_LOG_LEVEL/$SERVER_LOG_LEVEL" if [ "${SC_BOOT_MODE}" = "debug" ]; then - reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path "*/src/*" ! 
-path "*.*" -exec echo '--reload-dir {} \' \;) + reload_dir_packages=$(fdfind src /devel/packages --exec echo '--reload-dir {} ' | tr '\n' ' ') exec sh -c " cd services/payments/src/simcore_service_payments && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${PAYMENTS_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${PAYMENTS_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ - $reload_dir_packages + $reload_dir_packages \ --reload-dir . \ --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_payments.main:the_app \ + exec uvicorn \ + --factory simcore_service_payments.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/payments/docker/entrypoint.sh b/services/payments/docker/entrypoint.sh index 25153a6b2a2a..357d8b604d98 100755 --- a/services/payments/docker/entrypoint.sh +++ b/services/payments/docker/entrypoint.sh @@ -19,6 +19,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE @@ -56,10 +57,9 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; - # change user property of files already around + fdfind --owner ":$SC_USER_ID" --exclude proc --exec-batch chgrp --no-dereference "$CONT_GROUPNAME" . '/' echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fdfind --owner "$SC_USER_ID:" --exclude proc --exec-batch chown --no-dereference "$SC_USER_NAME" . 
'/' fi fi diff --git a/services/payments/openapi.json b/services/payments/openapi.json index 3a0aaf09fb74..5f40e6b27472 100644 --- a/services/payments/openapi.json +++ b/services/payments/openapi.json @@ -370,6 +370,7 @@ }, "password": { "type": "string", + "format": "password", "title": "Password" }, "scope": { @@ -397,6 +398,7 @@ "type": "null" } ], + "format": "password", "title": "Client Secret" } }, @@ -449,6 +451,9 @@ }, "docs_url": { "type": "string", + "maxLength": 2083, + "minLength": 1, + "format": "uri", "title": "Docs Url" } }, diff --git a/services/payments/requirements/_base.in b/services/payments/requirements/_base.in index cb2613eceaa9..2d6a373fa495 100644 --- a/services/payments/requirements/_base.in +++ b/services/payments/requirements/_base.in @@ -22,4 +22,4 @@ packaging python-jose python-multipart python-socketio # notifier -typer[all] +typer diff --git a/services/payments/requirements/_base.txt b/services/payments/requirements/_base.txt index 700654a65643..6a1af15296c9 100644 --- a/services/payments/requirements/_base.txt +++ b/services/payments/requirements/_base.txt @@ -10,7 +10,7 @@ aiofiles==24.1.0 # via -r requirements/../../../packages/service-library/requirements/_base.in aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -49,6 +49,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi asyncpg==0.30.0 @@ -79,11 +81,12 @@ certifi==2024.8.30 # httpcore # httpx # requests + # sentry-sdk cffi==1.17.1 # via cryptography charset-normalizer==3.4.0 # via requests -click==8.1.7 +click==8.2.1 # via # rich-toolkit # typer @@ -105,12 +108,6 @@ cryptography==44.0.0 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in -deprecated==1.2.15 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions dnspython==2.7.0 # via email-validator ecdsa==0.19.0 @@ -123,12 +120,14 @@ exceptiongroup==1.2.2 # via aio-pika fast-depends==2.4.12 # via faststream -fastapi==0.115.12 +fastapi==0.116.1 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi-lifespan-manager -fastapi-cli==0.0.7 +fastapi-cli==0.0.8 # via fastapi +fastapi-cloud-cli==0.1.5 + # via fastapi-cli fastapi-lifespan-manager==0.1.4 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in faststream==0.5.31 @@ -137,7 +136,7 @@ frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.66.0 +googleapis-common-protos==1.70.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -145,7 +144,7 @@ greenlet==3.1.1 # via sqlalchemy grpcio==1.68.0 # via opentelemetry-exporter-otlp-proto-grpc -h11==0.14.0 +h11==0.16.0 # via # httpcore # 
uvicorn @@ -154,7 +153,7 @@ h2==4.2.0 # via httpx hpack==4.1.0 # via h2 -httpcore==1.0.7 +httpcore==1.0.9 # via httpx httptools==0.6.4 # via uvicorn @@ -176,6 +175,7 @@ httpx==0.27.2 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi + # fastapi-cloud-cli hyperframe==6.1.0 # via h2 idna==3.10 @@ -205,13 +205,17 @@ jinja2==3.1.6 # -c requirements/../../../requirements/constraints.txt # -r requirements/_base.in # fastapi +jsonref==1.1.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.23.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema-specifications==2024.10.1 # via jsonschema -mako==1.3.6 +mako==1.3.10 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -240,7 +244,7 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.28.2 +opentelemetry-api==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc @@ -256,17 +260,17 @@ opentelemetry-api==1.28.2 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.28.2 +opentelemetry-exporter-otlp==1.34.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.28.2 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.28.2 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.28.2 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.49b2 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi @@ -276,33 +280,33 @@ opentelemetry-instrumentation==0.49b2 # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.49b2 +opentelemetry-instrumentation-aio-pika==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-asgi==0.49b2 +opentelemetry-instrumentation-asgi==0.55b1 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-asyncpg==0.49b2 - # via -r requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-instrumentation-fastapi==0.49b2 +opentelemetry-instrumentation-asyncpg==0.55b1 + # via -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-fastapi==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.49b2 +opentelemetry-instrumentation-httpx==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in 
-opentelemetry-instrumentation-logging==0.49b2 +opentelemetry-instrumentation-logging==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.49b2 +opentelemetry-instrumentation-redis==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.49b2 +opentelemetry-instrumentation-requests==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.28.2 +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.28.2 +opentelemetry-sdk==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.49b2 +opentelemetry-semantic-conventions==0.55b1 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi @@ -312,7 +316,7 @@ opentelemetry-semantic-conventions==0.49b2 # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.49b2 +opentelemetry-util-http==0.55b1 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi @@ -355,7 +359,7 @@ propcache==0.2.0 # via # aiohttp # yarl -protobuf==5.29.0 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto @@ -371,7 +375,7 @@ pycparser==2.22 # via cffi pycryptodome==3.21.0 # via stream-zip -pydantic==2.10.2 +pydantic==2.11.7 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -402,11 +406,12 @@ pydantic==2.10.2 # -r requirements/../../../packages/settings-library/requirements/_base.in # fast-depends # fastapi + # fastapi-cloud-cli # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.1 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.0 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -417,7 +422,7 @@ pydantic-extra-types==2.10.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -pydantic-settings==2.6.1 +pydantic-settings==2.7.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -447,7 +452,7 @@ python-dotenv==1.0.1 # via # pydantic-settings # uvicorn -python-engineio==4.10.1 +python-engineio==4.12.2 # via python-socketio python-jose==3.3.0 # via -r requirements/_base.in @@ -455,7 +460,7 @@ python-multipart==0.0.20 # via # -r requirements/_base.in # fastapi -python-socketio==5.11.4 +python-socketio==5.13.0 # via 
-r requirements/_base.in pyyaml==6.0.2 # via @@ -510,16 +515,20 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via opentelemetry-exporter-otlp-proto-http -rich==13.9.4 +rich==14.1.0 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # rich-toolkit # typer -rich-toolkit==0.14.7 - # via fastapi-cli +rich-toolkit==0.15.0 + # via + # fastapi-cli + # fastapi-cloud-cli +rignore==0.6.4 + # via fastapi-cloud-cli rpds-py==0.21.0 # via # jsonschema @@ -541,6 +550,8 @@ rsa==4.9 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # python-jose +sentry-sdk==2.35.0 + # via fastapi-cloud-cli shellingham==1.5.4 # via typer simple-websocket==1.1.0 @@ -552,6 +563,7 @@ six==1.16.0 sniffio==1.3.1 # via # anyio + # asgi-lifespan # httpx sqlalchemy==1.4.54 # via @@ -571,7 +583,7 @@ sqlalchemy==1.4.54 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/postgres-database/requirements/_base.in # alembic -starlette==0.41.3 +starlette==0.47.2 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -596,27 +608,36 @@ toolz==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.13.1 +typer==0.16.1 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fastapi-cli + # fastapi-cloud-cli types-python-dateutil==2.9.0.20241003 # via arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # aiodebug # alembic # fastapi # faststream + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pydantic # pydantic-core # pydantic-extra-types # rich-toolkit + # starlette # typer -urllib3==2.2.3 + # typing-inspection +typing-inspection==0.4.1 + # via pydantic +urllib3==2.5.0 # via # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -633,10 +654,12 @@ urllib3==2.2.3 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # requests + # sentry-sdk uvicorn==0.34.2 # via # fastapi # fastapi-cli + # fastapi-cloud-cli uvloop==0.21.0 # via uvicorn watchfiles==1.0.0 @@ -645,7 +668,6 @@ websockets==14.1 # via uvicorn wrapt==1.17.0 # via - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-httpx diff --git a/services/payments/requirements/_test.txt b/services/payments/requirements/_test.txt index df63ca3109be..ae1ec37d9443 100644 --- 
a/services/payments/requirements/_test.txt +++ b/services/payments/requirements/_test.txt @@ -2,7 +2,7 @@ aiohappyeyeballs==2.6.1 # via # -c requirements/_base.txt # aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -16,7 +16,9 @@ anyio==4.6.2.post1 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in attrs==24.2.0 # via # -c requirements/_base.txt @@ -53,12 +55,12 @@ greenlet==3.1.1 # via # -c requirements/_base.txt # sqlalchemy -h11==0.14.0 +h11==0.16.0 # via # -c requirements/_base.txt # httpcore # wsproto -httpcore==1.0.7 +httpcore==1.0.9 # via # -c requirements/_base.txt # httpx @@ -79,23 +81,29 @@ idna==3.10 iniconfig==2.0.0 # via pytest jsonref==1.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in multidict==6.1.0 # via # -c requirements/_base.txt # aiohttp # yarl -mypy==1.15.0 +mypy==1.16.1 # via sqlalchemy -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via mypy packaging==24.2 # via # -c requirements/_base.txt # pytest # pytest-sugar +pathspec==0.12.1 + # via mypy pluggy==1.5.0 - # via pytest + # via + # pytest + # pytest-cov pprintpp==0.4.0 # via pytest-icdiff propcache==0.2.0 @@ -103,7 +111,11 @@ propcache==0.2.0 # -c requirements/_base.txt # aiohttp # yarl -pytest==8.3.5 +pygments==2.18.0 + # via + # -c requirements/_base.txt + # pytest +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio @@ -111,13 +123,13 @@ pytest==8.3.5 # pytest-icdiff # pytest-mock # pytest-sugar -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in pytest-icdiff==0.9 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -127,15 +139,15 @@ python-dotenv==1.0.1 # via # -c requirements/_base.txt # -r requirements/_test.in -python-engineio==4.10.1 +python-engineio==4.12.2 # via # -c requirements/_base.txt # python-socketio -python-socketio==5.11.4 +python-socketio==5.13.0 # via # -c requirements/_base.txt # -r requirements/_test.in -requests==2.32.3 +requests==2.32.4 # via # -c requirements/_base.txt # docker @@ -168,14 +180,14 @@ types-python-jose==3.4.0.20250224 # via -r requirements/_test.in types-pyyaml==6.0.12.20241230 # via -r requirements/_test.in -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # mypy # sqlalchemy2-stubs tzdata==2025.1 # via faker -urllib3==2.2.3 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/services/payments/requirements/_tools.txt b/services/payments/requirements/_tools.txt index c49f6c3693da..8ef7a767c61a 100644 --- a/services/payments/requirements/_tools.txt +++ b/services/payments/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.7 +click==8.2.1 # via # -c requirements/_base.txt # black @@ -27,11 +27,11 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # -c requirements/_test.txt # black @@ -45,7 +45,10 @@ packaging==24.2 # black # build 
pathspec==0.12.1 - # via black + # via + # -c requirements/_test.txt + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -70,11 +73,11 @@ pyyaml==6.0.2 # pre-commit ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.2 +setuptools==80.9.0 # via pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/services/payments/src/simcore_service_payments/_meta.py b/services/payments/src/simcore_service_payments/_meta.py index f011e70ea6e9..fa80a0c5e983 100644 --- a/services/payments/src/simcore_service_payments/_meta.py +++ b/services/payments/src/simcore_service_payments/_meta.py @@ -1,6 +1,5 @@ -""" Application's metadata +"""Application's metadata""" -""" from typing import Final from models_library.basic_types import VersionStr @@ -14,7 +13,7 @@ PROJECT_NAME: Final[str] = info.project_name VERSION: Final[Version] = info.version API_VERSION: Final[VersionStr] = info.__version__ -APP_NAME: Final[str] = PROJECT_NAME +APP_NAME: Final[str] = info.app_name API_VTAG: Final[str] = info.api_prefix_path_tag SUMMARY: Final[str] = info.get_summary() diff --git a/services/payments/src/simcore_service_payments/api/rest/_acknowledgements.py b/services/payments/src/simcore_service_payments/api/rest/_acknowledgements.py index ca0d74c8e3e7..a76fef903f72 100644 --- a/services/payments/src/simcore_service_payments/api/rest/_acknowledgements.py +++ b/services/payments/src/simcore_service_payments/api/rest/_acknowledgements.py @@ -1,12 +1,12 @@ import logging from typing import Annotated +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException, status from models_library.api_schemas_payments.errors import ( PaymentMethodNotFoundError, PaymentNotFoundError, ) -from servicelib.logging_errors import create_troubleshotting_log_kwargs from servicelib.logging_utils import log_context from ..._constants import ACKED, PGDB @@ -80,7 +80,7 @@ async def acknowledge_payment( if ack.saved: if ack.saved.payment_method_id is None or not ack.saved.success: _logger.error( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( f"Got ack that {payment_id=} was completed but failed to save the payment-method used for the payment as requested.", error=RuntimeError("Failed to save payment-method after payment"), error_context={ diff --git a/services/payments/src/simcore_service_payments/api/rest/_health.py b/services/payments/src/simcore_service_payments/api/rest/_health.py index 948317cf883e..030d56afbdd6 100644 --- a/services/payments/src/simcore_service_payments/api/rest/_health.py +++ b/services/payments/src/simcore_service_payments/api/rest/_health.py @@ -23,9 +23,9 @@ class HealthCheckError(RuntimeError): async def healthcheck( rabbitmq_client: Annotated[ RabbitMQClient, Depends(get_rabbitmq_client_from_request) - ] + ], ) -> str: - _logger.info("Checking rabbit health check %s", rabbitmq_client.healthy) + _logger.debug("Checking rabbit health check %s", rabbitmq_client.healthy) if not rabbitmq_client.healthy: raise HealthCheckError(RABBITMQ_CLIENT_UNHEALTHY_MSG) diff --git a/services/payments/src/simcore_service_payments/api/rpc/_payments.py b/services/payments/src/simcore_service_payments/api/rpc/_payments.py index fe6e4db28dcf..801b47e512ec 100644 --- a/services/payments/src/simcore_service_payments/api/rpc/_payments.py +++ 
b/services/payments/src/simcore_service_payments/api/rpc/_payments.py @@ -1,6 +1,7 @@ import logging from decimal import Decimal +from common_library.logging.logging_base import get_log_record_extra from fastapi import FastAPI from models_library.api_schemas_payments.errors import ( PaymentsError, @@ -16,7 +17,7 @@ from models_library.users import UserID from models_library.wallets import WalletID from pydantic import EmailStr, HttpUrl -from servicelib.logging_utils import get_log_record_extra, log_context +from servicelib.logging_utils import log_context from servicelib.rabbitmq import RPCRouter from ...db.payments_transactions_repo import PaymentsTransactionsRepo @@ -80,7 +81,6 @@ async def cancel_payment( user_id: UserID, wallet_id: WalletID, ) -> None: - with log_context( _logger, logging.INFO, diff --git a/services/payments/src/simcore_service_payments/api/rpc/_payments_methods.py b/services/payments/src/simcore_service_payments/api/rpc/_payments_methods.py index 360dcf962c07..8c93fd7bde6e 100644 --- a/services/payments/src/simcore_service_payments/api/rpc/_payments_methods.py +++ b/services/payments/src/simcore_service_payments/api/rpc/_payments_methods.py @@ -1,6 +1,7 @@ import logging from decimal import Decimal +from common_library.logging.logging_base import get_log_record_extra from fastapi import FastAPI from models_library.api_schemas_payments.errors import ( PaymentsError, @@ -18,7 +19,7 @@ from models_library.users import UserID from models_library.wallets import WalletID from pydantic import EmailStr -from servicelib.logging_utils import get_log_record_extra, log_context +from servicelib.logging_utils import log_context from servicelib.rabbitmq import RPCRouter from ...db.payments_methods_repo import PaymentsMethodsRepo diff --git a/services/payments/src/simcore_service_payments/core/settings.py b/services/payments/src/simcore_service_payments/core/settings.py index 5d9c69d861b8..81f0cf237fe3 100644 --- a/services/payments/src/simcore_service_payments/core/settings.py +++ b/services/payments/src/simcore_service_payments/core/settings.py @@ -1,9 +1,11 @@ from decimal import Decimal from functools import cached_property -from typing import Annotated +from typing import Annotated, cast from common_library.basic_types import DEFAULT_FACTORY +from common_library.logging.logging_utils_filtering import LoggerName, MessageSubstring from models_library.basic_types import NonNegativeDecimal +from models_library.rabbitmq_basic_types import RPCNamespace from pydantic import ( AliasChoices, EmailStr, @@ -14,7 +16,7 @@ TypeAdapter, field_validator, ) -from servicelib.logging_utils_filtering import LoggerName, MessageSubstring +from servicelib.logging_utils import LogLevelInt from settings_library.application import BaseApplicationSettings from settings_library.basic_types import LogLevel, VersionTag from settings_library.email import SMTPSettings @@ -66,8 +68,8 @@ class _BaseApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ] = DEFAULT_FACTORY @cached_property - def LOG_LEVEL(self): # noqa: N802 - return self.PAYMENTS_LOGLEVEL + def log_level(self) -> LogLevelInt: + return cast(LogLevelInt, self.PAYMENTS_LOGLEVEL) @field_validator("PAYMENTS_LOGLEVEL", mode="before") @classmethod @@ -200,3 +202,8 @@ class ApplicationSettings(_BaseApplicationSettings): description="optional email (see notifier_email service)", ), ] + + PAYMENTS_WEBSERVER_RPC_NAMESPACE: Annotated[ + RPCNamespace, + Field(description="Namespace to connect to correct webserver's RPC interface"), + ] diff 
--git a/services/payments/src/simcore_service_payments/main.py b/services/payments/src/simcore_service_payments/main.py index 53e19bd22a10..604d4adaa1bd 100644 --- a/services/payments/src/simcore_service_payments/main.py +++ b/services/payments/src/simcore_service_payments/main.py @@ -1,24 +1,39 @@ -"""Main application to be deployed by uvicorn (or equivalent) server - -""" +"""Main application to be deployed by uvicorn (or equivalent) server""" import logging +from typing import Final +from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.logging_utils import config_all_loggers +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from simcore_service_payments.core.application import create_app from simcore_service_payments.core.settings import ApplicationSettings -_the_settings = ApplicationSettings.create_from_envs() +_logger = logging.getLogger(__name__) -# SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 -logging.basicConfig(level=_the_settings.log_level) # NOSONAR -logging.root.setLevel(_the_settings.log_level) -config_all_loggers( - log_format_local_dev_enabled=_the_settings.PAYMENTS_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=_the_settings.PAYMENTS_LOG_FILTER_MAPPING, - tracing_settings=_the_settings.PAYMENTS_TRACING, +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aiobotocore", + "aio_pika", + "aiormq", + "botocore", + "werkzeug", ) -# SINGLETON FastAPI app -the_app: FastAPI = create_app(_the_settings) + +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = create_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.PAYMENTS_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.PAYMENTS_LOG_FILTER_MAPPING, + tracing_settings=app_settings.PAYMENTS_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) + + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app diff --git a/services/payments/src/simcore_service_payments/services/auto_recharge_process_message.py b/services/payments/src/simcore_service_payments/services/auto_recharge_process_message.py index e87e77e5c442..0455b76eb9b9 100644 --- a/services/payments/src/simcore_service_payments/services/auto_recharge_process_message.py +++ b/services/payments/src/simcore_service_payments/services/auto_recharge_process_message.py @@ -4,17 +4,17 @@ from typing import cast from fastapi import FastAPI -from models_library.api_schemas_webserver import WEBSERVER_RPC_NAMESPACE from models_library.api_schemas_webserver.wallets import ( GetWalletAutoRecharge, PaymentMethodID, ) from models_library.basic_types import NonNegativeDecimal -from models_library.payments import InvoiceDataGet -from models_library.rabbitmq_basic_types import RPCMethodName from models_library.rabbitmq_messages import WalletCreditsMessage from models_library.wallets import WalletID from pydantic import TypeAdapter +from servicelib.rabbitmq.rpc_interfaces.webserver import ( + payments as webserver_payments_rpc, +) from simcore_service_payments.db.auto_recharge_repo import AutoRechargeRepo from simcore_service_payments.db.payments_methods_repo import PaymentsMethodsRepo from simcore_service_payments.db.payments_transactions_repo import ( @@ -163,14 +163,12 @@ async def _perform_auto_recharge( ): 
rabbitmq_rpc_client = get_rabbitmq_rpc_client(app) - result = await rabbitmq_rpc_client.request( - WEBSERVER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python("get_invoice_data"), + invoice_data_get = await webserver_payments_rpc.get_invoice_data( + rabbitmq_rpc_client, user_id=payment_method_db.user_id, dollar_amount=wallet_auto_recharge.top_up_amount_in_usd, product_name=rabbit_message.product_name, ) - invoice_data_get = TypeAdapter(InvoiceDataGet).validate_python(result) await pay_with_payment_method( gateway=PaymentsGatewayApi.get_from_app_state(app), diff --git a/services/payments/src/simcore_service_payments/services/notifier_email.py b/services/payments/src/simcore_service_payments/services/notifier_email.py index 29a423837df8..8c6f41452dc6 100644 --- a/services/payments/src/simcore_service_payments/services/notifier_email.py +++ b/services/payments/src/simcore_service_payments/services/notifier_email.py @@ -10,12 +10,12 @@ import httpx from aiosmtplib import SMTP from attr import dataclass +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from jinja2 import DictLoader, Environment, select_autoescape from models_library.api_schemas_webserver.wallets import PaymentMethodTransaction from models_library.products import ProductName from models_library.users import UserID from pydantic import EmailStr -from servicelib.logging_errors import create_troubleshotting_log_kwargs from settings_library.email import EmailProtocol, SMTPSettings from tenacity import ( retry, @@ -239,7 +239,7 @@ async def _create_user_email( except Exception as exc: # pylint: disable=broad-exception-caught _logger.exception( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( "Cannot attach invoice to payment. 
Email sent w/o attached pdf invoice", error=exc, error_context={ diff --git a/services/payments/src/simcore_service_payments/services/postgres.py b/services/payments/src/simcore_service_payments/services/postgres.py index fd84fba45ce7..7fecbb038ec2 100644 --- a/services/payments/src/simcore_service_payments/services/postgres.py +++ b/services/payments/src/simcore_service_payments/services/postgres.py @@ -2,6 +2,7 @@ from servicelib.fastapi.db_asyncpg_engine import close_db_connection, connect_to_db from sqlalchemy.ext.asyncio import AsyncEngine +from .._meta import APP_NAME from ..core.settings import ApplicationSettings @@ -16,7 +17,7 @@ def setup_postgres(app: FastAPI): async def _on_startup() -> None: settings: ApplicationSettings = app.state.settings - await connect_to_db(app, settings.PAYMENTS_POSTGRES) + await connect_to_db(app, settings.PAYMENTS_POSTGRES, application_name=APP_NAME) assert app.state.engine # nosec assert isinstance(app.state.engine, AsyncEngine) # nosec diff --git a/services/payments/tests/conftest.py b/services/payments/tests/conftest.py index 39608fe4e70a..45b36d3262a9 100644 --- a/services/payments/tests/conftest.py +++ b/services/payments/tests/conftest.py @@ -24,6 +24,7 @@ "pytest_simcore.faker_products_data", "pytest_simcore.faker_users_data", "pytest_simcore.httpbin_service", + "pytest_simcore.logging", "pytest_simcore.postgres_service", "pytest_simcore.socketio", "pytest_simcore.rabbit_service", diff --git a/services/payments/tests/unit/test_cli.py b/services/payments/tests/unit/test_cli.py index 1fb1db4ededa..4701f992ede1 100644 --- a/services/payments/tests/unit/test_cli.py +++ b/services/payments/tests/unit/test_cli.py @@ -55,6 +55,6 @@ def test_list_settings(cli_runner: CliRunner, app_environment: EnvVarsDict): def test_main(app_environment: EnvVarsDict): - from simcore_service_payments.main import the_app + from simcore_service_payments.main import app_factory - assert the_app + app_factory() diff --git a/services/payments/tests/unit/test_core_settings.py b/services/payments/tests/unit/test_core_settings.py index a1d84644d62e..3c7810c73f78 100644 --- a/services/payments/tests/unit/test_core_settings.py +++ b/services/payments/tests/unit/test_core_settings.py @@ -3,22 +3,14 @@ # pylint: disable=unused-variable # pylint: disable=too-many-arguments - from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_payments.core.settings import ApplicationSettings -def test_valid_web_application_settings(app_environment: EnvVarsDict): - """ - We validate actual envfiles (e.g. 
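The `application_name=APP_NAME` now forwarded to `connect_to_db` above tags the service's pooled connections so they can be identified server-side, e.g. in `pg_stat_activity`. A rough illustration of that effect with a direct asyncpg connection; the DSN below uses the local development credentials, and the exact plumbing inside servicelib's `connect_to_db` is not shown here:

import asyncio

import asyncpg


async def _show_connection_tag() -> None:
    conn = await asyncpg.connect(
        dsn="postgresql://scu:adminadmin@localhost:5432/simcoredb",  # local dev credentials
        server_settings={"application_name": "simcore_service_payments"},
    )
    try:
        # the tag set above is visible to anyone inspecting server activity
        row = await conn.fetchrow(
            "SELECT application_name FROM pg_stat_activity WHERE pid = pg_backend_pid()"
        )
        print(row["application_name"])  # -> simcore_service_payments
    finally:
        await conn.close()


asyncio.run(_show_connection_tag())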
repo.config files) by passing them via the CLI - - $ ln -s /path/to/osparc-config/deployments/mydeploy.com/repo.config .secrets - $ pytest --external-envfile=.secrets --pdb tests/unit/test_core_settings.py - - """ +def test_valid_application_settings(app_environment: EnvVarsDict): settings = ApplicationSettings() # type: ignore assert settings assert settings == ApplicationSettings.create_from_envs() - assert app_environment["PAYMENTS_LOGLEVEL"] == settings.LOG_LEVEL + assert app_environment["PAYMENTS_LOGLEVEL"] == settings.PAYMENTS_LOGLEVEL diff --git a/services/payments/tests/unit/test_db_payments_users_repo.py b/services/payments/tests/unit/test_db_payments_users_repo.py index 4cff0108033d..4f63a17f4431 100644 --- a/services/payments/tests/unit/test_db_payments_users_repo.py +++ b/services/payments/tests/unit/test_db_payments_users_repo.py @@ -14,10 +14,12 @@ from models_library.users import UserID from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.postgres_tools import insert_and_get_row_lifespan +from pytest_simcore.helpers.postgres_users import ( + insert_and_get_user_and_secrets_lifespan, +) from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_postgres_database.models.payments_transactions import payments_transactions from simcore_postgres_database.models.products import products -from simcore_postgres_database.models.users import users from simcore_service_payments.db.payment_users_repo import PaymentsUsersRepo from simcore_service_payments.services.postgres import get_engine @@ -60,14 +62,10 @@ async def user( injects a user in db """ assert user_id == user["id"] - async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup - get_engine(app), - table=users, - values=user, - pk_col=users.c.id, - pk_value=user["id"], - ) as row: - yield row + async with insert_and_get_user_and_secrets_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup + get_engine(app), **user + ) as user_row: + yield user_row @pytest.fixture diff --git a/services/payments/tests/unit/test_services_auto_recharge_listener.py b/services/payments/tests/unit/test_services_auto_recharge_listener.py index a5e4115c9fe9..80283863144d 100644 --- a/services/payments/tests/unit/test_services_auto_recharge_listener.py +++ b/services/payments/tests/unit/test_services_auto_recharge_listener.py @@ -185,7 +185,7 @@ async def mock_rpc_client( @pytest.fixture -async def mock_rpc_server( +async def mock_webserver_rpc_server( rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], mocker: MockerFixture, ) -> RabbitMQRPCClient: @@ -202,7 +202,7 @@ async def get_invoice_data( product_name: ProductName, ) -> InvoiceDataGet: return InvoiceDataGet.model_validate( - InvoiceDataGet.model_config["json_schema_extra"]["examples"][0] + InvoiceDataGet.model_json_schema()["examples"][0] ) await rpc_server.register_router(router, namespace=WEBSERVER_RPC_NAMESPACE) @@ -230,7 +230,7 @@ async def test_process_message__whole_autorecharge_flow_success( wallet_id: int, populate_test_db: None, mocked_pay_with_payment_method: mock.AsyncMock, - mock_rpc_server: RabbitMQRPCClient, + mock_webserver_rpc_server: RabbitMQRPCClient, mock_rpc_client: RabbitMQRPCClient, mock_resoruce_usage_tracker_service_api: MockRouter, postgres_db: sa.engine.Engine, diff --git a/services/postgres/README.md b/services/postgres/README.md new file mode 100644 index 000000000000..014aeeddb5be --- /dev/null +++ b/services/postgres/README.md @@ -0,0 +1,5 @@ 
+## Postgres configuration + +Read and follow the instructions in the `./scripts/init.sql` script. It needs to be executed once in every postgres database we run (both self-hosted and RDS). + +The create-role and create-user scripts need to be run on demand (e.g. when a read-only user is required). Generate them from the templates using the repo config values, then read and follow the instructions inside. Each needs to be executed only once. diff --git a/services/postgres/scripts/.gitignore b/services/postgres/scripts/.gitignore index 9072771094f3..dd4d95097dbd 100644 --- a/services/postgres/scripts/.gitignore +++ b/services/postgres/scripts/.gitignore @@ -1,3 +1,4 @@ * !.gitignore -!*.template.* +!*.template +!init.sql diff --git a/services/postgres/scripts/create-readonly-role.sql.template b/services/postgres/scripts/create-readonly-role.sql.template new file mode 100644 index 000000000000..5e12dc94822d --- /dev/null +++ b/services/postgres/scripts/create-readonly-role.sql.template @@ -0,0 +1,32 @@ +/* +Create read-only role for ${POSTGRES_DB} database. + +This role can be used to give read-only access to the ${POSTGRES_DB} database +to users. + +Permission grants inspired by: https://stackoverflow.com/questions/760210/how-do-you-create-a-read-only-user-in-postgresql/762649#762649 +IMPORTANT: must be executed while connected to the ${POSTGRES_DB} database + as it refers to public schema in that database. +*/ + +CREATE ROLE ${POSTGRES_DB}_readonly NOLOGIN; + +GRANT CONNECT ON DATABASE ${POSTGRES_DB} TO ${POSTGRES_DB}_readonly; + +-- https://stackoverflow.com/questions/17338621/what-does-grant-usage-on-schema-do-exactly +GRANT USAGE ON SCHEMA public TO ${POSTGRES_DB}_readonly; + +-- Grant permissions for (existing) tables, sequences, functions +GRANT SELECT ON ALL TABLES IN SCHEMA public TO ${POSTGRES_DB}_readonly; +GRANT SELECT ON ALL SEQUENCES IN SCHEMA public TO ${POSTGRES_DB}_readonly; +GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO ${POSTGRES_DB}_readonly; + +-- Grant permissions for (future) tables, sequences, functions +ALTER DEFAULT PRIVILEGES IN SCHEMA public + GRANT SELECT ON TABLES TO ${POSTGRES_DB}_readonly; +ALTER DEFAULT PRIVILEGES IN SCHEMA public + GRANT SELECT ON SEQUENCES TO ${POSTGRES_DB}_readonly; +ALTER DEFAULT PRIVILEGES IN SCHEMA public + GRANT EXECUTE ON FUNCTIONS TO ${POSTGRES_DB}_readonly; + +SELECT * FROM pg_roles WHERE rolname NOT LIKE 'pg_%';
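These `*.sql.template` scripts are meant to be rendered with the repo's config values before being applied (see the README above). A minimal sketch of that generation step, assuming the `${VAR}` placeholders are filled from environment variables such as `POSTGRES_DB`; the actual tooling used to render and apply the scripts is not part of this diff:

import os
import pathlib
import string

_TEMPLATE = pathlib.Path("services/postgres/scripts/create-readonly-role.sql.template")


def render_sql_template(template_path: pathlib.Path) -> str:
    # substitute() raises KeyError if a placeholder (e.g. POSTGRES_DB) is not set,
    # which is safer than silently emitting an incomplete SQL script
    return string.Template(template_path.read_text()).substitute(os.environ)


if __name__ == "__main__":
    # the rendered SQL can then be piped into psql against the target database
    print(render_sql_template(_TEMPLATE))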
diff --git a/services/postgres/scripts/create-readonly-user.sql.template b/services/postgres/scripts/create-readonly-user.sql.template index 28b14f53d4fb..8f3dbd19d8f0 100644 --- a/services/postgres/scripts/create-readonly-user.sql.template +++ b/services/postgres/scripts/create-readonly-user.sql.template @@ -1,22 +1,6 @@ --- SQL script to create a read-only user and grant privileges - - ---Create the read-only user with a password CREATE USER ${POSTGRES_READONLY_USER} WITH PASSWORD '${POSTGRES_READONLY_PASSWORD}'; ---Grant CONNECT privilege to the database (e.g., 'foo' is the database name) -GRANT CONNECT ON DATABASE ${POSTGRES_DB} TO ${POSTGRES_READONLY_USER}; - ---Grant USAGE privilege on the **public** schema -GRANT USAGE ON SCHEMA public TO ${POSTGRES_READONLY_USER}; - ---Grant SELECT privilege on all existing tables and sequencies in the **public** schema -GRANT SELECT ON ALL TABLES IN SCHEMA public TO ${POSTGRES_READONLY_USER}; -GRANT SELECT ON ALL SEQUENCES IN SCHEMA public TO ${POSTGRES_READONLY_USER}; - ---Ensure that future tables created in the public schema and sequencies will have SELECT privilege for the read-only user -ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT ON TABLES TO ${POSTGRES_READONLY_USER}; -ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT ON SEQUENCES TO ${POSTGRES_READONLY_USER}; +-- Grant read-only role (privileges) to the user +GRANT ${POSTGRES_DB}_readonly TO ${POSTGRES_READONLY_USER}; --- Listing all users -SELECT * FROM pg_roles; +SELECT * FROM pg_roles WHERE rolname NOT LIKE 'pg_%'; diff --git a/services/postgres/scripts/init.sql b/services/postgres/scripts/init.sql new file mode 100644 index 000000000000..532499ac6a79 --- /dev/null +++ b/services/postgres/scripts/init.sql @@ -0,0 +1,13 @@ +/* +Do not allow users to create new objects in the public schema + +Must be executed against every created database (e.g. for simcore, for metabase, ...) +(as long as we use Postgres 14 or earlier) + +Sources: +* https://wiki.postgresql.org/wiki/A_Guide_to_CVE-2018-1058:_Protect_Your_Search_Path +* https://www.reddit.com/r/PostgreSQL/comments/1hvxw0s/understanding_the_public_schema_in_postgresql/ +*/ + +-- As a superuser, run the following command in all of your databases +REVOKE CREATE ON SCHEMA public FROM PUBLIC; diff --git a/services/postgres/scripts/remove-readonly-role.sql.template b/services/postgres/scripts/remove-readonly-role.sql.template new file mode 100644 index 000000000000..1b8b03c5a3ad --- /dev/null +++ b/services/postgres/scripts/remove-readonly-role.sql.template @@ -0,0 +1,17 @@ +-- Make sure this role is not used by any user or else this script will fail + +REVOKE CONNECT ON DATABASE ${POSTGRES_DB} FROM ${POSTGRES_DB}_readonly; + +REVOKE ALL PRIVILEGES ON SCHEMA public FROM ${POSTGRES_DB}_readonly; + +REVOKE ALL PRIVILEGES ON ALL TABLES IN SCHEMA public FROM ${POSTGRES_DB}_readonly; +REVOKE ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public FROM ${POSTGRES_DB}_readonly; +REVOKE ALL PRIVILEGES ON ALL FUNCTIONS IN SCHEMA public FROM ${POSTGRES_DB}_readonly; + +ALTER DEFAULT PRIVILEGES IN SCHEMA public REVOKE ALL ON TABLES FROM ${POSTGRES_DB}_readonly; +ALTER DEFAULT PRIVILEGES IN SCHEMA public REVOKE ALL ON SEQUENCES FROM ${POSTGRES_DB}_readonly; +ALTER DEFAULT PRIVILEGES IN SCHEMA public REVOKE ALL ON FUNCTIONS FROM ${POSTGRES_DB}_readonly; + +DROP ROLE IF EXISTS ${POSTGRES_DB}_readonly; + +SELECT * FROM pg_roles WHERE rolname NOT LIKE 'pg_%'; diff --git a/services/postgres/scripts/remove-readonly-user.sql.template b/services/postgres/scripts/remove-readonly-user.sql.template index 5a1435ed9783..693cefd80a82 100644 --- a/services/postgres/scripts/remove-readonly-user.sql.template +++ b/services/postgres/scripts/remove-readonly-user.sql.template @@ -1,16 +1,6 @@ --- Revoke all privileges the user has on the public schema -REVOKE ALL PRIVILEGES ON SCHEMA public FROM ${POSTGRES_READONLY_USER}; +-- Revoke readonly role from user +REVOKE ${POSTGRES_DB}_readonly FROM ${POSTGRES_READONLY_USER}; --- Revoke all privileges the user has on tables and sequences in the public schema -REVOKE ALL PRIVILEGES ON ALL TABLES IN SCHEMA public FROM ${POSTGRES_READONLY_USER}; -REVOKE ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public FROM ${POSTGRES_READONLY_USER}; - --- Revoke any future privileges set via ALTER DEFAULT PRIVILEGES -ALTER DEFAULT PRIVILEGES IN SCHEMA public REVOKE ALL ON TABLES FROM ${POSTGRES_READONLY_USER}; -ALTER DEFAULT PRIVILEGES IN SCHEMA public REVOKE ALL ON SEQUENCES FROM ${POSTGRES_READONLY_USER}; - --- Drop the user -DROP USER ${POSTGRES_READONLY_USER}; - --- Listing all users -SELECT * FROM pg_roles; +SELECT * FROM pg_roles WHERE rolname NOT LIKE 
'pg_%'; diff --git a/services/resource-usage-tracker/Dockerfile b/services/resource-usage-tracker/Dockerfile index 730e73b70eab..b19ed19a4647 100644 --- a/services/resource-usage-tracker/Dockerfile +++ b/services/resource-usage-tracker/Dockerfile @@ -2,7 +2,7 @@ # Define arguments in the global scope ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build FROM python:${PYTHON_VERSION}-slim-bookworm AS base-arm64 @@ -31,6 +31,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=private \ set -eux && \ apt-get update && \ apt-get install -y --no-install-recommends \ + fd-find \ gosu \ && apt-get clean -y \ && rm -rf /var/lib/apt/lists/* \ @@ -90,10 +91,7 @@ RUN uv venv "${VIRTUAL_ENV}" -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools + WORKDIR /build @@ -110,6 +108,9 @@ WORKDIR /build FROM build AS prod-only-deps ENV SC_BUILD_TARGET=prod-only-deps +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy WORKDIR /build/services/resource-usage-tracker @@ -135,8 +136,6 @@ ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production ENV PYTHONOPTIMIZE=TRUE -# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode -ENV UV_COMPILE_BYTECODE=1 WORKDIR /home/scu diff --git a/services/resource-usage-tracker/Makefile b/services/resource-usage-tracker/Makefile index d6d8745bc13e..cb0bbe708f57 100644 --- a/services/resource-usage-tracker/Makefile +++ b/services/resource-usage-tracker/Makefile @@ -10,4 +10,4 @@ include ../../scripts/common-service.Makefile openapi-specs: openapi.json openapi.json: ## produces openapi.json # generating openapi specs file (need to have the environment set for this) - @python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(the_app.openapi(), indent=2) )" > $@ + @python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app_factory().openapi(), indent=2) )" > $@ diff --git a/services/resource-usage-tracker/VERSION b/services/resource-usage-tracker/VERSION index 3eefcb9dd5b3..7dea76edb3dc 100644 --- a/services/resource-usage-tracker/VERSION +++ b/services/resource-usage-tracker/VERSION @@ -1 +1 @@ -1.0.0 +1.0.1 diff --git a/services/resource-usage-tracker/docker/boot.sh b/services/resource-usage-tracker/docker/boot.sh index fe90bb170508..372e0f0d730a 100755 --- a/services/resource-usage-tracker/docker/boot.sh +++ b/services/resource-usage-tracker/docker/boot.sh @@ -24,7 +24,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/resource-usage-tracker - uv pip --quiet sync requirements/dev.txt + uv pip --quiet sync --link-mode=copy requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -33,7 +33,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy if command -v uv >/dev/null 2>&1; then - uv pip install debugpy + uv pip install --link-mode=copy debugpy else pip install debugpy fi @@ -48,19 +48,22 @@ SERVER_LOG_LEVEL=$(echo "${APP_LOG_LEVEL}" | tr '[:upper:]' '[:lower:]') echo "$INFO" "Log-level app/server: $APP_LOG_LEVEL/$SERVER_LOG_LEVEL" if [ "${SC_BOOT_MODE}" = "debug" ]; then - reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path "*/src/*" ! 
-path "*.*" -exec echo '--reload-dir {} \' \;) + reload_dir_packages=$(fdfind src /devel/packages --exec echo '--reload-dir {} ' | tr '\n' ' ') exec sh -c " cd services/resource-usage-tracker/src/simcore_service_resource_usage_tracker && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${RESOURCE_USAGE_TRACKER_REMOTE_DEBUGGING_PORT} -m uvicorn main:the_app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${RESOURCE_USAGE_TRACKER_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:app_factory \ --host 0.0.0.0 \ --reload \ - $reload_dir_packages + $reload_dir_packages \ --reload-dir . \ --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_resource_usage_tracker.main:the_app \ + exec uvicorn \ + --factory simcore_service_resource_usage_tracker.main:app_factory \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" fi diff --git a/services/resource-usage-tracker/docker/entrypoint.sh b/services/resource-usage-tracker/docker/entrypoint.sh index e89ad5408a31..a319c6824d73 100755 --- a/services/resource-usage-tracker/docker/entrypoint.sh +++ b/services/resource-usage-tracker/docker/entrypoint.sh @@ -19,6 +19,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" # # DEVELOPMENT MODE @@ -56,10 +57,9 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; - # change user property of files already around + fdfind --owner ":$SC_USER_ID" --exclude proc --exec-batch chgrp --no-dereference "$CONT_GROUPNAME" . '/' echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fdfind --owner "$SC_USER_ID:" --exclude proc --exec-batch chown --no-dereference "$SC_USER_NAME" . 
'/' fi fi diff --git a/services/resource-usage-tracker/openapi.json b/services/resource-usage-tracker/openapi.json index cef757856bf2..abad2c654aaf 100644 --- a/services/resource-usage-tracker/openapi.json +++ b/services/resource-usage-tracker/openapi.json @@ -3,7 +3,7 @@ "info": { "title": "simcore-service-resource-usage-tracker web API", "description": "Service that collects and stores computational resources usage used in osparc-simcore", - "version": "1.0.0" + "version": "1.0.1" }, "paths": { "/": { @@ -605,6 +605,9 @@ }, "docs_url": { "type": "string", + "maxLength": 2083, + "minLength": 1, + "format": "uri", "title": "Docs Url" } }, diff --git a/services/resource-usage-tracker/requirements/_base.in b/services/resource-usage-tracker/requirements/_base.in index 77cf7864ca68..e09b50a03ff2 100644 --- a/services/resource-usage-tracker/requirements/_base.in +++ b/services/resource-usage-tracker/requirements/_base.in @@ -20,4 +20,4 @@ aiocache packaging prometheus_api_client shortuuid -typer[all] +typer diff --git a/services/resource-usage-tracker/requirements/_base.txt b/services/resource-usage-tracker/requirements/_base.txt index 9a87a75c05c8..8aa80f04a4b5 100644 --- a/services/resource-usage-tracker/requirements/_base.txt +++ b/services/resource-usage-tracker/requirements/_base.txt @@ -27,7 +27,7 @@ aiofiles==23.2.1 # aioboto3 aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -83,6 +83,8 @@ arrow==1.3.0 # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in +asgi-lifespan==2.1.0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi async-timeout==4.0.3 @@ -134,9 +136,10 @@ certifi==2024.2.2 # httpcore # httpx # requests + # sentry-sdk charset-normalizer==3.3.2 # via requests -click==8.1.7 +click==8.2.1 # via # rich-toolkit # typer @@ -147,12 +150,6 @@ cycler==0.12.1 # via matplotlib dateparser==1.2.0 # via prometheus-api-client -deprecated==1.2.14 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions dnspython==2.6.1 # via email-validator email-validator==2.1.1 @@ -161,12 +158,14 @@ email-validator==2.1.1 # pydantic fast-depends==2.4.12 # via faststream -fastapi==0.115.12 +fastapi==0.116.1 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi-lifespan-manager -fastapi-cli==0.0.7 +fastapi-cli==0.0.8 # via fastapi +fastapi-cloud-cli==0.1.5 + # via fastapi-cli fastapi-lifespan-manager==0.1.4 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in faststream==0.5.31 @@ -179,7 +178,7 @@ frozenlist==1.4.1 # via # aiohttp # aiosignal -googleapis-common-protos==1.65.0 +googleapis-common-protos==1.70.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -187,7 +186,7 @@ greenlet==3.0.3 # via sqlalchemy grpcio==1.66.0 # via 
opentelemetry-exporter-otlp-proto-grpc -h11==0.14.0 +h11==0.16.0 # via # httpcore # uvicorn @@ -197,7 +196,7 @@ hpack==4.1.0 # via h2 httmock==1.4.0 # via prometheus-api-client -httpcore==1.0.4 +httpcore==1.0.9 # via httpx httptools==0.6.4 # via uvicorn @@ -231,6 +230,7 @@ httpx==0.27.0 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi + # fastapi-cloud-cli hyperframe==6.1.0 # via h2 idna==3.6 @@ -276,6 +276,12 @@ jmespath==1.0.1 # aiobotocore # boto3 # botocore +jsonref==1.1.0 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.21.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in @@ -286,7 +292,7 @@ jsonschema-specifications==2023.7.1 # via jsonschema kiwisolver==1.4.5 # via matplotlib -mako==1.3.2 +mako==1.3.10 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -336,7 +342,7 @@ numpy==1.26.4 # matplotlib # pandas # prometheus-api-client -opentelemetry-api==1.26.0 +opentelemetry-api==1.34.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in @@ -355,19 +361,19 @@ opentelemetry-api==1.26.0 # opentelemetry-propagator-aws-xray # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.26.0 +opentelemetry-exporter-otlp==1.34.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.26.0 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.26.0 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.26.0 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.47b0 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi @@ -378,47 +384,50 @@ opentelemetry-instrumentation==0.47b0 # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.47b0 +opentelemetry-instrumentation-aio-pika==0.55b1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-asgi==0.47b0 
+opentelemetry-instrumentation-asgi==0.55b1 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-asyncpg==0.47b0 - # via -r requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-instrumentation-botocore==0.47b0 +opentelemetry-instrumentation-asyncpg==0.55b1 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-botocore==0.55b1 # via -r requirements/../../../packages/aws-library/requirements/_base.in -opentelemetry-instrumentation-fastapi==0.47b0 +opentelemetry-instrumentation-fastapi==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.47b0 +opentelemetry-instrumentation-httpx==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-logging==0.47b0 +opentelemetry-instrumentation-logging==0.55b1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.47b0 +opentelemetry-instrumentation-redis==0.55b1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.47b0 +opentelemetry-instrumentation-requests==0.55b1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-propagator-aws-xray==1.0.1 +opentelemetry-propagator-aws-xray==1.0.2 # via opentelemetry-instrumentation-botocore -opentelemetry-proto==1.26.0 +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.26.0 +opentelemetry-sdk==1.34.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.47b0 +opentelemetry-semantic-conventions==0.55b1 # via + # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-botocore @@ -427,7 +436,7 @@ opentelemetry-semantic-conventions==0.47b0 # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.47b0 +opentelemetry-util-http==0.55b1 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi @@ -482,6 +491,7 @@ packaging==24.0 # via # -r requirements/_base.in # matplotlib + # opentelemetry-instrumentation pamqp==3.3.0 # via aiormq pandas==2.2.1 @@ -496,7 +506,7 @@ propcache==0.3.1 # via # aiohttp # yarl -protobuf==4.25.4 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto @@ -508,7 +518,7 @@ psycopg2-binary==2.9.9 # via sqlalchemy pycryptodome==3.21.0 # via stream-zip -pydantic==2.10.2 +pydantic==2.11.7 # via # -c 
requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -563,11 +573,12 @@ pydantic==2.10.2 # -r requirements/../../../packages/settings-library/requirements/_base.in # fast-depends # fastapi + # fastapi-cloud-cli # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.1 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.9.0 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -586,7 +597,7 @@ pydantic-extra-types==2.9.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -pydantic-settings==2.6.1 +pydantic-settings==2.7.0 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -741,12 +752,12 @@ referencing==0.29.3 # jsonschema-specifications regex==2023.12.25 # via dateparser -requests==2.32.2 +requests==2.32.4 # via # httmock # opentelemetry-exporter-otlp-proto-http # prometheus-api-client -rich==13.7.1 +rich==14.1.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -754,16 +765,20 @@ rich==13.7.1 # -r requirements/../../../packages/settings-library/requirements/_base.in # rich-toolkit # typer -rich-toolkit==0.14.7 - # via fastapi-cli +rich-toolkit==0.15.0 + # via + # fastapi-cli + # fastapi-cloud-cli +rignore==0.6.4 + # via fastapi-cloud-cli rpds-py==0.18.0 # via # jsonschema # referencing s3transfer==0.11.3 # via boto3 -setuptools==74.0.0 - # via opentelemetry-instrumentation +sentry-sdk==2.35.0 + # via fastapi-cloud-cli sh==2.0.6 # via -r requirements/../../../packages/aws-library/requirements/_base.in shellingham==1.5.4 @@ -775,6 +790,7 @@ six==1.16.0 sniffio==1.3.1 # via # anyio + # asgi-lifespan # httpx sqlalchemy==1.4.52 # via @@ -806,7 +822,7 @@ sqlalchemy==1.4.52 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/postgres-database/requirements/_base.in # alembic -starlette==0.41.2 +starlette==0.47.2 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -851,7 +867,7 @@ tqdm==4.66.2 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.12.3 +typer==0.16.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -859,6 +875,7 @@ typer==0.12.3 # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fastapi-cli + # fastapi-cloud-cli types-aiobotocore==2.19.0 # via -r requirements/../../../packages/aws-library/requirements/_base.in types-aiobotocore-ec2==2.19.0 @@ -871,27 +888,36 @@ types-awscrt==0.20.5 # via botocore-stubs types-python-dateutil==2.9.0.20240316 # via arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # aiodebug # aiodocker # alembic # fastapi # faststream + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pydantic # pydantic-core + # pydantic-extra-types # rich-toolkit + # starlette # typer # types-aiobotocore # types-aiobotocore-ec2 # types-aiobotocore-s3 # types-aiobotocore-ssm + # typing-inspection +typing-inspection==0.4.1 + # via pydantic tzdata==2024.1 # via pandas tzlocal==5.2 # via dateparser -urllib3==2.2.3 +urllib3==2.5.0 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -921,10 +947,12 @@ urllib3==2.2.3 # -c requirements/../../../requirements/constraints.txt # botocore # requests + # sentry-sdk uvicorn==0.34.2 # via # fastapi # fastapi-cli + # fastapi-cloud-cli uvloop==0.21.0 # via uvicorn watchfiles==0.21.0 @@ -934,9 +962,9 @@ websockets==12.0 wrapt==1.16.0 # via # aiobotocore - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika + # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis yarl==1.20.0 # via diff --git a/services/resource-usage-tracker/requirements/_test.txt b/services/resource-usage-tracker/requirements/_test.txt index 9c229a95f2d0..f9592b1fa886 100644 --- a/services/resource-usage-tracker/requirements/_test.txt +++ b/services/resource-usage-tracker/requirements/_test.txt @@ -13,7 +13,9 @@ anyio==4.3.0 # -c requirements/_base.txt # httpx asgi-lifespan==2.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in attrs==23.2.0 # via # -c requirements/_base.txt @@ -52,7 +54,7 @@ charset-normalizer==3.3.2 # via # -c requirements/_base.txt # requests -click==8.1.7 +click==8.2.1 # via # -c requirements/_base.txt # flask @@ -77,7 +79,7 @@ flask==3.1.0 # via # flask-cors # moto -flask-cors==5.0.1 +flask-cors==6.0.1 # via moto graphql-core==3.2.6 # via moto @@ -85,11 +87,11 @@ greenlet==3.0.3 # via # -c requirements/_base.txt # sqlalchemy -h11==0.14.0 +h11==0.16.0 # via # -c 
requirements/_base.txt # httpcore -httpcore==1.0.4 +httpcore==1.0.9 # via # -c requirements/_base.txt # httpx @@ -145,7 +147,7 @@ lazy-object-proxy==1.10.0 # via openapi-spec-validator lupa==2.4 # via fakeredis -mako==1.3.2 +mako==1.3.10 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -160,9 +162,9 @@ moto==5.1.4 # via -r requirements/_test.in mpmath==1.3.0 # via sympy -mypy==1.15.0 +mypy==1.16.1 # via sqlalchemy -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via mypy networkx==3.4.2 # via cfn-lint @@ -177,39 +179,47 @@ packaging==24.0 # pytest-sugar pathable==0.4.4 # via jsonschema-path +pathspec==0.12.1 + # via mypy pluggy==1.5.0 - # via pytest + # via + # pytest + # pytest-cov ply==3.11 # via jsonpath-ng py-partiql-parser==0.6.1 # via moto pycparser==2.22 # via cffi -pydantic==2.10.2 +pydantic==2.11.7 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator -pydantic-core==2.27.1 +pydantic-core==2.33.2 # via # -c requirements/_base.txt # pydantic +pygments==2.17.2 + # via + # -c requirements/_base.txt + # pytest pyparsing==3.1.2 # via # -c requirements/_base.txt # moto -pytest==8.3.5 +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio # pytest-cov # pytest-mock # pytest-sugar -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -248,7 +258,7 @@ regex==2023.12.25 # via # -c requirements/_base.txt # cfn-lint -requests==2.32.2 +requests==2.32.4 # via # -c requirements/_base.txt # docker @@ -273,10 +283,8 @@ s3transfer==0.11.3 # via # -c requirements/_base.txt # boto3 -setuptools==74.0.0 - # via - # -c requirements/_base.txt - # moto +setuptools==80.9.0 + # via moto six==1.16.0 # via # -c requirements/_base.txt @@ -304,7 +312,7 @@ termcolor==2.5.0 # via pytest-sugar types-requests==2.32.0.20250301 # via -r requirements/_test.in -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # alembic @@ -314,11 +322,16 @@ typing-extensions==4.12.2 # pydantic # pydantic-core # sqlalchemy2-stubs + # typing-inspection +typing-inspection==0.4.1 + # via + # -c requirements/_base.txt + # pydantic tzdata==2024.1 # via # -c requirements/_base.txt # faker -urllib3==2.2.3 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/services/resource-usage-tracker/requirements/_tools.txt b/services/resource-usage-tracker/requirements/_tools.txt index 4ae88566afd6..43afcef71007 100644 --- a/services/resource-usage-tracker/requirements/_tools.txt +++ b/services/resource-usage-tracker/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.7 +click==8.2.1 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -28,11 +28,11 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # -c requirements/_test.txt # black @@ -46,7 +46,10 @@ packaging==24.0 # black # build pathspec==0.12.1 - # via black + # via + # -c requirements/_test.txt + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -73,14 +76,13 @@ 
pyyaml==6.0.1 # watchdog ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==74.0.0 +setuptools==80.9.0 # via - # -c requirements/_base.txt # -c requirements/_test.txt # pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/services/resource-usage-tracker/setup.cfg b/services/resource-usage-tracker/setup.cfg index 1dbddd94448e..361d4cd4c064 100644 --- a/services/resource-usage-tracker/setup.cfg +++ b/services/resource-usage-tracker/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.0.0 +current_version = 1.0.1 commit = True message = services/resource-usage-tracker version: {current_version} → {new_version} tag = False @@ -10,9 +10,10 @@ commit_args = --no-verify [tool:pytest] asyncio_mode = auto asyncio_default_fixture_loop_scope = function -markers = +markers = testit: "marks test to run during development" + [mypy] -plugins = +plugins = pydantic.mypy sqlalchemy.ext.mypy.plugin diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/_meta.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/_meta.py index d433237ea2ad..4c16257ba545 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/_meta.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/_meta.py @@ -1,6 +1,4 @@ -""" Application's metadata - -""" +"""Application's metadata""" from typing import Final @@ -21,7 +19,7 @@ info.api_prefix_path_tag ) SUMMARY: Final[str] = info.get_summary() -APP_NAME: Final[str] = PROJECT_NAME +APP_NAME: Final[str] = info.app_name # NOTE: https://texteditor.com/ascii-frames/ APP_STARTED_BANNER_MSG = r""" diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/application.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/application.py index 2aacbfb49902..fb3bdf2d1e84 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/application.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/application.py @@ -35,8 +35,6 @@ def create_app(settings: ApplicationSettings) -> FastAPI: - _logger.info("app settings: %s", settings.model_dump_json(indent=1)) - app = FastAPI( debug=settings.RESOURCE_USAGE_TRACKER_DEBUG, title=f"{PROJECT_NAME} web API", diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/settings.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/settings.py index 3a534b692dc9..18f5dd6795a5 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/settings.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/core/settings.py @@ -1,9 +1,11 @@ import datetime from functools import cached_property +from typing import cast +from common_library.logging.logging_utils_filtering import LoggerName, MessageSubstring from models_library.basic_types import BootModeEnum from pydantic import AliasChoices, Field, PositiveInt, field_validator -from servicelib.logging_utils_filtering import LoggerName, MessageSubstring +from servicelib.logging_utils import LogLevelInt from settings_library.base import BaseCustomSettings from settings_library.basic_types import BuildTargetEnum, LogLevel, VersionTag from settings_library.postgres import PostgresSettings @@ -76,8 +78,8 @@ 
class _BaseApplicationSettings(BaseCustomSettings, MixinLoggingSettings): ) @cached_property - def LOG_LEVEL(self) -> LogLevel: # noqa: N802 - return self.RESOURCE_USAGE_TRACKER_LOGLEVEL + def log_level(self) -> LogLevelInt: + return cast(LogLevelInt, self.RESOURCE_USAGE_TRACKER_LOGLEVEL) @field_validator("RESOURCE_USAGE_TRACKER_LOGLEVEL", mode="before") @classmethod diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/exceptions/handlers/_http_error.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/exceptions/handlers/_http_error.py index 3ab692a70dce..d660259577e8 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/exceptions/handlers/_http_error.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/exceptions/handlers/_http_error.py @@ -1,10 +1,9 @@ import logging -from collections.abc import Callable -from typing import Awaitable +from collections.abc import Awaitable, Callable +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from fastapi import HTTPException, status from fastapi.encoders import jsonable_encoder -from servicelib.logging_errors import create_troubleshotting_log_kwargs from servicelib.status_codes_utils import is_5xx_server_error from starlette.requests import Request from starlette.responses import JSONResponse @@ -19,7 +18,7 @@ async def http_error_handler(request: Request, exc: Exception) -> JSONResponse: if is_5xx_server_error(exc.status_code): _logger.exception( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( "Unexpected error happened in the Resource Usage Tracker. Please contact support.", error=exc, error_context={ diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/main.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/main.py index 079ba5cdf799..42d3184f155f 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/main.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/main.py @@ -1,24 +1,39 @@ -"""Main application to be deployed by uvicorn (or equivalent) server - -""" +"""Main application to be deployed by uvicorn (or equivalent) server""" import logging +from typing import Final +from common_library.json_serialization import json_dumps from fastapi import FastAPI -from servicelib.logging_utils import config_all_loggers +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from simcore_service_resource_usage_tracker.core.application import create_app from simcore_service_resource_usage_tracker.core.settings import ApplicationSettings -the_settings = ApplicationSettings.create_from_envs() +_logger = logging.getLogger(__name__) -# SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 -logging.basicConfig(level=the_settings.log_level) # NOSONAR -logging.root.setLevel(the_settings.log_level) -config_all_loggers( - log_format_local_dev_enabled=the_settings.RESOURCE_USAGE_TRACKER_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=the_settings.RESOURCE_USAGE_TRACKER_LOG_FILTER_MAPPING, - tracing_settings=the_settings.RESOURCE_USAGE_TRACKER_TRACING, +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( + "aiobotocore", + "aio_pika", + "aiormq", + "botocore", + "werkzeug", ) -# SINGLETON FastAPI app -the_app: FastAPI = create_app(the_settings) + +def app_factory() -> FastAPI: + app_settings = 
ApplicationSettings.create_from_envs() + logging_shutdown_event = create_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.RESOURCE_USAGE_TRACKER_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.RESOURCE_USAGE_TRACKER_LOG_FILTER_MAPPING, + tracing_settings=app_settings.RESOURCE_USAGE_TRACKER_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) + + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/background_task_periodic_heartbeat_check.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/background_task_periodic_heartbeat_check.py index 4f61fda98795..4a9a30e75130 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/background_task_periodic_heartbeat_check.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/background_task_periodic_heartbeat_check.py @@ -164,8 +164,10 @@ async def check_running_services(app: FastAPI) -> None: base_start_timestamp = datetime.now(tz=UTC) # Get all current running services (across all products) - total_count: PositiveInt = await service_runs_db.total_service_runs_with_running_status_across_all_products( - _db_engine + total_count: PositiveInt = ( + await service_runs_db.total_service_runs_with_running_status_across_all_products( + _db_engine + ) ) for offset in range(0, total_count, _BATCH_SIZE): diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/background_task_periodic_heartbeat_check_setup.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/background_task_periodic_heartbeat_check_setup.py index abaefe1e9b7f..a747cd5d476d 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/background_task_periodic_heartbeat_check_setup.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/background_task_periodic_heartbeat_check_setup.py @@ -3,8 +3,8 @@ from collections.abc import Awaitable, Callable from typing import TypedDict +from common_library.async_tools import cancel_wait_task from fastapi import FastAPI -from servicelib.async_utils import cancel_wait_task from servicelib.background_task_utils import exclusive_periodic from servicelib.logging_utils import log_catch, log_context diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/fire_and_forget_setup.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/fire_and_forget_setup.py index 2523a069974d..a1e7db5ac306 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/fire_and_forget_setup.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/fire_and_forget_setup.py @@ -1,8 +1,8 @@ import logging from collections.abc import Awaitable, Callable +from common_library.async_tools import cancel_wait_task from fastapi import FastAPI -from servicelib.async_utils import cancel_wait_task from servicelib.logging_utils import log_catch, log_context _logger = logging.getLogger(__name__) diff --git 
a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/__init__.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/__init__.py index 1ccd94f436e6..243ca6ad4bf1 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/__init__.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/__init__.py @@ -4,6 +4,8 @@ from servicelib.fastapi.db_asyncpg_engine import close_db_connection, connect_to_db from servicelib.logging_utils import log_context +from ...._meta import APP_NAME + _logger = logging.getLogger(__name__) @@ -14,7 +16,11 @@ async def on_startup() -> None: logging.INFO, msg="RUT startup DB", ): - await connect_to_db(app, app.state.settings.RESOURCE_USAGE_TRACKER_POSTGRES) + await connect_to_db( + app, + app.state.settings.RESOURCE_USAGE_TRACKER_POSTGRES, + application_name=APP_NAME, + ) async def on_shutdown() -> None: with log_context( diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/credit_transactions_db.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/credit_transactions_db.py index b9b5f3569c5c..5f0010a54c90 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/credit_transactions_db.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/db/credit_transactions_db.py @@ -187,27 +187,31 @@ async def sum_wallet_credits( *, product_name: ProductName, wallet_id: WalletID, + include_pending_transactions: bool = True, ) -> WalletTotalCredits: async with transaction_context(engine, connection) as conn: + statuses = [ + CreditTransactionStatus.BILLED, + CreditTransactionStatus.IN_DEBT, + ] + if include_pending_transactions: + statuses.append(CreditTransactionStatus.PENDING) sum_stmt = sa.select( sa.func.sum(resource_tracker_credit_transactions.c.osparc_credits) ).where( (resource_tracker_credit_transactions.c.product_name == product_name) & (resource_tracker_credit_transactions.c.wallet_id == wallet_id) - & ( - resource_tracker_credit_transactions.c.transaction_status.in_( - [ - CreditTransactionStatus.BILLED, - CreditTransactionStatus.PENDING, - CreditTransactionStatus.IN_DEBT, - ] - ) - ) + & (resource_tracker_credit_transactions.c.transaction_status.in_(statuses)) ) result = await conn.execute(sum_stmt) row = result.first() if row is None or row[0] is None: + _logger.warning( + "No credits found for wallet %s with product %s", + wallet_id, + product_name, + ) return WalletTotalCredits( wallet_id=wallet_id, available_osparc_credits=Decimal(0) ) diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/redis.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/redis.py index e2790b2a4e94..84e0df512e54 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/redis.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/redis.py @@ -24,6 +24,7 @@ async def on_startup() -> None: app.state.redis_client_sdk = RedisClientSDK( redis_locks_dsn, client_name=APP_NAME ) + await app.state.redis_client_sdk.setup() async def on_shutdown() -> None: with log_context( diff --git 
a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/process_message_running_service.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/process_message_running_service.py index b1d82d825b89..aa960f4d586c 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/process_message_running_service.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/process_message_running_service.py @@ -295,17 +295,24 @@ async def _process_stop_event( msg.created_at, running_service.pricing_unit_cost, ) - - wallet_total_credits = await credit_transactions_db.sum_wallet_credits( - db_engine, - product_name=running_service.product_name, - wallet_id=running_service.wallet_id, + wallet_total_credits_without_pending_transactions = ( + # NOTE: include_pending_transactions=False ensures that we do not count the currently running transaction. + # This is important because we are closing the transaction now and we do not want to count it again. + await credit_transactions_db.sum_wallet_credits( + db_engine, + product_name=running_service.product_name, + wallet_id=running_service.wallet_id, + include_pending_transactions=False, + ) ) _transaction_status = ( CreditTransactionStatus.BILLED - if wallet_total_credits.available_osparc_credits - computed_credits >= 0 + if wallet_total_credits_without_pending_transactions.available_osparc_credits + - computed_credits + >= 0 else CreditTransactionStatus.IN_DEBT ) + # Adjust the status if the platform status is not OK if msg.simcore_platform_status != SimcorePlatformStatus.OK: _transaction_status = CreditTransactionStatus.NOT_BILLED diff --git a/services/resource-usage-tracker/tests/unit/conftest.py b/services/resource-usage-tracker/tests/unit/conftest.py index fa1b857904ce..7269ffae0096 100644 --- a/services/resource-usage-tracker/tests/unit/conftest.py +++ b/services/resource-usage-tracker/tests/unit/conftest.py @@ -27,6 +27,7 @@ from simcore_service_resource_usage_tracker.core.settings import ApplicationSettings pytest_plugins = [ + "pytest_simcore.asyncio_event_loops", "pytest_simcore.cli_runner", "pytest_simcore.docker_compose", "pytest_simcore.docker_registry", @@ -34,6 +35,7 @@ "pytest_simcore.environment_configs", "pytest_simcore.faker_projects_data", "pytest_simcore.faker_products_data", + "pytest_simcore.logging", "pytest_simcore.postgres_service", "pytest_simcore.pydantic_models", "pytest_simcore.pytest_global_environs", diff --git a/services/resource-usage-tracker/tests/unit/test_core_settings.py b/services/resource-usage-tracker/tests/unit/test_core_settings.py index d338859a5504..4569bcae7fac 100644 --- a/services/resource-usage-tracker/tests/unit/test_core_settings.py +++ b/services/resource-usage-tracker/tests/unit/test_core_settings.py @@ -2,7 +2,13 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable # pylint: disable=too-many-arguments +""" +We can validate actual .env files (also referred to as `repo.config` files) by passing them via the CLI +$ ln -s /path/to/osparc-config/deployments/mydeploy.com/repo.config .secrets +$ pytest --external-envfile=.secrets --pdb tests/unit/test_core_settings.py + +""" from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_service_resource_usage_tracker.core.settings import ( @@ -18,7 +24,7 @@ def test_valid_cli_application_settings(app_environment: EnvVarsDict): assert settings.RESOURCE_USAGE_TRACKER_POSTGRES assert
settings.RESOURCE_USAGE_TRACKER_REDIS assert settings.RESOURCE_USAGE_TRACKER_RABBITMQ - assert settings.LOG_LEVEL + assert settings.RESOURCE_USAGE_TRACKER_LOGLEVEL def test_valid_web_application_settings(app_environment: EnvVarsDict): @@ -28,4 +34,4 @@ def test_valid_web_application_settings(app_environment: EnvVarsDict): assert settings.RESOURCE_USAGE_TRACKER_POSTGRES assert settings.RESOURCE_USAGE_TRACKER_REDIS assert settings.RESOURCE_USAGE_TRACKER_RABBITMQ - assert settings.LOG_LEVEL + assert settings.RESOURCE_USAGE_TRACKER_LOGLEVEL diff --git a/services/resource-usage-tracker/tests/unit/test_main.py b/services/resource-usage-tracker/tests/unit/test_main.py index 6d9addd8ee2e..7fe4c95cbc2b 100644 --- a/services/resource-usage-tracker/tests/unit/test_main.py +++ b/services/resource-usage-tracker/tests/unit/test_main.py @@ -7,6 +7,6 @@ def test_main_app(app_environment: EnvVarsDict): - from simcore_service_resource_usage_tracker.main import the_app, the_settings + from simcore_service_resource_usage_tracker.main import app_factory - assert the_app.state.settings == the_settings + app_factory() diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_credit_transactions.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_credit_transactions.py index baf61f3b7fca..99ced436bd6f 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/test_api_credit_transactions.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_api_credit_transactions.py @@ -1,12 +1,12 @@ -from collections.abc import Iterator +from collections.abc import Callable, Iterator from datetime import UTC, datetime, timedelta from decimal import Decimal -from typing import Callable import httpx import pytest import sqlalchemy as sa from faker import Faker +from fastapi import FastAPI from models_library.api_schemas_resource_usage_tracker.credit_transactions import ( CreditTransactionCreateBody, WalletTotalCredits, @@ -32,6 +32,9 @@ from simcore_postgres_database.models.resource_tracker_service_runs import ( resource_tracker_service_runs, ) +from simcore_service_resource_usage_tracker.services.modules.db import ( + credit_transactions_db, +) from simcore_service_resource_usage_tracker.services.service_runs import ServiceRunPage from starlette import status from yarl import URL @@ -520,3 +523,32 @@ async def test_list_service_runs_with_transaction_status_filter( assert isinstance(result, ServiceRunPage) assert len(result.items) == 1 assert result.total == 1 + + +async def test_sum_wallet_credits_db( + mocked_redis_server: None, + resource_tracker_setup_db: None, + rpc_client: RabbitMQRPCClient, + product_name: ProductName, + initialized_app: FastAPI, +): + engine = initialized_app.state.engine + output_including_pending_transaction = ( + await credit_transactions_db.sum_wallet_credits( + engine, product_name=product_name, wallet_id=_WALLET_ID + ) + ) + assert output_including_pending_transaction.available_osparc_credits == Decimal( + "-310.00" + ) + output_excluding_pending_transaction = ( + await credit_transactions_db.sum_wallet_credits( + engine, + product_name=product_name, + wallet_id=_WALLET_ID, + include_pending_transactions=False, + ) + ) + assert output_excluding_pending_transaction.available_osparc_credits == Decimal( + "-240.00" + ) diff --git a/services/resource-usage-tracker/tests/unit/with_dbs/test_background_task_periodic_heartbeat_check.py b/services/resource-usage-tracker/tests/unit/with_dbs/test_background_task_periodic_heartbeat_check.py index 
42249956a8e3..8fa4991d86c4 100644 --- a/services/resource-usage-tracker/tests/unit/with_dbs/test_background_task_periodic_heartbeat_check.py +++ b/services/resource-usage-tracker/tests/unit/with_dbs/test_background_task_periodic_heartbeat_check.py @@ -1,3 +1,4 @@ +import asyncio from collections.abc import Callable, Iterator from datetime import UTC, datetime, timedelta @@ -120,6 +121,9 @@ def resource_tracker_setup_db( con.execute(resource_tracker_service_runs.delete()) +_PROD_RUN_INTERVAL_SEC = 1 # in reality in production this is 5 mins + + async def test_process_event_functions( create_rabbitmq_client: Callable[[str], RabbitMQClient], mocked_redis_server: None, @@ -132,6 +136,7 @@ async def test_process_event_functions( for _ in range(app_settings.RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_COUNTER_FAIL): await check_running_services(initialized_app) + await asyncio.sleep(_PROD_RUN_INTERVAL_SEC) # NOTE: As we are doing check that the modified field needs to be older then some # threshold, we need to make this field artificaly older in this test with postgres_db.connect() as con: diff --git a/services/static-webserver/client/Makefile b/services/static-webserver/client/Makefile index 89fafcca1ae0..cb7293594589 100644 --- a/services/static-webserver/client/Makefile +++ b/services/static-webserver/client/Makefile @@ -31,10 +31,10 @@ compile-dev: qx_packages source-output ## qx compiles host' 'source' -> host's ' # qx compile 'source' $(flags) --> 'source-output' [itisfoundation/qooxdoo-kit:${QOOXDOO_KIT_TAG}] $(docker_compose) run --detach --rm --name=$(container_name) qooxdoo-kit \ qx compile $(flags) \ + --set-env osparc.vcsOriginUrl="${VCS_URL}" \ --set-env osparc.vcsRef="${VCS_REF}" \ --set-env osparc.vcsRefClient="${VCS_REF_CLIENT}" \ - --set-env osparc.vcsStatusClient="${VCS_STATUS_CLIENT}" \ - --set-env osparc.vcsOriginUrl="${VCS_URL}" + --set-env osparc.vcsStatusClient="${VCS_STATUS_CLIENT}" .PHONY: follow-dev-logs follow-dev-logs: ## follow the logs of the qx compiler @@ -44,8 +44,10 @@ follow-dev-logs: ## follow the logs of the qx compiler .PHONY: compile touch upgrade compile: ## qx compiles host' 'source' -> image's 'build-output' # qx compile 'source' within $(docker_image) image [itisfoundation/qooxdoo-kit:${QOOXDOO_KIT_TAG}] - @docker buildx build --file $(docker_file) --tag $(docker_image) \ + @docker buildx build \ --load \ + --file $(docker_file) \ + --tag $(docker_image) \ --build-arg tag=${QOOXDOO_KIT_TAG} \ --build-arg VCS_REF=${VCS_REF} \ --build-arg VCS_REF_CLIENT=${VCS_REF_CLIENT} \ @@ -56,7 +58,13 @@ compile: ## qx compiles host' 'source' -> image's 'build-output' touch: ## minimal image build with /project/output-build inside # touch /project/output-build such that multi-stage 'services/web/Dockerfile' can build development target (fixes #1097) - @docker buildx build --load --file $(docker_file) --tag $(docker_image) --build-arg tag=${QOOXDOO_KIT_TAG} --target=touch . + @docker buildx build \ + --load \ + --file $(docker_file) \ + --tag $(docker_image) \ + --build-arg tag=${QOOXDOO_KIT_TAG} \ + --target=touch \ + . 
upgrade: ## upgrade to official version of the tool # upgrading to ${QOOXDOO_KIT_TAG} diff --git a/services/static-webserver/client/Manifest.json b/services/static-webserver/client/Manifest.json index de9e0909368d..743a52c5c2fb 100644 --- a/services/static-webserver/client/Manifest.json +++ b/services/static-webserver/client/Manifest.json @@ -27,7 +27,7 @@ "osparc/schedulerWorker.js", "svg/svg.js", "svg/svg.path.js", - "jsondiffpatch/jsondiffpatch.min.js", + "jsondiffpatch/jsondiffpatch-0.7.3.min.js", "jsontreeviewer/jsonTree.js", "marked/marked.min.js", "DOMPurify/purify.min.js" @@ -35,6 +35,7 @@ "css": [ "jsontreeviewer/jsonTree.css", "hint/hint.css", + "marked/markdown.css", "common/common.css" ] }, diff --git a/services/static-webserver/client/compile.json b/services/static-webserver/client/compile.json index 7ede67de7479..528e75c3fc38 100644 --- a/services/static-webserver/client/compile.json +++ b/services/static-webserver/client/compile.json @@ -136,7 +136,7 @@ "class": "osparc.Application", "theme": "osparc.theme.products.tis.ThemeDark", "name": "tis", - "title": "TIP V3.0 - IT'IS", + "title": "TIP V4.0 - IT'IS", "include": [ "iconfont.material.Load", "iconfont.fontawesome5.Load", diff --git a/services/static-webserver/client/source/class/osparc/About.js b/services/static-webserver/client/source/class/osparc/About.js index 2b1781dc23fb..b232d64ebbb1 100644 --- a/services/static-webserver/client/source/class/osparc/About.js +++ b/services/static-webserver/client/source/class/osparc/About.js @@ -63,7 +63,7 @@ qx.Class.define("osparc.About", { wrap: true }); this.add(poweredByLabel); - const displayName = osparc.store.StaticInfo.getInstance().getDisplayName(); + const displayName = osparc.store.StaticInfo.getDisplayName(); const poweredText = ` is powered by the ${osparc.About.OSPARC_OFFICIAL} platform.`; poweredByLabel.setValue(displayName + poweredText); diff --git a/services/static-webserver/client/source/class/osparc/Application.js b/services/static-webserver/client/source/class/osparc/Application.js index fc167e79592c..d4ce8442efc2 100644 --- a/services/static-webserver/client/source/class/osparc/Application.js +++ b/services/static-webserver/client/source/class/osparc/Application.js @@ -135,7 +135,8 @@ qx.Class.define("osparc.Application", { osparc.auth.Manager.getInstance().validateToken() .then(() => { const studyId = urlFragment.nav[1]; - this.__loadMainPage(studyId); + const loadAfterLogin = { studyId }; + this.__loadMainPage(loadAfterLogin); }) .catch(() => this.__loadLoginPage()); } @@ -153,12 +154,27 @@ qx.Class.define("osparc.Application", { if (["anonymous", "guest"].includes(data.role.toLowerCase())) { this.__loadNodeViewerPage(studyId, viewerNodeId); } else { - this.__loadMainPage(studyId); + const loadAfterLogin = { studyId }; + this.__loadMainPage(loadAfterLogin); } }); } break; } + case "conversation": { + // Route: /#/conversation/{id} + if (urlFragment.nav.length > 1) { + osparc.utils.Utils.cookie.deleteCookie("user"); + osparc.auth.Manager.getInstance().validateToken() + .then(() => { + const conversationId = urlFragment.nav[1]; + const loadAfterLogin = { conversationId }; + this.__loadMainPage(loadAfterLogin); + }) + .catch(() => this.__loadLoginPage()); + } + break; + } case "registration": { // Route: /#/registration/?invitation={token} if (urlFragment.params && urlFragment.params.invitation) { @@ -201,9 +217,9 @@ qx.Class.define("osparc.Application", { } break; } - case "form-sandbox": { + case "form-sandbox": this.__loadView(new 
osparc.desktop.FormSandboxPage(), {}, false); - } + break; } }, @@ -361,7 +377,7 @@ qx.Class.define("osparc.Application", { // first, pop up new release window this.__checkNewRelease(); - const platformName = osparc.store.StaticInfo.getInstance().getPlatformName(); + const platformName = osparc.store.StaticInfo.getPlatformName(); if (platformName !== "master") { // then, pop up cookies accepted window. It will go on top. this.__checkCookiesAccepted(); @@ -450,7 +466,7 @@ qx.Class.define("osparc.Application", { view.addListener("done", () => this.__restart(), this); }, - __loadMainPage: function(studyId = null) { + __loadMainPage: function(loadAfterLogin = null) { // logged in osparc.WindowSizeTracker.getInstance().evaluateTooSmallDialog(); osparc.data.Resources.getOne("profile") @@ -497,28 +513,57 @@ qx.Class.define("osparc.Application", { }); } - if (studyId) { + if (loadAfterLogin && loadAfterLogin["studyId"]) { + const studyId = loadAfterLogin["studyId"]; osparc.store.Store.getInstance().setCurrentStudyId(studyId); } - let mainPage = null; - if (osparc.product.Utils.getProductName().includes("s4ldesktop")) { - mainPage = new osparc.desktop.MainPageDesktop(); + if (loadAfterLogin && loadAfterLogin["conversationId"]) { + const conversationId = loadAfterLogin["conversationId"]; + const supportCenterWindow = osparc.support.SupportCenter.openWindow(); + supportCenterWindow.openConversation(conversationId); + } + + const loadViewerPage = () => { + const mainPage = new osparc.desktop.MainPage(); + this.__mainPage = mainPage; + this.__loadView(mainPage); + }; + const wsInstance = osparc.wrapper.WebSocket.getInstance(); + if (wsInstance.isAppConnected()) { + loadViewerPage(); } else { - mainPage = new osparc.desktop.MainPage(); + const listenerId = wsInstance.addListener("changeAppConnected", function(e) { + if (e.getData()) { + wsInstance.removeListenerById(listenerId); + loadViewerPage(); + } + }, this); } - this.__mainPage = mainPage; - this.__loadView(mainPage); } }) .catch(err => console.error(err)); }, - __loadNodeViewerPage: async function(studyId, viewerNodeId) { + __loadNodeViewerPage: function(studyId, viewerNodeId) { this.__connectWebSocket(); - const mainPage = new osparc.viewer.MainPage(studyId, viewerNodeId); - this.__mainPage = mainPage; - this.__loadView(mainPage); + + const loadNodeViewerPage = () => { + const mainPage = new osparc.viewer.MainPage(studyId, viewerNodeId); + this.__mainPage = mainPage; + this.__loadView(mainPage); + }; + const wsInstance = osparc.wrapper.WebSocket.getInstance(); + if (wsInstance.isAppConnected()) { + loadNodeViewerPage(); + } else { + const listenerId = wsInstance.addListener("changeAppConnected", e => { + if (e.getData()) { + wsInstance.removeListenerById(listenerId); + loadNodeViewerPage(); + } + }, this); + } }, __loadView: function(view, opts, clearUrl=true) { diff --git a/services/static-webserver/client/source/class/osparc/CookieExpirationTracker.js b/services/static-webserver/client/source/class/osparc/CookieExpirationTracker.js index f3584c4c39d4..2a627d74536f 100644 --- a/services/static-webserver/client/source/class/osparc/CookieExpirationTracker.js +++ b/services/static-webserver/client/source/class/osparc/CookieExpirationTracker.js @@ -39,7 +39,7 @@ qx.Class.define("osparc.CookieExpirationTracker", { __messageInterval: null, startTracker: function() { - const cookieMaxAge = osparc.store.StaticInfo.getInstance().getCookieMaxAge(); // seconds + const cookieMaxAge = osparc.store.StaticInfo.getCookieMaxAge(); // seconds if (cookieMaxAge) { 
const nowDate = new Date(); const expirationDateMilliseconds = nowDate.getTime() + cookieMaxAge*1000; diff --git a/services/static-webserver/client/source/class/osparc/ErrorPage.js b/services/static-webserver/client/source/class/osparc/ErrorPage.js index 126990836662..952cf6824a1b 100644 --- a/services/static-webserver/client/source/class/osparc/ErrorPage.js +++ b/services/static-webserver/client/source/class/osparc/ErrorPage.js @@ -211,7 +211,7 @@ qx.Class.define("osparc.ErrorPage", { }, __supportEmail: function() { - const supportEmail = osparc.store.VendorInfo.getInstance().getSupportEmail(); + const supportEmail = osparc.store.VendorInfo.getSupportEmail(); const giveEmailFeedbackWindow = new osparc.ui.window.Dialog("Support", null, qx.locale.Manager.tr("Please send us an email to:")); const mailto = osparc.store.Support.getMailToLabel(supportEmail, "Access error"); mailto.setTextAlign("center"); diff --git a/services/static-webserver/client/source/class/osparc/FlashMessenger.js b/services/static-webserver/client/source/class/osparc/FlashMessenger.js index 89610ea2da64..ed5185633054 100644 --- a/services/static-webserver/client/source/class/osparc/FlashMessenger.js +++ b/services/static-webserver/client/source/class/osparc/FlashMessenger.js @@ -86,10 +86,12 @@ qx.Class.define("osparc.FlashMessenger", { console.error(error); } const msg = this.extractMessage(error, defaultMessage); - const flashMessage = this.getInstance().logAs(msg, "ERROR", duration); + let flashMessage = null; if (error && error["supportId"]) { - flashMessage.addWidget(this.__createCopyOECWidget(msg, error["supportId"])); - flashMessage.setDuration(flashMessage.getDuration()*2); + flashMessage = new osparc.ui.message.FlashMessageOEC(msg, duration, error["supportId"]); + this.getInstance().addFlashMessage(flashMessage); + } else { + flashMessage = this.getInstance().logAs(msg, "ERROR", duration); } return flashMessage; }, @@ -105,13 +107,14 @@ qx.Class.define("osparc.FlashMessenger", { allowGrowX: false, }); errorLabel.addListener("tap", () => { + const currentStudy = osparc.store.Store.getInstance().getCurrentStudy(); const dataToClipboard = { message, supportId, timestamp: new Date().toString(), url: window.location.href, releaseTag: osparc.utils.Utils.getReleaseTag(), - studyId: osparc.store.Store.getInstance().getCurrentStudy() || "", + studyId: currentStudy ? 
currentStudy.getUuid() : "", } osparc.utils.Utils.copyTextToClipboard(osparc.utils.Utils.prettifyJson(dataToClipboard)); }); @@ -141,14 +144,15 @@ qx.Class.define("osparc.FlashMessenger", { log: function(logMessage) { const message = this.self().extractMessage(logMessage); - const level = logMessage.level.toUpperCase(); // "DEBUG", "INFO", "WARNING", "ERROR" - const flashMessage = new osparc.ui.message.FlashMessage(message, level, logMessage.duration); + this.addFlashMessage(flashMessage); + return flashMessage; + }, + + addFlashMessage: function(flashMessage) { flashMessage.addListener("closeMessage", () => this.removeMessage(flashMessage), this); this.__messages.push(flashMessage); - - return flashMessage; }, /** diff --git a/services/static-webserver/client/source/class/osparc/MaintenanceTracker.js b/services/static-webserver/client/source/class/osparc/MaintenanceTracker.js index 2823d768e141..ab545f468356 100644 --- a/services/static-webserver/client/source/class/osparc/MaintenanceTracker.js +++ b/services/static-webserver/client/source/class/osparc/MaintenanceTracker.js @@ -41,8 +41,25 @@ qx.Class.define("osparc.MaintenanceTracker", { statics: { CHECK_INTERVAL: 15*60*1000, // Check every 15' - CLOSABLE_WARN_IN_ADVANCE: 48*60*60*1000, // Show Closable Ribbon Message 48h in advance - PERMANENT_WARN_IN_ADVANCE: 60*60*1000 // Show Permanent Ribbon Message 60' in advance + CLOSABLE_WARN_IN_ADVANCE: 4*24*60*60*1000, // Show Closable Ribbon Message 4 days in advance + PERMANENT_WARN_IN_ADVANCE: 60*60*1000, // Show Permanent Ribbon Message 60' in advance + + dataToText: function(start, end, reason) { + let text = osparc.utils.Utils.formatDateAndTime(start); + if (end) { + if (start.getDate() === end.getDate()) { + // do not print the same day twice + text += " - " + osparc.utils.Utils.formatTime(end); + } else { + text += " - " + osparc.utils.Utils.formatDateAndTime(end); + } + } + text += " (local time)"; + if (reason) { + text += ": " + reason; + } + return text; + }, }, members: { @@ -78,20 +95,7 @@ qx.Class.define("osparc.MaintenanceTracker", { return null; } - let text = osparc.utils.Utils.formatDateAndTime(this.getStart()); - if (this.getEnd()) { - if (this.getStart().getDate() === this.getEnd().getDate()) { - // do not print the same day twice - text += " - " + osparc.utils.Utils.formatTime(this.getEnd()); - } else { - text += " - " + osparc.utils.Utils.formatDateAndTime(this.getEnd()); - } - } - text += " (local time)"; - if (this.getReason()) { - text += ": " + this.getReason(); - } - return text; + return this.self().dataToText(this.getStart(), this.getEnd(), this.getReason()); }, __setMaintenance: function(maintenanceData) { @@ -129,9 +133,9 @@ qx.Class.define("osparc.MaintenanceTracker", { } }, - __messageToRibbon: function(closable) { + messageToRibbon: function(closable, message = null) { this.__removeRibbonMessage(); - const text = this.__getText(); + const text = message || this.__getText(); const notification = new osparc.notification.RibbonNotification(text, "maintenance", closable); osparc.notification.RibbonNotifications.getInstance().addNotification(notification); this.__lastRibbonMessage = notification; @@ -143,14 +147,14 @@ qx.Class.define("osparc.MaintenanceTracker", { const diffPermanent = this.getStart().getTime() - now.getTime() - this.self().PERMANENT_WARN_IN_ADVANCE; if (diffClosable < 0) { - this.__messageToRibbon(true); + this.messageToRibbon(true); } else { - setTimeout(() => this.__messageToRibbon(true), diffClosable); + setTimeout(() => 
this.messageToRibbon(true), diffClosable); } if (diffPermanent < 0) { - this.__messageToRibbon(false); + this.messageToRibbon(false); } else { - setTimeout(() => this.__messageToRibbon(false), diffPermanent); + setTimeout(() => this.messageToRibbon(false), diffPermanent); } }, diff --git a/services/static-webserver/client/source/class/osparc/NewRelease.js b/services/static-webserver/client/source/class/osparc/NewRelease.js index 939c45092340..2a8ba04d53ec 100644 --- a/services/static-webserver/client/source/class/osparc/NewRelease.js +++ b/services/static-webserver/client/source/class/osparc/NewRelease.js @@ -73,7 +73,7 @@ qx.Class.define("osparc.NewRelease", { const releaseTag = osparc.utils.Utils.getReleaseTag(); const releaseLink = osparc.utils.Utils.getReleaseLink(); const linkLabel = new osparc.ui.basic.LinkLabel().set({ - value: this.tr("What's new in ") + releaseTag, + value: this.tr("What's New in ") + releaseTag, url: releaseLink, font: "link-label-14" }); diff --git a/services/static-webserver/client/source/class/osparc/NewUITracker.js b/services/static-webserver/client/source/class/osparc/NewUITracker.js index 3955b4ef2b51..bf4fe89b6dd5 100644 --- a/services/static-webserver/client/source/class/osparc/NewUITracker.js +++ b/services/static-webserver/client/source/class/osparc/NewUITracker.js @@ -34,18 +34,18 @@ qx.Class.define("osparc.NewUITracker", { let msg = ""; msg += qx.locale.Manager.tr("A new version of the application is now available."); msg += "
"; - msg += qx.locale.Manager.tr("Click the Reload button to get the latest features."); + msg += qx.locale.Manager.tr("Reload the page to get the latest features."); // permanent message - const flashMessage = osparc.FlashMessenger.logAs(msg, "INFO", 0).set({ + osparc.FlashMessenger.logAs(msg, "INFO", 0).set({ maxWidth: 500 }); - const reloadButton = osparc.utils.Utils.reloadNoCacheButton(); - flashMessage.addWidget(reloadButton); + // stop tracker in case it was running this.stopTracker(); } }) .catch(() => setTimeout(() => checkNewUI(), 5*1000)); }; + checkNewUI(); this.__checkInterval = setInterval(checkNewUI, this.self().CHECK_INTERVAL); }, diff --git a/services/static-webserver/client/source/class/osparc/Preferences.js b/services/static-webserver/client/source/class/osparc/Preferences.js index b2c6977853df..2be3b7b7c7bd 100644 --- a/services/static-webserver/client/source/class/osparc/Preferences.js +++ b/services/static-webserver/client/source/class/osparc/Preferences.js @@ -119,7 +119,7 @@ qx.Class.define("osparc.Preferences", { jobConcurrencyLimit: { check: "Number", nullable: false, - init: 4, + init: 1, event: "changeJobConcurrencyLimit", apply: "__patchPreference" }, @@ -210,7 +210,11 @@ qx.Class.define("osparc.Preferences", { .catch(err => osparc.FlashMessenger.logError(err)); }, - __patchPreference: function(value, _, propName) { + __patchPreference: function(value, old, propName) { + // only patch if the value changed + if (value === old) { + return; + } this.self().patchPreference(propName, value); } } diff --git a/services/static-webserver/client/source/class/osparc/WatchDog.js b/services/static-webserver/client/source/class/osparc/WatchDog.js index 61817320ce0a..c18aea7a1f0a 100644 --- a/services/static-webserver/client/source/class/osparc/WatchDog.js +++ b/services/static-webserver/client/source/class/osparc/WatchDog.js @@ -48,11 +48,7 @@ qx.Class.define("osparc.WatchDog", { // register for socket.io event to change the default heartbeat interval const socket = osparc.wrapper.WebSocket.getInstance(); - socket.removeSlot("set_heartbeat_emit_interval"); - socket.on("set_heartbeat_emit_interval", ({ interval }) => { - const newInterval = parseInt(interval) * 1000; - this.setHeartbeatInterval(newInterval); - }, this); + socket.bind("heartbeatInterval", this, "heartbeatInterval"); }, properties: { @@ -66,10 +62,14 @@ qx.Class.define("osparc.WatchDog", { heartbeatInterval: { check: "Number", - init: 2 * 1000, // in milliseconds - nullable: false, - apply: "_applyHeartbeatInterval" - } + init: null, + nullable: true, + apply: "__applyHeartbeatInterval" + }, + }, + + statics: { + DEFAULT_HEARTBEAT_INTERVAL: 2000, // default to 2 seconds }, members: { @@ -81,15 +81,24 @@ qx.Class.define("osparc.WatchDog", { logo.setOnline(value); } - if (value) { - this.__clientHeartbeatWWPinger.postMessage(["start", this.getHeartbeatInterval()]); - } else { - this.__clientHeartbeatWWPinger.postMessage(["stop"]); + value ? 
this.__startPinging() : this.__stopPinging(); + }, + + __applyHeartbeatInterval: function(value) { + if (value === null) { + return; } + + this.__startPinging(); + }, + + __startPinging: function() { + const heartbeatInterval = this.getHeartbeatInterval() || osparc.WatchDog.DEFAULT_HEARTBEAT_INTERVAL; + this.__clientHeartbeatWWPinger.postMessage(["start", heartbeatInterval]); }, - _applyHeartbeatInterval: function(value) { - this.__clientHeartbeatWWPinger.postMessage(["start", value]); + __stopPinging: function() { + this.__clientHeartbeatWWPinger.postMessage(["stop"]); }, __pingServer: function() { diff --git a/services/static-webserver/client/source/class/osparc/admin/Maintenance.js b/services/static-webserver/client/source/class/osparc/admin/Maintenance.js index 395757ff4030..77396fcfdc20 100644 --- a/services/static-webserver/client/source/class/osparc/admin/Maintenance.js +++ b/services/static-webserver/client/source/class/osparc/admin/Maintenance.js @@ -50,14 +50,17 @@ qx.Class.define("osparc.admin.Maintenance", { vBox.removeAll(); if (data) { + const displayMaintenanceBtn = new qx.ui.form.Button(this.tr("Test Maintenance message")).set({ + appearance: "strong-button", + allowGrowX: false, + }); + const message = osparc.MaintenanceTracker.dataToText(new Date(data["start"]), new Date(data["end"]), data["reason"]); + displayMaintenanceBtn.addListener("execute", () => osparc.MaintenanceTracker.getInstance().messageToRibbon(true), message); + vBox.add(displayMaintenanceBtn); + const respLabel = new qx.ui.basic.Label(this.tr("Start and End dates go in UTC time zone")); vBox.add(respLabel); - const displayMaintenanceBtn = new qx.ui.form.Button(this.tr("Display Maintenance message")); - // eslint-disable-next-line no-underscore-dangle - displayMaintenanceBtn.addListener("execute", () => osparc.MaintenanceTracker.getInstance().__messageToRibbon(true)); - vBox.add(displayMaintenanceBtn); - const invitationRespViewer = new osparc.ui.basic.JsonTreeWidget(data, "maintenance-data"); const container = new qx.ui.container.Scroll(); container.add(invitationRespViewer); diff --git a/services/static-webserver/client/source/class/osparc/auth/Data.js b/services/static-webserver/client/source/class/osparc/auth/Data.js index cda4da711d40..b42c4582b18c 100644 --- a/services/static-webserver/client/source/class/osparc/auth/Data.js +++ b/services/static-webserver/client/source/class/osparc/auth/Data.js @@ -75,11 +75,11 @@ qx.Class.define("osparc.auth.Data", { check: "Number" }, - username: { + userName: { check: "String", init: null, nullable: false, - event: "changeUsername", + event: "changeUserName", }, email: { @@ -139,12 +139,12 @@ qx.Class.define("osparc.auth.Data", { return osparc.utils.Utils.cookie.getCookie("user") === "logout"; }, - getFriendlyUsername: function() { + getFriendlyUserName: function() { const firstName = this.getFirstName(); if (firstName) { return firstName; } - return this.getUsername(); + return this.getUserName(); }, getFullName: function() { @@ -163,6 +163,12 @@ qx.Class.define("osparc.auth.Data", { let friendlyRole = role.replace(/_/g, " "); friendlyRole = osparc.utils.Utils.firstsUp(friendlyRole); return friendlyRole; - } + }, + + getAvatar: function(size) { + const email = this.getEmail(); + const userName = this.getUserName(); + return osparc.utils.Avatar.emailToThumbnail(email, userName, size); + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/auth/LoginWithDecorators.js 
b/services/static-webserver/client/source/class/osparc/auth/LoginWithDecorators.js index 28716a843692..8dab2ceddb4a 100644 --- a/services/static-webserver/client/source/class/osparc/auth/LoginWithDecorators.js +++ b/services/static-webserver/client/source/class/osparc/auth/LoginWithDecorators.js @@ -332,7 +332,7 @@ qx.Class.define("osparc.auth.LoginWithDecorators", { const organizationLink = new osparc.ui.basic.LinkLabel().set({ textColor: "text-darker" }); - const vendor = osparc.store.VendorInfo.getInstance().getVendor(); + const vendor = osparc.store.VendorInfo.getVendor(); if (vendor && "url" in vendor && "copyright" in vendor) { organizationLink.set({ value: vendor.copyright, diff --git a/services/static-webserver/client/source/class/osparc/auth/Manager.js b/services/static-webserver/client/source/class/osparc/auth/Manager.js index 0ca57ccf885c..5936e015e07b 100644 --- a/services/static-webserver/client/source/class/osparc/auth/Manager.js +++ b/services/static-webserver/client/source/class/osparc/auth/Manager.js @@ -116,6 +116,28 @@ qx.Class.define("osparc.auth.Manager", { return osparc.data.Resources.fetch("auth", "resendCode", params); }, + updatePhoneNumber: function(newPhoneNumber) { + const params = { + data: { + phone: newPhoneNumber + } + }; + return osparc.data.Resources.fetch("profile", "phoneRegister", params); + }, + + validateCodeUpdatePhoneNumber: function(code, loginCbk, failCbk, context) { + const params = { + data: { + code + } + }; + osparc.data.Resources.fetch("profile", "phoneConfirm", params) + .then(data => { + loginCbk.call(context, data); + }) + .catch(err => failCbk.call(context, err.message)); + }, + isLoggedIn: function() { return osparc.auth.Data.getInstance().isLoggedIn(); }, @@ -236,7 +258,7 @@ qx.Class.define("osparc.auth.Manager", { const authData = osparc.auth.Data.getInstance(); authData.set({ email: profile["login"], - username: profile["userName"], + userName: profile["userName"], firstName: profile["first_name"], lastName: profile["last_name"], expirationDate: profile["expirationDate"] ? 
new Date(profile["expirationDate"]) : null diff --git a/services/static-webserver/client/source/class/osparc/auth/core/BaseAuthPage.js b/services/static-webserver/client/source/class/osparc/auth/core/BaseAuthPage.js index 8a1488b7e4da..0b383ad776b6 100644 --- a/services/static-webserver/client/source/class/osparc/auth/core/BaseAuthPage.js +++ b/services/static-webserver/client/source/class/osparc/auth/core/BaseAuthPage.js @@ -56,7 +56,7 @@ qx.Class.define("osparc.auth.core.BaseAuthPage", { }, statics: { - FORM_WIDTH: 310 + FORM_WIDTH: 330 }, /* diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/Login2FAValidationCodeView.js b/services/static-webserver/client/source/class/osparc/auth/ui/Login2FAValidationCodeView.js index ca2beb0757a5..2a79d97297e4 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/Login2FAValidationCodeView.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/Login2FAValidationCodeView.js @@ -80,7 +80,7 @@ qx.Class.define("osparc.auth.ui.Login2FAValidationCodeView", { }); this.beautifyFormFields(); - const formRenderer = new qx.ui.form.renderer.SinglePlaceholder(this._form); + const formRenderer = new osparc.ui.form.renderer.LoginSinglePlaceholder(this._form); this.add(formRenderer); // buttons @@ -175,7 +175,7 @@ qx.Class.define("osparc.auth.ui.Login2FAValidationCodeView", { this.__validateCodeBtn.setFetching(true); const validationCodeTF = this._form.getItems()["validationCode"]; - const validationCode = validationCodeTF.getValue(); + const validationCode = validationCodeTF.getValue().trim(); const loginFun = log => { this.__validateCodeBtn.setFetching(false); diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js b/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js index 60c97f9d2d93..59b92fde83b0 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/LoginView.js @@ -73,23 +73,27 @@ qx.Class.define("osparc.auth.ui.LoginView", { const email = new qx.ui.form.TextField().set({ required: true }); - email.getContentElement().setAttribute("autocomplete", "username"); + email.getContentElement().setAttribute("autocomplete", "userName"); osparc.utils.Utils.setIdToWidget(email, "loginUserEmailFld"); - this._form.add(email, " Your email address", qx.util.Validate.email(), "email"); - this.addListener("appear", () => { + this._form.add(email, " Email", qx.util.Validate.email(), "email"); + const focusEmail = () => { email.focus(); email.activate(); - }); + }; + this.addListener("appear", () => { + focusEmail(); + setTimeout(() => focusEmail(), 100); // refocus + }, this); const pass = new osparc.ui.form.PasswordField().set({ required: true }); pass.getChildControl("passwordField").getContentElement().setAttribute("autocomplete", "current-password"); osparc.utils.Utils.setIdToWidget(pass.getChildControl("passwordField"), "loginPasswordFld"); - this._form.add(pass, " Your password", null, "password"); + this._form.add(pass, " Password", null, "password"); this.beautifyFormFields(); - const formRenderer = new qx.ui.form.renderer.SinglePlaceholder(this._form); + const formRenderer = new osparc.ui.form.renderer.LoginSinglePlaceholder(this._form); this.add(formRenderer); // buttons diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/RegistrationView.js b/services/static-webserver/client/source/class/osparc/auth/ui/RegistrationView.js index 
97749ea860ee..179aca0ff62e 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/RegistrationView.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/RegistrationView.js @@ -85,7 +85,7 @@ qx.Class.define("osparc.auth.ui.RegistrationView", { validator.setValidator(() => osparc.auth.core.Utils.checkSamePasswords(password1, password2)); this.beautifyFormFields(); - const formRenderer = new qx.ui.form.renderer.SinglePlaceholder(this._form); + const formRenderer = new osparc.ui.form.renderer.LoginSinglePlaceholder(this._form); this.add(formRenderer); // buttons diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/RequestAccount.js b/services/static-webserver/client/source/class/osparc/auth/ui/RequestAccount.js index 8ab05f16cd3e..385d8b3c6d2d 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/RequestAccount.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/RequestAccount.js @@ -77,31 +77,18 @@ qx.Class.define("osparc.auth.ui.RequestAccount", { break; } - const phone = new qx.ui.form.TextField(); + const phone = new osparc.ui.form.IntlTelInput().set({ + compactField: true, + }); this._form.add(phone, this.tr("Phone Number"), null, "phone"); - const organization = new qx.ui.form.TextField(); - doubleSpaced.push(organization); - switch (osparc.product.Utils.getProductName()) { - case "s4l": - this._form.add(organization, this.tr("Company Name"), null, "company"); - organization.setRequired(true); - break; - case "s4lacad": - case "s4ldesktopacad": - this._form.add(organization, this.tr("University"), null, "university"); - organization.setRequired(true); - break; - case "tiplite": - this._form.add(organization, this.tr("University"), null, "university"); - break; - case "tis": - this._form.add(organization, this.tr("Organization"), null, "organization"); - break; - case "osparc": - this._form.add(organization, this.tr("Research Group/Organization"), null, "organization"); - break; + const institution = new qx.ui.form.TextField(); + doubleSpaced.push(institution); + const institutionAlias = osparc.product.Utils.getInstitutionAlias(); + this._form.add(institution, institutionAlias.label, null, institutionAlias.key); + if (institutionAlias.required) { + institution.setRequired(true); } @@ -127,7 +114,7 @@ qx.Class.define("osparc.auth.ui.RequestAccount", { required: true }); doubleSpaced.push(country); - const countries = osparc.store.StaticInfo.getInstance().getCountries(); + const countries = osparc.store.StaticInfo.getCountries(); countries.forEach(c => { const cItem = new qx.ui.form.ListItem(c.name, null, c.alpha2).set({ rich: true @@ -158,6 +145,9 @@ qx.Class.define("osparc.auth.ui.RequestAccount", { case "s4ldesktopacad": { const application = new qx.ui.form.SelectBox(); [{ + id: "other", + label: "Other" + }, { id: "Antenna_Design_for_Wireless_Communication", label: "Antenna Design for Wireless Communication" }, { @@ -284,6 +274,9 @@ qx.Class.define("osparc.auth.ui.RequestAccount", { break; default: hearOptions = [{ + id: "Other", + label: "Other" + }, { id: "Search_Engine", label: "Search Engine" }, { @@ -295,9 +288,6 @@ qx.Class.define("osparc.auth.ui.RequestAccount", { }, { id: "Social_Media", label: "Social Media" - }, { - id: "Other", - label: "Other" }]; break; } @@ -413,6 +403,9 @@ qx.Class.define("osparc.auth.ui.RequestAccount", { this.fireDataEvent("done"); }) .catch(err => { + if ("errors" in err) { + osparc.utils.Utils.errorsToForm(this._form, err.errors); + } 
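// after mapping any field-level validation errors onto the form, still flash the error and refresh the captcha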
osparc.FlashMessenger.logError(err); this.__restartCaptcha(); }); diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/ResetPassRequestView.js b/services/static-webserver/client/source/class/osparc/auth/ui/ResetPassRequestView.js index 096ba9967a65..5c7cbff015d2 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/ResetPassRequestView.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/ResetPassRequestView.js @@ -49,13 +49,13 @@ qx.Class.define("osparc.auth.ui.ResetPassRequestView", { }); this.beautifyFormFields(); - const formRenderer = new qx.ui.form.renderer.SinglePlaceholder(this._form); + const formRenderer = new osparc.ui.form.renderer.LoginSinglePlaceholder(this._form); this.add(formRenderer); // buttons const grp = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); - const submitBtn = this.__submitBtn = new qx.ui.form.Button(this.tr("Submit")).set({ + const submitBtn = this.__submitBtn = new osparc.ui.form.FetchButton(this.tr("Submit")).set({ center: true, appearance: "form-button" }); @@ -83,17 +83,20 @@ qx.Class.define("osparc.auth.ui.ResetPassRequestView", { }, __submit: function(email) { - const manager = osparc.auth.Manager.getInstance(); + this.__submitBtn.setFetching(true); const successFun = log => { + this.__submitBtn.setFetching(false); this.fireDataEvent("done", log.message); osparc.FlashMessenger.getInstance().log(log); }; const failFun = err => { + this.__submitBtn.setFetching(false); osparc.FlashMessenger.logError(err, this.tr("Could not request password reset")); }; + const manager = osparc.auth.Manager.getInstance(); manager.resetPasswordRequest(email.getValue(), successFun, failFun, this); }, diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/ResetPassView.js b/services/static-webserver/client/source/class/osparc/auth/ui/ResetPassView.js index 196a0364821a..aaf44dd9cbeb 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/ResetPassView.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/ResetPassView.js @@ -29,6 +29,8 @@ qx.Class.define("osparc.auth.ui.ResetPassView", { */ members: { + __submitBtn: null, + // overrides base _buildPage: function() { this._addTitleHeader(this.tr("Reset Password")); @@ -61,14 +63,15 @@ qx.Class.define("osparc.auth.ui.ResetPassView", { }); this.beautifyFormFields(); - const formRenderer = new qx.ui.form.renderer.SinglePlaceholder(this._form); + const formRenderer = new osparc.ui.form.renderer.LoginSinglePlaceholder(this._form); this.add(formRenderer); // buttons const grp = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); - const submitBtn = new qx.ui.form.Button(this.tr("Submit")).set({ - appearance: "form-button" + const submitBtn = this.__submitBtn = new osparc.ui.form.FetchButton(this.tr("Submit")).set({ + center: true, + appearance: "strong-button", }); grp.addAt(submitBtn, 1, { flex:1 @@ -97,12 +100,16 @@ qx.Class.define("osparc.auth.ui.ResetPassView", { }, __submit: function(password, confirm, code) { + this.__submitBtn.setFetching(true); + const successFun = log => { + this.__submitBtn.setFetching(false); this.fireDataEvent("done", log.message); osparc.FlashMessenger.getInstance().log(log); }; const failFun = err => { + this.__submitBtn.setFetching(false); osparc.FlashMessenger.logError(err, this.tr("Could not reset password")); }; diff --git a/services/static-webserver/client/source/class/osparc/auth/ui/VerifyPhoneNumberView.js 
b/services/static-webserver/client/source/class/osparc/auth/ui/VerifyPhoneNumberView.js index 65c5996f1f87..c9cc9fc2875d 100644 --- a/services/static-webserver/client/source/class/osparc/auth/ui/VerifyPhoneNumberView.js +++ b/services/static-webserver/client/source/class/osparc/auth/ui/VerifyPhoneNumberView.js @@ -24,6 +24,12 @@ qx.Class.define("osparc.auth.ui.VerifyPhoneNumberView", { check: "String", init: "foo@mymail.com", nullable: false + }, + + updatingNumber: { + check: "Boolean", + init: false, + nullable: false, } }, @@ -32,148 +38,175 @@ qx.Class.define("osparc.auth.ui.VerifyPhoneNumberView", { }, members: { - __itiInput: null, - __verifyPhoneNumberBtn: null, - __validateCodeField: null, - __validateCodeBtn: null, - __sendViaEmail: null, - - _buildPage: function() { - this.__buildVerificationLayout(); - const validationLayout = this.__createValidationLayout().set({ - zIndex: 1 // the countries list that goes on top has a z-index of 2 - }); - this.add(validationLayout); - const sendViaEmailBtn = this.__createSendViaEmailButton().set({ - zIndex: 1 // the countries list that goes on top has a z-index of 2 - }); - this.add(sendViaEmailBtn); - this.__attachHandlers(); + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "title": + control = new qx.ui.basic.Label().set({ + value: this.tr("Two-Factor Authentication (2FA)"), + allowGrowX: true, + rich: true, + font: "text-16" + }); + this.add(control); + break; + case "intro-text": + control = new qx.ui.basic.Label().set({ + value: this.tr("If SMS is your chosen 2FA method, you'll get a text message with a code on every login to authenticate your access."), + rich: true, + wrap: true + }); + this.add(control); + break; + case "phone-number-layout": + control = new qx.ui.container.Composite(new qx.ui.layout.HBox(5)); + control.getContentElement().setStyles({ + "overflow": "visible" // needed for countries dropdown menu + }); + this.add(control); + break; + case "intl-tel-input": + control = new osparc.ui.form.IntlTelInput(); + this.getChildControl("phone-number-layout").add(control, { + flex: 1 + }); + break; + case "verify-number-button": + control = new osparc.ui.form.FetchButton(this.tr("Send SMS")).set({ + appearance: "strong-button", + center: true, + minWidth: 80 + }); + control.addListener("execute", () => this.__verifyPhoneNumber()); + this.getChildControl("phone-number-layout").add(control); + break; + case "validation-layout": + control = new qx.ui.container.Composite(new qx.ui.layout.HBox(5)).set({ + zIndex: 1 // the countries list that goes on top has a z-index of 2 + }); + this.add(control); + break; + case "validate-code-field": + control = new qx.ui.form.TextField().set({ + placeholder: this.tr("Type the SMS code"), + enabled: false, + height: 29, // to align it with the strong button next to it + }); + control.addListener("input", e => this.getChildControl("validate-code-button").setEnabled(Boolean(e.getData()))); + this.getChildControl("validation-layout").add(control, { + flex: 1 + }); + break; + case "validate-code-button": + control = new osparc.ui.form.FetchButton(this.tr("Validate")).set({ + appearance: "strong-button", + center: true, + enabled: false, + minWidth: 80, + }); + control.addListener("execute", () => this.__validateCodeRegister()); + this.getChildControl("validation-layout").add(control); + break; + case "send-via-email-button": + control = new osparc.ui.form.FetchButton().set({ + label: this.tr("Skip phone registration and send code via email"), + textColor: "text", + zIndex: 
1 // the countries list that goes on top has a z-index of 2 + }); + control.addListener("execute", () => this.__requestCodeViaEmail(), this); + this.add(control); + break; + } + return control || this.base(arguments, id); }, - __buildVerificationLayout: function() { - const verificationInfoTitle = new qx.ui.basic.Label().set({ - value: this.tr("Two-Factor Authentication (2FA)"), - allowGrowX: true, - rich: true, - font: "text-16" - }); - this.add(verificationInfoTitle); - - const verificationInfoDesc = new qx.ui.basic.Label().set({ - value: this.tr("A text message will be sent to your mobile phone for authentication each time you log in."), - rich: true, - wrap: true - }); - this.add(verificationInfoDesc); - - const phoneNumberVerifyLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(5)); - phoneNumberVerifyLayout.getContentElement().setStyles({ - "overflow": "visible" // needed for countries dropdown menu - }); - - const itiInput = this.__itiInput = new osparc.widget.IntlTelInput(); - phoneNumberVerifyLayout.add(itiInput, { - flex: 1 - }); - - const verifyPhoneNumberBtn = this.__verifyPhoneNumberBtn = new osparc.ui.form.FetchButton(this.tr("Send SMS")).set({ - appearance: "strong-button", - center: true, - minWidth: 80 - }); - phoneNumberVerifyLayout.add(verifyPhoneNumberBtn); - this.add(phoneNumberVerifyLayout); - }, + _buildPage: function() { + this.getChildControl("title"); + this.getChildControl("intro-text"); - __createValidationLayout: function() { - const smsValidationLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(5)); - const validateCodeTF = this.__validateCodeField = new qx.ui.form.TextField().set({ - placeholder: this.tr("Type the SMS code"), - enabled: false - }); - smsValidationLayout.add(validateCodeTF, { - flex: 1 - }); - const validateCodeBtn = this.__validateCodeBtn = new osparc.ui.form.FetchButton(this.tr("Validate")).set({ - appearance: "strong-button", - center: true, - minWidth: 80 - }); - validateCodeBtn.setEnabled(false); - validateCodeTF.addListener("input", e => validateCodeBtn.setEnabled(Boolean(e.getData()))); - smsValidationLayout.add(validateCodeBtn); - return smsValidationLayout; - }, + this.getChildControl("intl-tel-input"); + this.getChildControl("verify-number-button"); - __createSendViaEmailButton: function() { - const txt = this.tr("Skip phone registration and send code via email"); - const sendViaEmail = this.__sendViaEmail = new osparc.ui.form.FetchButton(txt).set({ - textColor: "text", - zIndex: 1 // the countries list that goes on top has a z-index of 2 - }); - return sendViaEmail; - }, + this.getChildControl("validate-code-field"); + this.getChildControl("validate-code-button"); - __attachHandlers: function() { - this.__verifyPhoneNumberBtn.addListener("execute", () => this.__verifyPhoneNumber()); - this.__validateCodeBtn.addListener("execute", () => this.__validateCodeRegister()); - this.__sendViaEmail.addListener("execute", () => this.__requestCodeViaEmail(), this); + this.getChildControl("send-via-email-button"); }, __verifyPhoneNumber: function() { - this.__itiInput.verifyPhoneNumber(); - const isValid = this.__itiInput.isValidNumber(); + const itiInput = this.getChildControl("intl-tel-input"); + const verifyPhoneNumberBtn = this.getChildControl("verify-number-button"); + const validateCodeBtn = this.getChildControl("validate-code-button"); + itiInput.verifyPhoneNumber(); + const isValid = itiInput.isValidNumber(); if (isValid) { - this.__itiInput.setEnabled(false); - this.__verifyPhoneNumberBtn.setFetching(true); - 
osparc.auth.Manager.getInstance().verifyPhoneNumber(this.getUserEmail(), this.__itiInput.getNumber()) + itiInput.setEnabled(false); + verifyPhoneNumberBtn.setFetching(true); + const promise = this.isUpdatingNumber() ? + osparc.auth.Manager.getInstance().updatePhoneNumber(itiInput.getNumber()) : + osparc.auth.Manager.getInstance().verifyPhoneNumber(this.getUserEmail(), itiInput.getNumber()); + promise .then(resp => { - osparc.FlashMessenger.logAs(resp.message, "INFO"); - this.__verifyPhoneNumberBtn.setFetching(false); + const msg = (resp && resp.message) ? resp.message : "A verification code has been sent via SMS"; + osparc.FlashMessenger.logAs(msg, "INFO"); + verifyPhoneNumberBtn.setFetching(false); + verifyPhoneNumberBtn.setEnabled(false); + const resendCodeTimeout = 10000; + setTimeout(() => verifyPhoneNumberBtn.setEnabled(true), resendCodeTimeout); // enable, focus and listen to Enter - this.__validateCodeField.setEnabled(true); - this.__validateCodeField.focus(); - this.__validateCodeField.activate(); - this.__enableEnterCommand(this.__validateCodeBtn); + const validateCodeField = this.getChildControl("validate-code-field"); + validateCodeField.setEnabled(true); + validateCodeField.focus(); + validateCodeField.activate(); + this.__enableEnterCommand(validateCodeBtn); }) .catch(err => { osparc.FlashMessenger.logError(err); - this.__verifyPhoneNumberBtn.setFetching(false); - this.__itiInput.setEnabled(true); + verifyPhoneNumberBtn.setFetching(false); + itiInput.setEnabled(true); }); } }, __validateCodeRegister: function() { - this.__validateCodeBtn.setFetching(true); + const validateCodeField = this.getChildControl("validate-code-field"); + const validateCodeBtn = this.getChildControl("validate-code-button"); + + validateCodeBtn.setFetching(true); const loginFun = log => { - osparc.FlashMessenger.logAs(log.message, "INFO"); - this.__validateCodeBtn.setFetching(false); - this.__validateCodeField.setEnabled(false); - this.__validateCodeBtn.setEnabled(false); - this.__validateCodeBtn.setIcon("@FontAwesome5Solid/check/12"); - this.fireDataEvent("done", log.message); + const msg = (log && log.message) ? 
log.message : "The phone number was updated successfully"; + osparc.FlashMessenger.logAs(msg, "INFO"); + validateCodeField.setEnabled(false); + validateCodeBtn.setFetching(false); + validateCodeBtn.setEnabled(false); + validateCodeBtn.setIcon("@FontAwesome5Solid/check/12"); + this.fireDataEvent("done", msg); }; const failFun = err => { osparc.FlashMessenger.logError(err); - this.__validateCodeBtn.setFetching(false); + validateCodeBtn.setFetching(false); // TODO: can get field info from response here err = String(err) || this.tr("Invalid code"); - this.__validateCodeField.set({ + validateCodeField.set({ invalidMessage: err, valid: false }); }; const manager = osparc.auth.Manager.getInstance(); - manager.validateCodeRegister(this.getUserEmail(), this.__itiInput.getNumber(), this.__validateCodeField.getValue(), loginFun, failFun, this); + const itiInput = this.getChildControl("intl-tel-input"); + if (this.isUpdatingNumber()) { + manager.validateCodeUpdatePhoneNumber(validateCodeField.getValue(), loginFun, failFun, this); + } else { + manager.validateCodeRegister(this.getUserEmail(), itiInput.getNumber(), validateCodeField.getValue(), loginFun, failFun, this); + } }, __requestCodeViaEmail: function() { - this.__sendViaEmail.setFetching(true); + const sendViaEmail = this.getChildControl("send-via-email-button"); + sendViaEmail.setFetching(true); osparc.auth.Manager.getInstance().resendCodeViaEmail(this.getUserEmail()) .then(data => { const message = osparc.auth.core.Utils.extractMessage(data); @@ -186,7 +219,7 @@ qx.Class.define("osparc.auth.ui.VerifyPhoneNumberView", { }); }) .catch(err => osparc.FlashMessenger.logError(err)) - .finally(() => this.__sendViaEmail.setFetching(false)); + .finally(() => sendViaEmail.setFetching(false)); }, __enableEnterCommand: function(onBtn) { @@ -197,12 +230,16 @@ qx.Class.define("osparc.auth.ui.VerifyPhoneNumberView", { }, __disableCommands: function() { - this.__verifyPhoneNumberBtn.setCommand(null); - this.__validateCodeBtn.setCommand(null); + const verifyPhoneNumberBtn = this.getChildControl("verify-number-button"); + verifyPhoneNumberBtn.setCommand(null); + + const validateCodeBtn = this.getChildControl("validate-code-button"); + validateCodeBtn.setCommand(null); }, _onAppear: function() { - this.__enableEnterCommand(this.__verifyPhoneNumberBtn); + const verifyPhoneNumberBtn = this.getChildControl("verify-number-button"); + this.__enableEnterCommand(verifyPhoneNumberBtn); }, _onDisappear: function() { diff --git a/services/static-webserver/client/source/class/osparc/conversation/AddMessage.js b/services/static-webserver/client/source/class/osparc/conversation/AddMessage.js new file mode 100644 index 000000000000..b2c1314d3cf9 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/conversation/AddMessage.js @@ -0,0 +1,264 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2023 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.conversation.AddMessage", { + extend: qx.ui.core.Widget, + + construct: function() { + this.base(arguments); + + this._setLayout(new qx.ui.layout.VBox(5)); + + this.__buildLayout(); + }, + + properties: { + conversationId: { + check: "String", + init: null, + nullable: true, + event: "changeConversationId", + }, + + studyData: { + 
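// serialized study data; when set, __applyStudyData uses it for the write-access checks and the "Notify user" flow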
check: "Object", + init: null, + nullable: true, + event: "changeStudyData", + apply: "__applyStudyData", + }, + + message: { + check: "Object", + init: null, + nullable: true, + event: "changeMessage", + apply: "__applyMessage", + } + }, + + events: { + "addMessage": "qx.event.type.Data", + "updateMessage": "qx.event.type.Data", + "notifyUser": "qx.event.type.Data", + }, + + members: { + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "add-comment-layout": { + control = new qx.ui.container.Composite(new qx.ui.layout.HBox(0)); + this._add(control, { + flex: 1 + }); + break; + } + case "avatar": { + control = osparc.utils.Utils.createThumbnail(32); + const authStore = osparc.auth.Data.getInstance(); + control.set({ + source: authStore.getAvatar(32), + alignX: "center", + alignY: "middle", + marginRight: 8, + }); + this.getChildControl("add-comment-layout").add(control); + break; + } + case "comment-field": + control = new osparc.editor.MarkdownEditor(); + control.addListener("textChanged", () => this.__addCommentPressed(), this); + control.setCompact(true); + control.getChildControl("text-area").set({ + maxLength: osparc.data.model.Conversation.MAX_CONTENT_LENGTH, + }); + // make it visually connected to the button + control.getChildControl("text-area").getContentElement().setStyles({ + "border-top-right-radius": "0px", // no roundness there to match the arrow button + }); + // make it more compact + this.getChildControl("add-comment-layout").add(control, { + flex: 1 + }); + break; + case "add-comment-button": + control = new qx.ui.form.Button(null, "@FontAwesome5Solid/arrow-up/16").set({ + toolTipText: this.tr("Ctrl+Enter"), + backgroundColor: "input_background", + allowGrowX: false, + alignX: "right", + alignY: "middle", + }); + control.getContentElement().setStyles({ + "border-bottom": "1px solid " + qx.theme.manager.Color.getInstance().resolve("default-button-active"), + "border-top-left-radius": "0px", // no roundness there to match the message field + "border-bottom-left-radius": "0px", // no roundness there to match the message field + "border-bottom-right-radius": "0px", // no roundness there to match the message field + }); + const commentField = this.getChildControl("comment-field").getChildControl("text-area"); + commentField.addListener("focus", () => { + control.getContentElement().setStyles({ + "border-bottom": "1px solid " + qx.theme.manager.Color.getInstance().resolve("product-color"), + }); + }, this); + commentField.addListener("focusout", () => { + control.getContentElement().setStyles({ + "border-bottom": "1px solid " + qx.theme.manager.Color.getInstance().resolve("default-button-active"), + }); + }, this); + control.addListener("execute", this.__addCommentPressed, this); + this.getChildControl("add-comment-layout").add(control); + break; + case "notify-user-button": + control = new qx.ui.form.Button("🔔 " + this.tr("Notify user")).set({ + appearance: "form-button", + allowGrowX: false, + alignX: "right" + }); + control.addListener("execute", () => this.__notifyUserTapped()); + this._add(control); + break; + } + + return control || this.base(arguments, id); + }, + + __buildLayout: function() { + this.getChildControl("avatar"); + this.getChildControl("comment-field"); + this.getChildControl("add-comment-button"); + }, + + __applyStudyData: function(studyData) { + const notifyUserButton = this.getChildControl("notify-user-button"); + if (studyData) { + const canIWrite = osparc.data.model.Study.canIWrite(studyData["accessRights"]) + 
this.getChildControl("add-comment-button").setEnabled(canIWrite); + notifyUserButton.show(); + notifyUserButton.setEnabled(canIWrite); + } else { + notifyUserButton.exclude(); + } + }, + + __applyMessage: function(message) { + if (message) { + // edit mode + const commentField = this.getChildControl("comment-field"); + commentField.setText(message["content"]); + } + }, + + __addCommentPressed: function() { + this.getMessage() ? this.__editComment() : this.addComment(); + }, + + addComment: function() { + const commentField = this.getChildControl("comment-field"); + const content = commentField.getChildControl("text-area").getValue(); + if (content) { + this.fireDataEvent("addMessage", content); + commentField.getChildControl("text-area").setValue(""); + } + }, + + __editComment: function() { + const commentField = this.getChildControl("comment-field"); + const content = commentField.getChildControl("text-area").getValue(); + if (content) { + this.fireDataEvent("updateMessage", content); + } + }, + + /* NOTIFY USERS */ + __notifyUserTapped: function() { + const studyData = this.getStudyData(); + if (!studyData) { + return; + } + + const showOrganizations = false; + const showAccessRights = false; + const userManager = new osparc.share.NewCollaboratorsManager(studyData, showOrganizations, showAccessRights).set({ + acceptOnlyOne: true, + }); + userManager.setCaption(this.tr("Notify user")); + userManager.getActionButton().setLabel(this.tr("Notify")); + userManager.addListener("addCollaborators", e => { + userManager.close(); + const data = e.getData(); + const userGids = data["selectedGids"]; + if (userGids && userGids.length) { + const userGid = parseInt(userGids[0]); + this.__notifyUser(userGid); + } + }); + }, + + __notifyUser: function(userGid) { + const studyData = this.getStudyData(); + if (!studyData) { + return; + } + + // Note! + // This check only works if the project is directly shared with the user. + // If it's shared through a group, it might be a bit confusing + if (userGid in studyData["accessRights"]) { + this.__doNotifyUser(userGid); + } else { + const msg = this.tr("This user has no access to the project. 
Do you want to share it?"); + const win = new osparc.ui.window.Confirmation(msg).set({ + caption: this.tr("Share"), + confirmText: this.tr("Share"), + confirmAction: "create" + }); + win.center(); + win.open(); + win.addListener("close", () => { + if (win.getConfirmed()) { + const newCollaborators = { + [userGid]: osparc.data.Roles.STUDY["write"].accessRights + }; + osparc.store.Study.getInstance().addCollaborators(studyData, newCollaborators) + .then(() => { + this.__doNotifyUser(userGid); + const potentialCollaborators = osparc.store.Groups.getInstance().getPotentialCollaborators() + if (userGid in potentialCollaborators && "getUserId" in potentialCollaborators[userGid]) { + const uid = potentialCollaborators[userGid].getUserId(); + osparc.notification.Notifications.pushStudyShared(uid, studyData["uuid"]); + } + }) + .catch(err => osparc.FlashMessenger.logError(err)); + } + }, this); + } + }, + + __doNotifyUser: function(userGid) { + const studyData = this.getStudyData(); + if (!studyData) { + return; + } + + this.fireDataEvent("notifyUser", userGid); + }, + /* NOTIFY USERS */ + } +}); diff --git a/services/static-webserver/client/source/class/osparc/conversation/Conversation.js b/services/static-webserver/client/source/class/osparc/conversation/Conversation.js new file mode 100644 index 000000000000..e36342ce1953 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/conversation/Conversation.js @@ -0,0 +1,244 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.conversation.Conversation", { + extend: qx.ui.core.Widget, + + /** + * @param conversation {osparc.data.model.Conversation} Conversation + */ + construct: function(conversation) { + this.base(arguments); + + this._messages = []; + + this._setLayout(new qx.ui.layout.VBox(5)); + + this._buildLayout(); + + if (conversation) { + this.setConversation(conversation); + } + }, + + properties: { + conversation: { + check: "osparc.data.model.Conversation", + init: null, + nullable: true, + event: "changeConversation", + apply: "_applyConversation", + }, + }, + + events: { + "messagesChanged": "qx.event.type.Event", + }, + + members: { + _messages: null, + + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "spacer-top": + control = new qx.ui.core.Spacer(); + this._addAt(control, 0, { + flex: 100 // high number to keep even a one message list at the bottom + }); + break; + case "messages-container-scroll": + control = new qx.ui.container.Scroll(); + this._addAt(control, 1, { + flex: 1 + }); + break; + case "messages-container": + control = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)).set({ + alignY: "middle" + }); + this.getChildControl("messages-container-scroll").add(control); + break; + case "load-more-button": + control = new osparc.ui.form.FetchButton(this.tr("Load more messages...")); + control.addListener("execute", () => this.__reloadMessages(false)); + this._addAt(control, 2); + break; + case "add-message": + control = new osparc.conversation.AddMessage().set({ + padding: 5, + }); + this.bind("conversation", control, "conversationId", { + converter: conversation => conversation ? 
conversation.getConversationId() : null + }); + this._addAt(control, 3); + break; + } + return control || this.base(arguments, id); + }, + + _buildLayout: function() { + this.getChildControl("spacer-top"); + this.getChildControl("messages-container"); + this.getChildControl("add-message"); + }, + + _applyConversation: function(conversation) { + this.__reloadMessages(true); + + if (conversation) { + conversation.addListener("messageAdded", e => { + const data = e.getData(); + this.addMessage(data); + }); + conversation.addListener("messageUpdated", e => { + const data = e.getData(); + this.updateMessage(data); + }); + conversation.addListener("messageDeleted", e => { + const data = e.getData(); + this.deleteMessage(data); + }); + } + }, + + __reloadMessages: function(removeMessages = true) { + if (removeMessages) { + this.clearAllMessages(); + } + + const loadMoreMessages = this.getChildControl("load-more-button"); + if (this.getConversation() === null) { + loadMoreMessages.hide(); + return; + } + + loadMoreMessages.show(); + loadMoreMessages.setFetching(true); + this.getConversation().getNextMessages() + .then(resp => { + const messages = resp["data"]; + messages.forEach(message => this.addMessage(message)); + if (resp["_links"]["next"] === null && loadMoreMessages) { + loadMoreMessages.exclude(); + } + }) + .finally(() => loadMoreMessages.setFetching(false)); + }, + + _createMessageUI: function(message) { + return new osparc.conversation.MessageUI(message); + }, + + getMessages: function() { + return this._messages; + }, + + clearAllMessages: function() { + this._messages = []; + this.getChildControl("messages-container").removeAll(); + + this.fireEvent("messagesChanged"); + }, + + addMessage: function(message) { + // ignore it if it was already there + const messageIndex = this._messages.findIndex(msg => msg["messageId"] === message["messageId"]); + if (messageIndex !== -1) { + return; + } + + // determine insertion index for latest‐first order + const newTime = new Date(message["created"]); + let insertAt = this._messages.findIndex(m => new Date(m["created"]) > newTime); + if (insertAt === -1) { + insertAt = this._messages.length; + } + + // Insert the message in the messages array + this._messages.splice(insertAt, 0, message); + + // Add the UI element to the messages list + let control = null; + switch (message["type"]) { + case "MESSAGE": + control = this._createMessageUI(message); + control.addListener("messageUpdated", e => this.updateMessage(e.getData())); + control.addListener("messageDeleted", e => this.deleteMessage(e.getData())); + break; + case "NOTIFICATION": + control = new osparc.conversation.NotificationUI(message); + break; + } + if (control) { + // insert into the UI at the same position + const messagesContainer = this.getChildControl("messages-container"); + messagesContainer.addAt(control, insertAt); + } + + // scroll to bottom + // add timeout to ensure the scroll happens after the UI is updated + setTimeout(() => { + const messagesScroll = this.getChildControl("messages-container-scroll"); + messagesScroll.scrollToY(messagesScroll.getChildControl("pane").getScrollMaxY()); + }, 50); + + this.fireEvent("messagesChanged"); + }, + + deleteMessage: function(message) { + // remove it from the messages array + const messageIndex = this._messages.findIndex(msg => msg["messageId"] === message["messageId"]); + if (messageIndex === -1) { + return; + } + this._messages.splice(messageIndex, 1); + + // Remove the UI element from the messages list + const messagesContainer = 
this.getChildControl("messages-container"); + const children = messagesContainer.getChildren(); + const controlIndex = children.findIndex( + ctrl => ("getMessage" in ctrl && ctrl.getMessage()["messageId"] === message["messageId"]) + ); + if (controlIndex > -1) { + messagesContainer.remove(children[controlIndex]); + } + + this.fireEvent("messagesChanged"); + }, + + updateMessage: function(message) { + // Replace the message in the messages array + const messageIndex = this._messages.findIndex(msg => msg["messageId"] === message["messageId"]); + if (messageIndex === -1) { + return; + } + this._messages[messageIndex] = message; + + // Update the UI element from the messages list + const messagesContainer = this.getChildControl("messages-container"); + const messageUI = messagesContainer.getChildren().find(control => { + return "getMessage" in control && control.getMessage()["messageId"] === message["messageId"]; + }); + if (messageUI) { + // Force a new reference + messageUI.setMessage(Object.assign({}, message)); + } + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/conversation/MessageUI.js b/services/static-webserver/client/source/class/osparc/conversation/MessageUI.js new file mode 100644 index 000000000000..d7ded5e9d36c --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/conversation/MessageUI.js @@ -0,0 +1,240 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2023 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.conversation.MessageUI", { + extend: qx.ui.core.Widget, + + /** + * @param message {Object} message data + * @param studyData {Object?null} serialized Study Data + */ + construct: function(message, studyData = null) { + this.base(arguments); + + this.__studyData = studyData; + + this._setLayout(new qx.ui.layout.HBox(10)); + this.setPadding(5); + + this.set({ + message, + }); + }, + + statics: { + isMyMessage: function(message) { + if (message["userGroupId"] === "system") { + return false; + } + return message && osparc.auth.Data.getInstance().getGroupId() === message["userGroupId"]; + } + }, + + events: { + "messageUpdated": "qx.event.type.Data", + "messageDeleted": "qx.event.type.Data", + }, + + properties: { + message: { + check: "Object", + init: null, + nullable: false, + apply: "__applyMessage", + }, + }, + + members: { + _createChildControlImpl: function(id) { + const isMyMessage = this.self().isMyMessage(this.getMessage()); + let control; + switch (id) { + case "avatar": + control = new osparc.ui.basic.UserThumbnail(32).set({ + marginTop: 4, + alignY: "top", + }); + this._addAt(control, isMyMessage ? 1 : 0); + break; + case "main-layout": + control = new qx.ui.container.Composite(new qx.ui.layout.VBox(2).set({ + alignX: isMyMessage ? "right" : "left" + })); + this._addAt(control, isMyMessage ? 0 : 1, { flex: 1}); + break; + case "header-layout": + control = new qx.ui.container.Composite(new qx.ui.layout.HBox(5).set({ + alignX: isMyMessage ? 
"right" : "left" + })); + control.addAt(new qx.ui.basic.Label("-"), 1); + this.getChildControl("main-layout").addAt(control, 0); + break; + case "user-name": + control = new qx.ui.basic.Label().set({ + font: "text-12", + textColor: "text-disabled", + }); + this.getChildControl("header-layout").addAt(control, isMyMessage ? 2 : 0); + break; + case "last-updated": + control = new qx.ui.basic.Label().set({ + font: "text-12", + textColor: "text-disabled", + }); + this.getChildControl("header-layout").addAt(control, isMyMessage ? 0 : 2); + break; + case "message-bubble": + control = new qx.ui.container.Composite(new qx.ui.layout.VBox().set({ + alignX: isMyMessage ? "right" : "left" + })).set({ + decorator: "chat-bubble", + allowGrowX: false, + padding: 8, + }); + const bubbleStyle = isMyMessage ? { "border-top-right-radius": "0px" } : { "border-top-left-radius": "0px" }; + control.getContentElement().setStyles(bubbleStyle); + this.getChildControl("main-layout").addAt(control, 1); + break; + case "message-content": + control = new osparc.ui.markdown.MarkdownChat(); + this.getChildControl("message-bubble").add(control); + break; + case "menu-button": { + const buttonSize = 22; + control = new qx.ui.form.MenuButton().set({ + width: buttonSize, + height: buttonSize, + allowGrowX: false, + allowGrowY: false, + marginTop: 4, + alignY: "top", + icon: "@FontAwesome5Solid/ellipsis-v/12", + focusable: false + }); + this._addAt(control, 2); + break; + } + } + + return control || this.base(arguments, id); + }, + + __applyMessage: function(message) { + const createdDateData = new Date(message["created"]); + const createdDate = osparc.utils.Utils.formatDateAndTime(createdDateData); + const lastUpdate = this.getChildControl("last-updated"); + if (message["created"] === message["modified"]) { + lastUpdate.setValue(createdDate); + } else { + const updatedDateData = new Date(message["modified"]); + const updatedDate = osparc.utils.Utils.formatDateAndTime(updatedDateData); + lastUpdate.setValue(createdDate + " (" + this.tr("edited") + " "+ updatedDate + ")"); + } + + const messageContent = this.getChildControl("message-content"); + messageContent.setValue(message["content"]); + + const avatar = this.getChildControl("avatar"); + const userName = this.getChildControl("user-name"); + if (message["userGroupId"] === "system") { + userName.setValue("Support"); + } else { + osparc.store.Users.getInstance().getUser(message["userGroupId"]) + .then(user => { + avatar.setUser(user); + userName.setValue(user ? 
user.getLabel() : "Unknown user"); + }) + .catch(() => { + avatar.setSource(osparc.utils.Avatar.emailToThumbnail()); + userName.setValue("Unknown user"); + }); + } + + if (this.self().isMyMessage(message)) { + const menuButton = this.getChildControl("menu-button"); + + const menu = new qx.ui.menu.Menu().set({ + position: "bottom-right", + }); + menuButton.setMenu(menu); + + const editButton = new qx.ui.menu.Button(this.tr("Edit...")); + editButton.addListener("execute", () => this.__editMessage(), this); + menu.add(editButton); + + const deleteButton = new qx.ui.menu.Button(this.tr("Delete...")); + deleteButton.addListener("execute", () => this.__deleteMessage(), this); + menu.add(deleteButton); + } + }, + + __editMessage: function() { + const message = this.getMessage(); + + const addMessage = new osparc.conversation.AddMessage().set({ + studyData: this.__studyData, + conversationId: message["conversationId"], + message, + }); + const title = this.tr("Edit message"); + const win = osparc.ui.window.Window.popUpInWindow(addMessage, title, 570, 135).set({ + clickAwayClose: false, + resizable: true, + showClose: true, + }); + addMessage.addListener("updateMessage", e => { + const content = e.getData(); + const conversationId = message["conversationId"]; + const messageId = message["messageId"]; + if (this.__studyData) { + promise = osparc.store.ConversationsProject.getInstance().editMessage(this.__studyData["uuid"], conversationId, messageId, content); + } else { + promise = osparc.store.ConversationsSupport.getInstance().editMessage(conversationId, messageId, content); + } + promise.then(data => { + win.close(); + this.fireDataEvent("messageUpdated", data); + }); + }); + }, + + __deleteMessage: function() { + const message = this.getMessage(); + + const win = new osparc.ui.window.Confirmation(this.tr("Delete message?")).set({ + caption: this.tr("Delete"), + confirmText: this.tr("Delete"), + confirmAction: "delete", + }); + win.open(); + win.addListener("close", () => { + if (win.getConfirmed()) { + let promise = null; + if (this.__studyData) { + promise = osparc.store.ConversationsProject.getInstance().deleteMessage(message); + } else { + promise = osparc.store.ConversationsSupport.getInstance().deleteMessage(message); + } + promise + .then(() => this.fireDataEvent("messageDeleted", message)) + .catch(err => osparc.FlashMessenger.logError(err)); + } + }); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/conversation/NotificationUI.js b/services/static-webserver/client/source/class/osparc/conversation/NotificationUI.js new file mode 100644 index 000000000000..34247eea09ec --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/conversation/NotificationUI.js @@ -0,0 +1,148 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2023 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.conversation.NotificationUI", { + extend: qx.ui.core.Widget, + + /** + * @param message {Object} message + */ + construct: function(message) { + this.base(arguments); + + const isMyMessage = osparc.conversation.MessageUI.isMyMessage(message); + const layout = new qx.ui.layout.Grid(4, 4); + layout.setColumnFlex(isMyMessage ? 
0 : 3, 3); // spacer + layout.setRowAlign(0, "center", "middle"); + this._setLayout(layout); + this.setPadding(5); + + this.set({ + message, + }); + }, + + properties: { + message: { + check: "Object", + init: null, + nullable: false, + apply: "__applyMessage", + }, + }, + + members: { + // spacer - date - content - (thumbnail-spacer) + // (thumbnail-spacer) - content - date - spacer + _createChildControlImpl: function(id) { + const isMyMessage = osparc.conversation.MessageUI.isMyMessage(this.getMessage()); + let control; + switch (id) { + case "thumbnail-spacer": + control = new qx.ui.core.Spacer().set({ + width: 32, + }); + this._add(control, { + row: 0, + column: isMyMessage ? 3 : 0, + }); + break; + case "message-content": + control = new qx.ui.basic.Label().set({ + }); + control.getContentElement().setStyles({ + "text-align": isMyMessage ? "right" : "left", + }); + this._add(control, { + row: 0, + column: isMyMessage ? 2 : 1, + }); + break; + case "last-updated": + control = new qx.ui.basic.Label().set({ + font: "text-12" + }); + this._add(control, { + row: 0, + column: isMyMessage ? 1 : 2, + }); + break; + case "spacer": + control = new qx.ui.core.Spacer(); + this._add(control, { + row: 0, + column: isMyMessage ? 0 : 3, + }); + break; + } + + return control || this.base(arguments, id); + }, + + __applyMessage: function(message) { + this._removeAll(); + + this.getChildControl("thumbnail-spacer"); + + const isMyMessage = osparc.conversation.MessageUI.isMyMessage(message); + + const modifiedDate = new Date(message["modified"]); + const date = osparc.utils.Utils.formatDateAndTime(modifiedDate); + const lastUpdate = this.getChildControl("last-updated"); + lastUpdate.setValue(isMyMessage ? date + " -" : " - " + date); + + const messageContent = this.getChildControl("message-content"); + const notifierUserGroupId = parseInt(message["userGroupId"]); + const notifiedUserGroupId = parseInt(message["content"]); + let msgContent = "🔔 "; + Promise.all([ + osparc.store.Users.getInstance().getUser(notifierUserGroupId), + osparc.store.Users.getInstance().getUser(notifiedUserGroupId), + ]) + .then(values => { + const notifierUser = values[0]; + if (isMyMessage) { + msgContent += "You"; + } else if (notifierUser) { + msgContent += notifierUser.getLabel(); + } else { + msgContent += "unknown user"; + } + + msgContent += " notified "; + + const notifiedUser = values[1]; + if (osparc.auth.Data.getInstance().getGroupId() === notifiedUserGroupId) { + msgContent += "You"; + } else if (notifiedUser) { + msgContent += notifiedUser.getLabel(); + } else { + msgContent += "unknown user"; + } + }) + .catch(() => { + msgContent += "unknown user notified"; + }) + .finally(() => { + messageContent.setValue(msgContent); + }); + + this.getChildControl("spacer"); + } + } +}); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/AppBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/AppBrowser.js index de4f2614836f..fb73d5d3d4aa 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/AppBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/AppBrowser.js @@ -42,6 +42,7 @@ qx.Class.define("osparc.dashboard.AppBrowser", { } this._resourcesInitialized = true; + this._showLoadingPage(this.tr("Loading Apps...")); this._resourcesList = []; Promise.all([ osparc.store.Services.getServicesLatest(), @@ -56,26 +57,28 @@ qx.Class.define("osparc.dashboard.AppBrowser", { msg += "
"; msg += this.tr("Please contact us:"); msg += "
"; - const supportEmail = osparc.store.VendorInfo.getInstance().getSupportEmail(); + const supportEmail = osparc.store.VendorInfo.getSupportEmail(); msg += supportEmail; osparc.FlashMessenger.getInstance().logAs(msg, "WARNING"); } this.getChildControl("resources-layout"); this.reloadResources(); - this._hideLoadingPage(); }); }, reloadResources: function(useCache = true) { - this.__loadServices(); - this.__loadHypertools(useCache); + Promise.all([ + this.__loadServices(), + this.__loadHypertools(useCache), + ]) + .finally(() => this._hideLoadingPage()); }, __loadServices: function() { const excludeFrontend = true; const excludeDeprecated = true - osparc.store.Services.getServicesLatestList(excludeFrontend, excludeDeprecated) + return osparc.store.Services.getServicesLatestList(excludeFrontend, excludeDeprecated) .then(servicesList => { servicesList.forEach(service => service["resourceType"] = "service"); this._resourcesList.push(...servicesList.filter(service => service !== null)); @@ -84,7 +87,7 @@ qx.Class.define("osparc.dashboard.AppBrowser", { }, __loadHypertools: function(useCache = true) { - osparc.store.Templates.getHypertools(useCache) + return osparc.store.Templates.getHypertools(useCache) .then(hypertoolsList => { hypertoolsList.forEach(hypertool => hypertool["resourceType"] = "hypertool"); this._resourcesList.push(...hypertoolsList.filter(hypertool => hypertool !== null)); @@ -185,7 +188,7 @@ qx.Class.define("osparc.dashboard.AppBrowser", { }, __addNewServiceButtons: function() { - const platformName = osparc.store.StaticInfo.getInstance().getPlatformName(); + const platformName = osparc.store.StaticInfo.getPlatformName(); const hasRights = osparc.data.Permissions.getInstance().canDo("studies.template.create.productAll"); if (platformName === "dev") { const testDataButton = new qx.ui.form.Button(this.tr("Test with data"), "@FontAwesome5Solid/plus-circle/14"); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js b/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js index 8d65da2689b5..910ddca5cf24 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js @@ -41,6 +41,7 @@ qx.Class.define("osparc.dashboard.CardBase", { "updateStudy": "qx.event.type.Data", "updateTemplate": "qx.event.type.Data", "updateTutorial": "qx.event.type.Data", + "updateFunction": "qx.event.type.Data", "updateService": "qx.event.type.Data", "updateHypertool": "qx.event.type.Data", "publishTemplate": "qx.event.type.Data", @@ -51,6 +52,7 @@ qx.Class.define("osparc.dashboard.CardBase", { statics: { SHARE_ICON: "@FontAwesome5Solid/share-alt/13", SHARED_USER: "@FontAwesome5Solid/user/13", + SHARED_SUPPORT: "@FontAwesome5Solid/question-circle/13", SHARED_ORGS: "@FontAwesome5Solid/users/13", SHARED_ALL: "@FontAwesome5Solid/globe/13", PERM_READ: "@FontAwesome5Solid/eye/13", @@ -147,10 +149,7 @@ qx.Class.define("osparc.dashboard.CardBase", { return false; } case "shared-with-everyone": { - const everyoneGroupIds = [ - groupsStore.getEveryoneProductGroup().getGroupId(), - groupsStore.getEveryoneGroup().getGroupId(), - ]; + const everyoneGroupIds = groupsStore.getEveryoneGroupIds(); const found = Object.keys(checks).some(gId => everyoneGroupIds.includes(parseInt(gId))); // show those that are shared with "1" or product everyone's groupId return !found; @@ -189,19 +188,26 @@ qx.Class.define("osparc.dashboard.CardBase", { // Icon const groupsStore = 
osparc.store.Groups.getInstance(); - const groupEveryone = groupsStore.getEveryoneGroup(); - const groupProductEveryone = groupsStore.getEveryoneProductGroup(); + const everyoneGroupIds = groupsStore.getEveryoneGroupIds(); + const supportGroup = groupsStore.getSupportGroup(); const organizations = groupsStore.getOrganizations(); const myGroupId = groupsStore.getMyGroupId(); const organizationIds = Object.keys(organizations).map(key => parseInt(key)); - if (gids.includes(groupEveryone.getGroupId()) || gids.includes(groupProductEveryone.getGroupId())) { + if (gids.some(gid => everyoneGroupIds.includes(gid))) { + // shared with "1" or product everyone shareIcon.setSource(osparc.dashboard.CardBase.SHARED_ALL); + } else if (supportGroup && gids.includes(supportGroup.getGroupId())) { + // shared with support group, show as if it was a group + shareIcon.setSource(osparc.dashboard.CardBase.SHARED_ORGS); } else if (organizationIds.filter(value => gids.includes(value)).length) { // find intersection + // shared with at least one organization shareIcon.setSource(osparc.dashboard.CardBase.SHARED_ORGS); } else if (gids.length === 1 && gids[0] === myGroupId) { + // not shared shareIcon.setSource(osparc.dashboard.CardBase.SHARE_ICON); } else { + // shared with some users shareIcon.setSource(osparc.dashboard.CardBase.SHARED_USER); } @@ -219,16 +225,25 @@ qx.Class.define("osparc.dashboard.CardBase", { this.addHintFromGids(shareIcon, gids); }, + populateMyAccessRightsIcon: function(shareIcon, myAccessRights) { + const canIWrite = Boolean(myAccessRights["write"]); + shareIcon.set({ + source: canIWrite ? osparc.dashboard.CardBase.SHARE_ICON : osparc.dashboard.CardBase.SHARED_USER, + toolTipText: canIWrite ? "" : qx.locale.Manager.tr("Shared"), + }); + }, + addHintFromGids: function(icon, gids) { const groupsStore = osparc.store.Groups.getInstance(); - const groupEveryone = groupsStore.getEveryoneGroup(); - const groupProductEveryone = groupsStore.getEveryoneProductGroup(); + const everyoneGroups = groupsStore.getEveryoneGroups(); + const supportGroup = groupsStore.getSupportGroup(); const organizations = groupsStore.getOrganizations(); const myGroupId = groupsStore.getMyGroupId(); - const groups = []; - groups.push(groupEveryone); - groups.push(groupProductEveryone); + const groups = everyoneGroups.slice(); + if (supportGroup) { + groups.push(supportGroup); + } groups.push(...Object.values(organizations)); const sharedGrps = []; groups.forEach(group => { @@ -265,10 +280,14 @@ qx.Class.define("osparc.dashboard.CardBase", { sharedGrpLabels.push("..."); break; } - let sharedGrpLabel = sharedGrps[i].getLabel(); - if ([groupEveryone, groupProductEveryone].includes(sharedGrps[i])) { + const sharedGroup = sharedGrps[i]; + let sharedGrpLabel = sharedGroup.getLabel(); + if (everyoneGroups.includes(sharedGroup)) { sharedGrpLabel = "Public"; } + if (supportGroup && supportGroup.getGroupId() === sharedGroup.getGroupId()) { + sharedGrpLabel = supportGroup.getLabel(); + } if (!sharedGrpLabels.includes(sharedGrpLabel)) { sharedGrpLabels.push(sharedGrpLabel); } @@ -326,6 +345,7 @@ qx.Class.define("osparc.dashboard.CardBase", { check: [ "study", "template", + "function", "tutorial", "hypertool", "service", @@ -405,7 +425,7 @@ qx.Class.define("osparc.dashboard.CardBase", { services: { check: "Array", init: true, - nullable: false, + nullable: true, apply: "__applyServices", event: "changeServices", }, @@ -508,12 +528,13 @@ qx.Class.define("osparc.dashboard.CardBase", { const studyBrowserContext = 
osparc.store.Store.getInstance().getStudyBrowserContext(); return ( this.getBlocked() === true || // It could be blocked by IN_USE or UNKNOWN_SERVICE - (this.isResourceType("study") && (studyBrowserContext === "trash")) // It could a trashed study + (this.isResourceType("study") && (studyBrowserContext === osparc.dashboard.StudyBrowser.CONTEXT.TRASH)) // It could a trashed study ); }, __applyResourceData: function(resourceData) { let uuid = null; + let title = ""; let owner = null; let workbench = null; let defaultHits = null; @@ -524,11 +545,19 @@ qx.Class.define("osparc.dashboard.CardBase", { case "tutorial": case "hypertool": uuid = resourceData.uuid ? resourceData.uuid : null; + title = resourceData.name, owner = resourceData.prjOwner ? resourceData.prjOwner : ""; workbench = resourceData.workbench ? resourceData.workbench : {}; break; + case "function": + uuid = resourceData.uuid ? resourceData.uuid : null; + title = resourceData.title, + owner = ""; + workbench = resourceData.workbench ? resourceData.workbench : {}; + break; case "service": uuid = resourceData.key ? resourceData.key : null; + title = resourceData.name, owner = resourceData.owner ? resourceData.owner : resourceData.contact; icon = resourceData["icon"] || osparc.dashboard.CardBase.PRODUCT_ICON; defaultHits = 0; @@ -538,7 +567,7 @@ qx.Class.define("osparc.dashboard.CardBase", { this.set({ resourceType: resourceData.resourceType, uuid, - title: resourceData.name, + title, description: resourceData.description, owner, accessRights: resourceData.accessRights ? resourceData.accessRights : {}, @@ -555,22 +584,35 @@ qx.Class.define("osparc.dashboard.CardBase", { workbench }); - if ([ - "study", - "template", - "tutorial", - "hypertool" - ].includes(resourceData["resourceType"])) { - osparc.store.Services.getStudyServices(resourceData.uuid) - .then(resp => { - const services = resp["services"]; - resourceData["services"] = services; - this.setServices(services); - }) - .catch(err => console.error(err)); - - osparc.study.Utils.guessIcon(resourceData) - .then(iconSource => this.setIcon(iconSource)); + switch (resourceData["resourceType"]) { + case "study": + case "template": + case "tutorial": + case "hypertool": { + osparc.store.Services.getStudyServices(resourceData.uuid) + .then(resp => { + const services = resp["services"]; + resourceData["services"] = services; + this.setServices(services); + }) + .catch(err => { + resourceData["services"] = null; + this.setServices(null); + console.error(err); + }); + + osparc.study.Utils.guessIcon(resourceData) + .then(iconSource => this.setIcon(iconSource)); + + break; + } + case "function": + if (resourceData["functionClass"] === osparc.data.model.Function.FUNCTION_CLASS.PROJECT) { + this.setIcon(osparc.data.model.StudyUI.PIPELINE_ICON); + } else { + this.setIcon(osparc.dashboard.CardBase.PRODUCT_ICON); + } + break; } }, @@ -688,27 +730,38 @@ qx.Class.define("osparc.dashboard.CardBase", { }, __applyServices: function(services) { - this.setEmptyWorkbench(services.length === 0); - - // Updatable study - if (osparc.study.Utils.anyServiceRetired(services)) { - this.setUpdatable("retired"); - } else if (osparc.study.Utils.anyServiceDeprecated(services)) { - this.setUpdatable("deprecated"); - } else if (osparc.study.Utils.anyServiceUpdatable(services)) { - this.setUpdatable("updatable"); - } - - // Block card - const cantReadServices = osparc.study.Utils.getCantExecuteServices(services); - if (cantReadServices.length) { + const unknownServices = cantReadServices => { + // Block card 
this.setBlocked("UNKNOWN_SERVICES"); const image = "@FontAwesome5Solid/ban/"; - let toolTipText = this.tr("Inaccessible service(s):"); - cantReadServices.forEach(unSrv => { - toolTipText += "
" + unSrv.key + ":" + osparc.service.Utils.extractVersionDisplay(unSrv.release); - }); + let toolTipText = this.tr("Unknown service(s)"); + if (cantReadServices && cantReadServices.length) { + toolTipText = this.tr("Inaccessible service(s)"); + cantReadServices.forEach(unSrv => { + toolTipText += "
" + unSrv.key + ":" + osparc.service.Utils.extractVersionDisplay(unSrv.release); + }); + } this.__showBlockedCard(image, toolTipText); + }; + + if (services) { + this.setEmptyWorkbench(services.length === 0); + + // Updatable study + if (osparc.study.Utils.anyServiceRetired(services)) { + this.setUpdatable("retired"); + } else if (osparc.study.Utils.anyServiceDeprecated(services)) { + this.setUpdatable("deprecated"); + } else if (osparc.study.Utils.anyServiceUpdatable(services)) { + this.setUpdatable("updatable"); + } + + const cantReadServices = osparc.study.Utils.getCantReadServices(services); + if (cantReadServices.length) { + unknownServices(cantReadServices); + } + } else { + unknownServices(); } this.evaluateMenuButtons(); @@ -753,35 +806,38 @@ qx.Class.define("osparc.dashboard.CardBase", { }, __applyState: function(state) { - let lockInUse = false; - if ("locked" in state && "value" in state["locked"]) { - lockInUse = state["locked"]["value"]; - } - this.setBlocked(lockInUse ? "IN_USE" : false); - if (lockInUse) { - this.__showBlockedCardFromStatus("IN_USE", state["locked"]); + const projectLocked = osparc.study.Utils.state.isProjectLocked(state); + const currentUserGroupIds = osparc.study.Utils.state.getCurrentGroupIds(state); + const pipelineState = osparc.study.Utils.state.getPipelineState(state); + + this.__showCurrentUserGroupIds(currentUserGroupIds); + + this.setBlocked(projectLocked ? "IN_USE" : false); + if (projectLocked) { + this.__showBlockedCardFromStatus("IN_USE", state); } - const pipelineState = ("state" in state) ? state["state"]["value"] : undefined; if (pipelineState) { - this.__applyPipelineState(state["state"]["value"]); + this.__applyPipelineState(pipelineState); } }, __applyDebt: function(debt) { this.setBlocked(debt ? "IN_DEBT" : false); if (debt) { - this.__showBlockedCardFromStatus("IN_DEBT", debt); + this.__showBlockedCardFromStatus("IN_DEBT"); } }, - // pipelineState: ["NOT_STARTED", "STARTED", "SUCCESS", "ABORTED", "FAILED", "UNKNOWN"] + // pipelineState: ["NOT_STARTED", "PUBLISHED", "STOPPING", "STARTED", "SUCCESS", "ABORTED", "FAILED", "UNKNOWN"] __applyPipelineState: function(pipelineState) { let iconSource; let toolTipText; let borderColor; switch (pipelineState) { + case "PUBLISHED": case "STARTED": + case "STOPPING": iconSource = "@FontAwesome5Solid/spinner/10"; toolTipText = this.tr("Running"); borderColor = "info"; @@ -837,48 +893,70 @@ qx.Class.define("osparc.dashboard.CardBase", { }); }, - __showBlockedCardFromStatus: function(reason, moreInfo) { + __showCurrentUserGroupIds: function(currentUserGroupIds) { + const avatarGroup = this.getChildControl("avatar-group"); + avatarGroup.setUserGroupIds(currentUserGroupIds); + }, + + __showBlockedCardFromStatus: function(reason, state) { switch (reason) { case "IN_USE": - this.__blockedInUse(moreInfo); + this.__blockedInUse(state); break; case "IN_DEBT": - this.__blockedInDebt(moreInfo); + this.__blockedInDebt(); break; } }, - __blockedInUse: function(lockedStatus) { - const status = lockedStatus["status"]; - const owner = lockedStatus["owner"]; - let toolTip = osparc.utils.Utils.firstsUp(owner["first_name"] || this.tr("A user"), owner["last_name"] || ""); // it will be replaced by "userName" + __blockedInUse: function(state) { + const projectStatus = osparc.study.Utils.state.getProjectStatus(state); + const currentUserGroupIds = osparc.study.Utils.state.getCurrentGroupIds(state); + const usersStore = osparc.store.Users.getInstance(); + const userPromises = currentUserGroupIds.map(userGroupId => 
usersStore.getUser(userGroupId)); + const userNames = []; + let toolTip = ""; let image = null; - switch (status) { - case "CLOSING": - image = "@FontAwesome5Solid/key/"; - toolTip += this.tr(" is closing it..."); - break; - case "CLONING": - image = "@FontAwesome5Solid/clone/"; - toolTip += this.tr(" is cloning it..."); - break; - case "EXPORTING": - image = osparc.task.Export.ICON+"/"; - toolTip += this.tr(" is exporting it..."); - break; - case "OPENING": - image = "@FontAwesome5Solid/key/"; - toolTip += this.tr(" is opening it..."); - break; - case "OPENED": - image = "@FontAwesome5Solid/lock/"; - toolTip += this.tr(" is using it."); - break; - default: - image = "@FontAwesome5Solid/lock/"; - break; - } - this.__showBlockedCard(image, toolTip); + Promise.all(userPromises) + .then(usersResult => { + usersResult.forEach(user => { + userNames.push(user.getUserName()); + }); + }) + .catch(error => { + console.error("Failed to fetch user data for avatars:", error); + }) + .finally(() => { + switch (projectStatus) { + case "CLOSING": + image = "@FontAwesome5Solid/key/"; + toolTip += this.tr("Closing..."); + break; + case "CLONING": + image = "@FontAwesome5Solid/clone/"; + toolTip += this.tr("Cloning..."); + break; + case "EXPORTING": + image = osparc.task.Export.ICON+"/"; + toolTip += this.tr("Exporting..."); + break; + case "OPENING": + image = "@FontAwesome5Solid/key/"; + toolTip += this.tr("Opening..."); + break; + case "OPENED": + image = "@FontAwesome5Solid/lock/"; + toolTip += this.tr("In use..."); + break; + default: + image = "@FontAwesome5Solid/lock/"; + break; + } + userNames.forEach(userName => { + toolTip += "
" + userName; + }); + this.__showBlockedCard(image, toolTip); + }); }, __blockedInDebt: function() { @@ -985,8 +1063,17 @@ qx.Class.define("osparc.dashboard.CardBase", { throw new Error("Abstract method called!"); }, - _applyMenu: function(value, old) { - throw new Error("Abstract method called!"); + _applyMenu: function(menu) { + const menuButton = this.getChildControl("menu-button"); + if (menu) { + menuButton.setMenu(menu).set({ + appearance: "menu-wider", + position: "bottom-left", + }); + osparc.utils.Utils.setIdToWidget(menu, "studyItemMenuMenu"); + menu.addListener("appear", () => this.evaluateMenuButtons()); + } + menuButton.setVisibility(menu ? "visible" : "excluded"); }, _setStudyPermissions: function(accessRights) { @@ -1000,28 +1087,32 @@ qx.Class.define("osparc.dashboard.CardBase", { __openResourceDetails: function(openWindowCB) { const resourceData = this.getResourceData(); - const resourceDetails = new osparc.dashboard.ResourceDetails(resourceData); + const { + resourceDetails, + window, + } = osparc.dashboard.ResourceDetails.popUpInWindow(resourceData); + resourceDetails.addListenerOnce("pagesAdded", () => { if (openWindowCB in resourceDetails) { resourceDetails[openWindowCB](); } - }) - const win = osparc.dashboard.ResourceDetails.popUpInWindow(resourceDetails); + }); [ "updateStudy", "updateTemplate", "updateTutorial", + "updateFunction", "updateService", "updateHypertool", ].forEach(ev => { resourceDetails.addListener(ev, e => this.fireDataEvent(ev, e.getData())); }); resourceDetails.addListener("publishTemplate", e => { - win.close(); + window.close(); this.fireDataEvent("publishTemplate", e.getData()); }); resourceDetails.addListener("openStudy", e => { - const openCB = () => win.close(); + const openCB = () => window.close(); const studyId = e.getData()["uuid"]; const isStudyCreation = false; this._startStudyById(studyId, openCB, null, isStudyCreation); @@ -1035,7 +1126,7 @@ qx.Class.define("osparc.dashboard.CardBase", { openData: function() { const resourceData = this.getResourceData(); - osparc.widget.StudyDataManager.popUpInWindow(resourceData["uuid"]); + osparc.widget.StudyDataManager.popUpInWindow(resourceData); }, openBilling: function() { diff --git a/services/static-webserver/client/source/class/osparc/dashboard/CardContainer.js b/services/static-webserver/client/source/class/osparc/dashboard/CardContainer.js index 047b047e8f7a..281b16bfb4bc 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/CardContainer.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/CardContainer.js @@ -105,7 +105,11 @@ qx.Class.define("osparc.dashboard.CardContainer", { }, areMoreResourcesRequired: function(loadingResourcesBtn) { - if (this.nextRequest !== null && loadingResourcesBtn && osparc.utils.Utils.isWidgetOnScreen(loadingResourcesBtn)) { + if ( + this.nextRequest !== null && + loadingResourcesBtn && + osparc.utils.Utils.isWidgetOnScreen(loadingResourcesBtn) + ) { return true; } return false; diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ContextBreadcrumbs.js b/services/static-webserver/client/source/class/osparc/dashboard/ContextBreadcrumbs.js index 4358bc68848b..60155dedd0ae 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ContextBreadcrumbs.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ContextBreadcrumbs.js @@ -55,18 +55,16 @@ qx.Class.define("osparc.dashboard.ContextBreadcrumbs", { this._removeAll(); const currentContext = 
osparc.store.Store.getInstance().getStudyBrowserContext(); - if (currentContext !== "studiesAndFolders") { - return; - } - - if (this.getCurrentFolderId()) { - const currentFolder = osparc.store.Folders.getInstance().getFolder(this.getCurrentFolderId()); - this.__createUpstreamButtons(currentFolder); - } + if (currentContext === osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS) { + if (this.getCurrentFolderId()) { + const currentFolder = osparc.store.Folders.getInstance().getFolder(this.getCurrentFolderId()); + this.__createUpstreamButtons(currentFolder); + } - const currentFolderButton = this.__createCurrentFolderButton(); - if (currentFolderButton) { - this._add(currentFolderButton); + const currentFolderButton = this.__createCurrentFolderButton(); + if (currentFolderButton) { + this._add(currentFolderButton); + } } }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js b/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js index cb83065b35fd..5e54b68c3844 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js @@ -54,16 +54,12 @@ qx.Class.define("osparc.dashboard.Dashboard", { osparc.wrapper.JsonTreeViewer.getInstance().init(); osparc.wrapper.JsonFormatter.getInstance().init(); osparc.wrapper.DOMPurify.getInstance().init(); - osparc.wrapper.RadialMenu.getInstance().init() - .then(loaded => { - if (loaded) { - // hack to trigger fonts loading - const menu = osparc.wrapper.RadialMenu.getInstance().createMenu(); - menu.show(); - menu.hide(); - } - }); + osparc.wrapper.RadialMenu.getInstance().init(); + this.__createMainViewLayout(); + + + qx.event.message.Bus.getInstance().subscribe("showTab", msg => this.showTab(msg.getData()), this); }, properties: { @@ -99,6 +95,13 @@ qx.Class.define("osparc.dashboard.Dashboard", { return this.__appBrowser; }, + showTab: function(tabId) { + const tabFound = this.getSelectables().find(s => s.id === tabId); + if (tabFound) { + this.setSelection([tabFound]); + } + }, + __createMainViewLayout: function() { const permissions = osparc.data.Permissions.getInstance(); const tabIconSize = 20; @@ -209,7 +212,7 @@ qx.Class.define("osparc.dashboard.Dashboard", { selectedTab.resourceBrowser.initResources(); } else { const initTab = () => { - selectedTab.resourceBrowser.initResources() + selectedTab.resourceBrowser.initResources(); this.removeListener("preResourcesLoaded", initTab); }; this.addListener("preResourcesLoaded", initTab, this); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/DataBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/DataBrowser.js index f109994119c2..024cca2100a1 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/DataBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/DataBrowser.js @@ -43,7 +43,9 @@ qx.Class.define("osparc.dashboard.DataBrowser", { let control; switch (id) { case "tree-folder-view": - control = new osparc.file.TreeFolderView(); + control = new osparc.file.TreeFolderView().set({ + paddingBottom: 15, + }); this._addToLayout(control, { flex: 1 }); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js index dfd0b2453a3c..48fb0effa961 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js 
+++ b/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js @@ -276,19 +276,19 @@ qx.Class.define("osparc.dashboard.FolderButtonItem", { const studyBrowserContext = osparc.store.Store.getInstance().getStudyBrowserContext(); if ( - studyBrowserContext === "search" || - studyBrowserContext === "studiesAndFolders" + studyBrowserContext === osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS || + studyBrowserContext === osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS ) { const editButton = new qx.ui.menu.Button(this.tr("Rename..."), "@FontAwesome5Solid/pencil-alt/12"); editButton.addListener("execute", () => this.__editFolder(), this); menu.add(editButton); - if (studyBrowserContext === "search") { + if (studyBrowserContext === osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS) { const openLocationButton = new qx.ui.menu.Button(this.tr("Open location"), "@FontAwesome5Solid/external-link-alt/12"); openLocationButton.addListener("execute", () => { const folder = this.getFolder(); this.fireDataEvent("changeContext", { - context: "studiesAndFolders", + context: osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS, workspaceId: folder.getWorkspaceId(), folderId: folder.getParentFolderId(), }); @@ -306,7 +306,7 @@ qx.Class.define("osparc.dashboard.FolderButtonItem", { const trashButton = new qx.ui.menu.Button(this.tr("Delete"), "@FontAwesome5Solid/trash/12"); trashButton.addListener("execute", () => this.fireDataEvent("trashFolderRequested", this.getFolderId()), this); menu.add(trashButton); - } else if (studyBrowserContext === "trash") { + } else if (studyBrowserContext === osparc.dashboard.StudyBrowser.CONTEXT.TRASH) { const restoreButton = new qx.ui.menu.Button(this.tr("Restore"), "@MaterialIcons/restore_from_trash/16"); restoreButton.addListener("execute", () => this.fireDataEvent("untrashFolderRequested", this.getFolder()), this); menu.add(restoreButton); @@ -325,7 +325,7 @@ qx.Class.define("osparc.dashboard.FolderButtonItem", { __itemSelected: function() { const studyBrowserContext = osparc.store.Store.getInstance().getStudyBrowserContext(); // do not allow selecting workspace - if (studyBrowserContext !== "trash") { + if (studyBrowserContext !== osparc.dashboard.StudyBrowser.CONTEXT.TRASH) { this.fireDataEvent("folderSelected", this.getFolderId()); } }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonBase.js b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonBase.js index 34796bcec189..1a97ff977761 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonBase.js @@ -43,35 +43,41 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { statics: { ITEM_WIDTH: 190, ITEM_HEIGHT: 220, - PADDING: 10, - TITLE_PADDING: 6, - SPACING_IN: 5, + PADDING: 6, SPACING: 15, THUMBNAIL_SIZE: 50, POS: { TITLE: { row: 0, column: 0, - rowSpan: 1, - colSpan: 4 }, - THUMBNAIL: { - row: 2, + BODY: { + row: 1, column: 0, - rowSpan: 1, - colSpan: 4 - }, - TAGS: { - row: 3, - column: 0 }, FOOTER: { - row: 4, + row: 2, column: 0, - rowSpan: 1, - colSpan: 4 } }, + HPOS: { + ICON: { + column: 0, + row: 0, + }, + TITLE: { + column: 1, + row: 0, + }, + SUBTITLE_ICON: { + column: 0, + row: 1, + }, + SUBTITLE_TEXT: { + column: 1, + row: 1, + }, + }, FPOS: { MODIFIED: { row: 0, @@ -97,7 +103,7 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { row: 1, column: 2, } - } + }, }, events: { @@ -113,20 +119,21 @@ 
qx.Class.define("osparc.dashboard.GridButtonBase", { switch (id) { case "main-layout": { const grid = new qx.ui.layout.Grid(); - grid.setSpacing(this.self().SPACING_IN); - grid.setRowFlex(2, 1); + grid.setRowFlex(this.self().POS.BODY.row, 1); grid.setColumnFlex(0, 1); control = new qx.ui.container.Composite().set({ + minWidth: this.self().ITEM_WIDTH, + minHeight: this.self().ITEM_HEIGHT, maxWidth: this.self().ITEM_WIDTH, - maxHeight: this.self().ITEM_HEIGHT + maxHeight: this.self().ITEM_HEIGHT, }); control.setLayout(grid); const header = this.getChildControl("header"); const body = this.getChildControl("body"); const footer = this.getChildControl("footer"); control.add(header, this.self().POS.TITLE); - control.add(body, this.self().POS.THUMBNAIL); + control.add(body, this.self().POS.BODY); control.add(footer, this.self().POS.FOOTER); this._add(control, { top: 0, @@ -140,8 +147,8 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { const hGrid = new qx.ui.layout.Grid().set({ spacing: 6, }); - hGrid.setRowFlex(0, 1); - hGrid.setColumnFlex(1, 1); + hGrid.setRowFlex(this.self().HPOS.TITLE.row, 1); + hGrid.setColumnFlex(this.self().HPOS.TITLE.column, 1); hGrid.setColumnAlign(0, "right", "middle"); hGrid.setColumnAlign(1, "left", "middle"); hGrid.setColumnAlign(2, "center", "middle"); @@ -157,24 +164,17 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { break; } case "body": - control = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)).set({ - decorator: "main", - allowGrowY: true, - allowGrowX: true, - allowShrinkX: true, - padding: this.self().PADDING - }); - control.getContentElement().setStyles({ - "border-width": 0 + control = new qx.ui.container.Composite(new qx.ui.layout.VBox(6)).set({ + padding: this.self().PADDING, }); break; case "footer": { const fGrid = new qx.ui.layout.Grid(); fGrid.setSpacing(2); - fGrid.setColumnFlex(0, 1); + fGrid.setColumnFlex(this.self().FPOS.MODIFIED.row, 1); control = new qx.ui.container.Composite().set({ backgroundColor: "background-card-overlay", - padding: this.self().PADDING - 2, + padding: this.self().PADDING, maxWidth: this.self().ITEM_WIDTH, maxHeight: this.self().ITEM_HEIGHT }); @@ -184,10 +184,7 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { case "icon": { control = osparc.dashboard.CardBase.createCardIcon(); layout = this.getChildControl("header"); - layout.add(control, { - column: 0, - row: 0, - }); + layout.add(control, this.self().HPOS.ICON); break; } case "title": @@ -196,10 +193,7 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { font: "text-14", }); layout = this.getChildControl("header"); - layout.add(control, { - column: 1, - row: 0, - }); + layout.add(control, this.self().HPOS.TITLE); break; case "subtitle-icon": { control = new qx.ui.basic.Image().set({ @@ -207,10 +201,7 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { allowShrinkX: false, }); layout = this.getChildControl("header"); - layout.add(control, { - column: 0, - row: 1, - }); + layout.add(control, this.self().HPOS.SUBTITLE_ICON); break; } case "subtitle-text": { @@ -225,10 +216,7 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { allowGrowY: false }); layout = this.getChildControl("header"); - layout.add(control, { - column: 1, - row: 1, - }); + layout.add(control, this.self().HPOS.SUBTITLE_TEXT); break; } case "thumbnail": { @@ -322,32 +310,6 @@ qx.Class.define("osparc.dashboard.GridButtonBase", { return; }, - __fitThumbnailHeight: function() { - const thumbnailLayout = this.getChildControl("thumbnail"); - let maxHeight = this.getHeight() 
- this.getPaddingTop() - this.getPaddingBottom() - 5; - const checkThis = [ - "title", - "body", - "footer", - "tags" - ]; - const layout = this.getChildControl("main-layout"); - // eslint-disable-next-line no-underscore-dangle - layout._getChildren().forEach(child => { - if (checkThis.includes(child.getSubcontrolId()) && child.getBounds()) { - maxHeight -= (child.getBounds().height + this.self().SPACING_IN); - if (child.getSubcontrolId() === "tags") { - maxHeight -= 8; - } - } - }); - // maxHeight -= 4; // for Roboto - maxHeight -= 18; // for Manrope - thumbnailLayout.getChildControl("image").setMaxHeight(maxHeight); - thumbnailLayout.setMaxHeight(maxHeight); - thumbnailLayout.recheckSize(); - }, - /** * Event handler for the pointer over event. */ diff --git a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js index 858328268c08..2823929df4d2 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js @@ -34,7 +34,12 @@ qx.Class.define("osparc.dashboard.GridButtonItem", { }, statics: { - MENU_BTN_DIMENSIONS: 24 + MENU_BTN_DIMENSIONS: 24, + + BODY_POS: { + AVATAR_GROUP: 0, + TAGS: 1, + }, }, members: { @@ -81,16 +86,25 @@ qx.Class.define("osparc.dashboard.GridButtonItem", { layout = this.getChildControl("footer"); layout.add(control, osparc.dashboard.GridButtonBase.FPOS.HITS); break; - case "tags": - control = new qx.ui.container.Composite(new qx.ui.layout.Flow(5, 3)).set({ + case "avatar-group": { + const maxWidth = osparc.dashboard.GridButtonBase.ITEM_WIDTH - osparc.dashboard.GridButtonBase.PADDING * 2; + control = new osparc.ui.basic.AvatarGroup(24, "left", maxWidth); + this.getChildControl("body").addAt(control, this.self().BODY_POS.AVATAR_GROUP); + break; + } + case "tags": { + const wrapper = new qx.ui.container.Composite(new qx.ui.layout.VBox()); + // Add spacer to push tags to bottom + wrapper.add(new qx.ui.core.Spacer(), {flex: 1}); + control = new qx.ui.container.Composite(new qx.ui.layout.Flow(4, 4)).set({ anonymous: true, - paddingLeft: osparc.dashboard.GridButtonBase.PADDING, - paddingRight: osparc.dashboard.GridButtonBase.PADDING, - paddingBottom: osparc.dashboard.GridButtonBase.PADDING / 2 }); - layout = this.getChildControl("main-layout"); - layout.add(control, osparc.dashboard.GridButtonBase.POS.TAGS); + wrapper.add(control); + this.getChildControl("body").addAt(wrapper, this.self().BODY_POS.TAGS, { + flex: 1, + }); break; + } case "menu-selection-stack": control = new qx.ui.container.Stack(); control.set({ @@ -190,6 +204,7 @@ qx.Class.define("osparc.dashboard.GridButtonItem", { "template", "tutorial", "hypertool", + "function", ].includes(this.getResourceType())) { const dateBy = this.getChildControl("date-by"); dateBy.set({ @@ -234,9 +249,15 @@ qx.Class.define("osparc.dashboard.GridButtonItem", { // overridden _applyOwner: function(value, old) { const label = this.getChildControl("subtitle-text"); - const user = this.__createOwner(value); - label.setValue(user); - label.setVisibility(value ? "visible" : "excluded"); + if (osparc.utils.Resources.isFunction(this.getResourceData())) { + // Functions don't have 'owner' + const canIWrite = osparc.data.model.Function.canIWrite(this.getResourceData()["accessRights"]); + label.setValue(canIWrite ? 
"My Function" : "Read Only"); + } else { + const user = this.__createOwner(value); + label.setValue(user); + label.setVisibility(value ? "visible" : "excluded"); + } }, _applyAccessRights: function(value) { @@ -261,7 +282,7 @@ qx.Class.define("osparc.dashboard.GridButtonItem", { const tagsContainer = this.getChildControl("tags"); tagsContainer.setVisibility(tags.length ? "visible" : "excluded"); tagsContainer.removeAll(); - for (let i=0; i<=tags.length && i this.__modeChanged(contentContainer)); + [ + "changeSelection", + "changeVisibility" + ].forEach(signalName => { + contentContainer.addListener(signalName, e => this.fireDataEvent(signalName, e.getData()), this); + }); + showAllBtn.show(); } else { const spacing = osparc.dashboard.GridButtonBase.SPACING; @@ -136,7 +158,8 @@ qx.Class.define("osparc.dashboard.GroupedCardContainer", { }); } contentContainer.set({ - padding: 5, + paddingTop: 5, + paddingBottom: 5, allowGrowX: false }); this._addAt(contentContainer, 1, { @@ -174,6 +197,10 @@ qx.Class.define("osparc.dashboard.GroupedCardContainer", { return this.__contentContainer; }, + getExpandButton: function() { + return this.__expandButton; + }, + // overridden add: function(child, idx) { if (osparc.dashboard.CardContainer.isValidCard(child)) { diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonBase.js b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonBase.js index 8e7bdd144883..e8fa47dc9bf6 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonBase.js @@ -50,14 +50,15 @@ qx.Class.define("osparc.dashboard.ListButtonBase", { TITLE: 2, SPACER: 3, PROGRESS: 4, - TAGS: 5, - ICONS_LAYOUT: 6, - OWNER: 7, - SHARED: 8, - LAST_CHANGE: 9, - TSR: 10, - HITS: 11, - OPTIONS: 12 + AVATAR_GROUP: 5, + TAGS: 6, + ICONS_LAYOUT: 7, + OWNER: 8, + SHARED: 9, + LAST_CHANGE: 10, + TSR: 11, + HITS: 12, + OPTIONS: 13, } }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js index 23cc75e57145..9c13fb21f357 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js @@ -45,6 +45,15 @@ qx.Class.define("osparc.dashboard.ListButtonItem", { column: osparc.dashboard.ListButtonBase.POS.LOCK_STATUS }); break; + case "avatar-group": + control = new osparc.ui.basic.AvatarGroup(24, "right", 100).set({ + paddingTop: 4, // to align it in the middle + }); + this._add(control, { + row: 0, + column: osparc.dashboard.ListButtonBase.POS.AVATAR_GROUP + }); + break; case "tags": control = new qx.ui.container.Composite(new qx.ui.layout.HBox(3).set({ alignY: "middle" @@ -234,10 +243,16 @@ qx.Class.define("osparc.dashboard.ListButtonItem", { _applyOwner: function(value, old) { const label = this.getChildControl("owner"); - const user = this.__createOwner(value); - label.setValue(user); + if (osparc.utils.Resources.isFunction(this.getResourceData())) { + // Functions don't have 'owner' + const canIWrite = osparc.data.model.Function.canIWrite(this.getResourceData()["accessRights"]); + label.setValue(canIWrite ? 
"My Function" : "Read Only"); + } else { + const user = this.__createOwner(value); + label.setValue(user); + } + this.__makeItemResponsive(label); - return; }, _applyAccessRights: function(value) { @@ -295,18 +310,5 @@ qx.Class.define("osparc.dashboard.ListButtonItem", { }); }); }, - - _applyMenu: function(menu, old) { - const menuButton = this.getChildControl("menu-button"); - if (menu) { - menuButton.setMenu(menu).set({ - appearance: "menu-wider", - position: "bottom-left", - }); - osparc.utils.Utils.setIdToWidget(menu, "studyItemMenuMenu"); - this.evaluateMenuButtons(); - } - menuButton.setVisibility(menu ? "visible" : "excluded"); - } } }); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonNew.js b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonNew.js index 7ae28a96cf4a..4fef6ef947ab 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonNew.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonNew.js @@ -37,7 +37,7 @@ qx.Class.define("osparc.dashboard.ListButtonNew", { title = osparc.utils.Utils.replaceTokens( title, "replace_me_product_name", - osparc.store.StaticInfo.getInstance().getDisplayName() + osparc.store.StaticInfo.getDisplayName() ); const titleLabel = this.getChildControl("title"); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js b/services/static-webserver/client/source/class/osparc/dashboard/NewPlans.js similarity index 92% rename from services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js rename to services/static-webserver/client/source/class/osparc/dashboard/NewPlans.js index 9b828f9bb876..053da289379e 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/NewStudies.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/NewPlans.js @@ -15,7 +15,7 @@ ************************************************************************ */ -qx.Class.define("osparc.dashboard.NewStudies", { +qx.Class.define("osparc.dashboard.NewPlans", { extend: qx.ui.core.Widget, construct: function(newStudiesData) { @@ -38,7 +38,8 @@ qx.Class.define("osparc.dashboard.NewStudies", { osparc.store.Templates.getHypertools() .then(hypertools => { - this.__newStudies = newButtonsInfo.filter(newButtonInfo => { + // TIP and TIP lite templates are template_type: "hypertool" + this.__newPlans = newButtonsInfo.filter(newButtonInfo => { if (newButtonInfo.showDisabled) { return true; } @@ -59,7 +60,7 @@ qx.Class.define("osparc.dashboard.NewStudies", { }, events: { - "newStudyClicked": "qx.event.type.Data", + "newPlanClicked": "qx.event.type.Data", }, statics: { @@ -67,7 +68,7 @@ qx.Class.define("osparc.dashboard.NewStudies", { }, members: { - __newStudies: null, + __newPlans: null, __groups: null, __flatList: null, __groupedContainers: null, @@ -103,7 +104,7 @@ qx.Class.define("osparc.dashboard.NewStudies", { } const newCards = []; - this.__newStudies.forEach(resourceData => { + this.__newPlans.forEach(resourceData => { const cards = this.__resourceToCards(resourceData); cards.forEach(newCard => { if (resourceData.showDisabled) { @@ -112,7 +113,7 @@ qx.Class.define("osparc.dashboard.NewStudies", { const reason = osparc.utils.Utils.replaceTokens( resourceData.reason, "replace_me_product_name", - osparc.store.StaticInfo.getInstance().getDisplayName() + osparc.store.StaticInfo.getDisplayName() ); const descLabel = newCard.getChildControl("subtitle-text"); descLabel.setValue(reason.toString()); @@ 
-175,7 +176,7 @@ qx.Class.define("osparc.dashboard.NewStudies", { }, __createCard: function(templateInfo) { - const newStudyClicked = () => this.fireDataEvent("newStudyClicked", templateInfo); + const newPlanClicked = () => this.fireDataEvent("newPlanClicked", templateInfo); const title = templateInfo.title; const desc = templateInfo.description; @@ -184,7 +185,7 @@ qx.Class.define("osparc.dashboard.NewStudies", { newPlanButton.setCardKey(templateInfo["idToWidget"]); osparc.utils.Utils.setIdToWidget(newPlanButton, templateInfo["idToWidget"]); } - newPlanButton.addListener("tap", () => newStudyClicked()); + newPlanButton.addListener("tap", () => newPlanClicked()); return newPlanButton; }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/NewPlusMenu.js b/services/static-webserver/client/source/class/osparc/dashboard/NewPlusMenu.js index 8137aeefe2d3..318ba51e1f52 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/NewPlusMenu.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/NewPlusMenu.js @@ -98,7 +98,7 @@ qx.Class.define("osparc.dashboard.NewPlusMenu", { title = osparc.utils.Utils.replaceTokens( title, "replace_me_product_name", - osparc.store.StaticInfo.getInstance().getDisplayName() + osparc.store.StaticInfo.getDisplayName() ); title = title.replace(/
/g, " "); const menuButton = new qx.ui.menu.Button().set({ @@ -118,7 +118,7 @@ qx.Class.define("osparc.dashboard.NewPlusMenu", { infoText = osparc.utils.Utils.replaceTokens( infoText, "replace_me_product_name", - osparc.store.StaticInfo.getInstance().getDisplayName() + osparc.store.StaticInfo.getDisplayName() ); const infoHint = new osparc.ui.hint.InfoHint(infoText).set({ source: osparc.ui.hint.InfoHint.INFO_ICON + "/16", @@ -168,9 +168,7 @@ qx.Class.define("osparc.dashboard.NewPlusMenu", { __addItems: function() { this.__addUIConfigItems(); - if (osparc.product.Utils.isS4LProduct()) { - this.__addHypertools(); - } + this.__addHypertools(); this.__addMoreMenu(); this.getChildControl("new-folder"); }, @@ -196,18 +194,21 @@ qx.Class.define("osparc.dashboard.NewPlusMenu", { }, __addHypertools: function() { + const hypertoolsMenuButton = this.self().createMenuButton(null, this.tr("Hypertools")); + hypertoolsMenuButton.exclude(); + this.addAt(hypertoolsMenuButton, this.__itemIdx); + this.__itemIdx++; + this.self().setIcon(hypertoolsMenuButton, osparc.data.model.StudyUI.HYPERTOOL_ICON); + osparc.store.Templates.getHypertools() .then(hypertools => { + hypertoolsMenuButton.setVisibility(hypertools.length > 0 ? "visible" : "excluded"); + // add entry for hypertools if there are any if (hypertools.length) { - const hypertoolsMenuButton = this.self().createMenuButton(null, this.tr("Hypertools")); - this.addAt(hypertoolsMenuButton, this.__itemIdx); - this.__itemIdx++; - const hypertoolsMenu = new qx.ui.menu.Menu().set({ appearance: "menu-wider", }); hypertoolsMenuButton.setMenu(hypertoolsMenu); - this.self().setIcon(hypertoolsMenuButton, osparc.data.model.StudyUI.HYPERTOOL_ICON); hypertools.forEach(templateData => { const hypertoolButton = this.self().createMenuButton(null, templateData["name"]); @@ -244,9 +245,9 @@ qx.Class.define("osparc.dashboard.NewPlusMenu", { const permissions = osparc.data.Permissions.getInstance(); if (permissions.canDo("dashboard.templates.read")) { - const templatesButton = this.self().createMenuButton("@FontAwesome5Solid/copy/16", this.tr("Tutorials...")); - templatesButton.addListener("execute", () => this.fireDataEvent("changeTab", "templatesTab"), this); - moreMenu.add(templatesButton); + const tutorialsButton = this.self().createMenuButton("@FontAwesome5Solid/copy/16", this.tr("Tutorials...")); + tutorialsButton.addListener("execute", () => this.fireDataEvent("changeTab", "tutorialsTab"), this); + moreMenu.add(tutorialsButton); } if (permissions.canDo("dashboard.services.read")) { @@ -359,10 +360,13 @@ qx.Class.define("osparc.dashboard.NewPlusMenu", { // so that is not consumed by the menu button itself e.stopPropagation(); latestMetadata["resourceType"] = "service"; - const resourceDetails = new osparc.dashboard.ResourceDetails(latestMetadata); - const win = osparc.dashboard.ResourceDetails.popUpInWindow(resourceDetails); + const { + resourceDetails, + window, + } = osparc.dashboard.ResourceDetails.popUpInWindow(latestMetadata); + resourceDetails.addListener("openService", ev => { - win.close(); + window.close(); const openServiceData = ev.getData(); this.fireDataEvent("newStudyFromServiceClicked", { serviceMetadata: openServiceData, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js index 6dbca0245bf9..a8b5967cfeea 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js +++ 
b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js @@ -33,7 +33,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { this._resourcesInitialized = false; - this._showLoadingPage(this.tr("Starting") + " " + osparc.store.StaticInfo.getInstance().getDisplayName()); + this._showLoadingPage(this.tr("Starting") + " " + osparc.store.StaticInfo.getDisplayName()); const padding = osparc.dashboard.Dashboard.PADDING; const leftColumnWidth = this.self().SIDE_SPACER_WIDTH; @@ -121,19 +121,20 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { const walletsEnabled = osparc.desktop.credits.Utils.areWalletsEnabled(); if (walletsEnabled) { - const params = { - url: { - studyId - } - }; - osparc.data.Resources.fetch("studies", "getWallet", params) - .then(wallet => { + Promise.all([ + osparc.store.Study.getInstance().getWallet(studyId), + osparc.store.Study.getInstance().getOne(studyId), + ]).then(([wallet, latestStudyData]) => { + const currentUserGroupIds = osparc.study.Utils.state.getCurrentGroupIds(latestStudyData["state"]); if ( isStudyCreation || wallet === null || - osparc.desktop.credits.Utils.getWallet(wallet["walletId"]) === null + (osparc.desktop.credits.Utils.getWallet(wallet["walletId"]) === null && currentUserGroupIds.length === 0) ) { - // pop up study options if the study was just created or if it has no wallet assigned or user has no access to it + // pop up StudyOptions if: + // - the study was just created + // - it has no wallet assigned + // - I do not have access to it and the project is not being used const resourceSelector = new osparc.study.StudyOptions(studyId); if (isStudyCreation) { resourceSelector.getChildControl("open-button").setLabel(qx.locale.Manager.tr("New")); @@ -162,10 +163,30 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { } }); } else { - openStudy(); + const found = osparc.store.Store.getInstance().getWallets().find(w => w.getWalletId() === wallet["walletId"]); + if (found) { + // I have access to the wallet + if (osparc.store.Store.getInstance().getContextWallet() !== found) { + // switch to that wallet and inform the user that the context wallet has changed + const text = qx.locale.Manager.tr("Switched to Credit Account") + " '" + found.getName() + "'"; + osparc.FlashMessenger.logAs(text); + } + osparc.store.Store.getInstance().setActiveWallet(found); + openStudy(); + } else { + // I do not have access to the wallet or it's being used + // cancel and explain the user why + const isRTCEnabled = osparc.utils.DisabledPlugins.isRTCEnabled(); + const msg = isRTCEnabled ? + qx.locale.Manager.tr("You can't join the project because you don't have access to the Credit Account associated with it. 
Please contact the project owner.") : + qx.locale.Manager.tr("You can't join the project because it's already open by another user."); + osparc.FlashMessenger.logAs(msg, "ERROR"); + if (cancelCB) { + cancelCB(); + } + } } - }) - .catch(err => osparc.FlashMessenger.logError(err)); + }); } else { openStudy(); } @@ -202,7 +223,22 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { osparc.utils.Utils.addBorderRightRadius(rButton); } return rButton; - } + }, + + getOpenText: function(resourceData) { + const studyAlias = osparc.product.Utils.getStudyAlias({firstUpperCase: true}); + let openText = qx.locale.Manager.tr("New") + " " + studyAlias; + if (resourceData["resourceType"] === "study") { + // if it's in use call it join + const isRTCEnabled = osparc.utils.DisabledPlugins.isRTCEnabled(); + if (osparc.study.Utils.state.getCurrentGroupIds(resourceData["state"]).length && isRTCEnabled) { + openText = qx.locale.Manager.tr("Join"); + } else { + openText = qx.locale.Manager.tr("Open"); + } + } + return openText; + }, }, members: { @@ -288,6 +324,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { resourcesContainer.addListener("updateStudy", e => this._updateStudyData(e.getData())); resourcesContainer.addListener("updateTemplate", e => this._updateTemplateData(e.getData())); resourcesContainer.addListener("updateTutorial", e => this._updateTutorialData(e.getData())); + resourcesContainer.addListener("updateFunction", e => this._updateFunctionData(e.getData())); resourcesContainer.addListener("updateService", e => this._updateServiceData(e.getData())); resourcesContainer.addListener("updateHypertool", e => this._updateHypertoolData(e.getData())); resourcesContainer.addListener("publishTemplate", e => this.fireDataEvent("publishTemplate", e.getData())); @@ -324,10 +361,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { this._addToLayout(resourcesContainer); }, - __groupByChanged: function(groupBy) { - // if cards are grouped they need to be in grid mode - this._resourcesContainer.setMode("grid"); - this.__viewModeLayout.setVisibility(groupBy ? 
"excluded" : "visible"); + _groupByChanged: function(groupBy) { this._resourcesContainer.setGroupBy(groupBy); this._reloadCards(); }, @@ -356,27 +390,34 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { const dontGroup = new qx.ui.menu.RadioButton(this.tr("None")); osparc.utils.Utils.setIdToWidget(dontGroup, "groupByNone"); - dontGroup.addListener("execute", () => this.__groupByChanged(null)); + dontGroup.addListener("execute", () => this._groupByChanged(null)); groupByMenu.add(dontGroup); groupOptions.add(dontGroup); if (this._resourceType === "template") { - const tagByGroup = new qx.ui.menu.RadioButton(this.tr("Tags")); - tagByGroup.addListener("execute", () => this.__groupByChanged("tags")); - groupByMenu.add(tagByGroup); - groupOptions.add(tagByGroup); + const groupByTag = new qx.ui.menu.RadioButton(this.tr("Tags")); + groupByTag.addListener("execute", () => this._groupByChanged("tags")); + groupByMenu.add(groupByTag); + groupOptions.add(groupByTag); if ( osparc.product.Utils.isProduct("s4l") || osparc.product.Utils.isProduct("s4lacad") || osparc.product.Utils.isProduct("s4llite") ) { - tagByGroup.execute(); + groupByTag.execute(); } + } else if (this._resourceType === "service" && osparc.product.Utils.groupServices()) { + const groupByFeatured = new qx.ui.menu.RadioButton(this.tr("Featured")); + groupByFeatured.addListener("execute", () => this._groupByChanged("groupedServices")); + groupByMenu.add(groupByFeatured); + groupOptions.add(groupByFeatured); + groupByFeatured.execute(); + groupByButton.exclude(); // don't let users change the grouping } const groupByShared = new qx.ui.menu.RadioButton(this.tr("Shared with")); - groupByShared.addListener("execute", () => this.__groupByChanged("shared")); + groupByShared.addListener("execute", () => this._groupByChanged("shared")); groupByMenu.add(groupByShared); groupOptions.add(groupByShared); @@ -522,7 +563,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { }); task.addListener("taskAborted", () => { finished(); - const msg = this.tr("Study to Template cancelled"); + const msg = this.tr("Project to Template cancelled"); osparc.FlashMessenger.logAs(msg, "WARNING"); }); task.addListener("pollingError", e => { @@ -667,10 +708,10 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { const arCopy = osparc.utils.Utils.deepCloneObject(templateData["accessRights"]); // remove collaborator delete arCopy[myGid]; - operationPromise = osparc.store.Study.patchStudyData(templateData, "accessRights", arCopy); + operationPromise = osparc.store.Study.getInstance().patchStudyData(templateData, "accessRights", arCopy); } else { // delete study - operationPromise = osparc.store.Store.getInstance().deleteStudy(templateData.uuid); + operationPromise = osparc.store.Study.getInstance().deleteStudy(templateData.uuid); } operationPromise .then(() => this.__removeFromTemplateList(templateData.uuid)) @@ -694,7 +735,11 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { } }, - updateTutorialData: function(tutorialData) { + _updateTutorialData: function(tutorialData) { + throw new Error("Abstract method called!"); + }, + + _updateFunctionData: function(functionData) { throw new Error("Abstract method called!"); }, @@ -734,6 +779,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { win.close(); } win.addListener("cancel", () => cancelStudyOptions()); + win.getChildControl("close-button").addListener("tap", () => cancelStudyOptions()); studyOptions.addListener("cancel", () => cancelStudyOptions()); 
studyOptions.addListener("startStudy", () => { const newName = studyOptions.getChildControl("title-field").getValue(); @@ -750,12 +796,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { }; const cancelCB = () => { this._hideLoadingPage(); - const params = { - url: { - studyId - } - }; - osparc.data.Resources.fetch("studies", "delete", params); + osparc.store.Study.getInstance().deleteStudy(studyId); }; const promises = []; @@ -783,7 +824,8 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { if (selectedPricingUnitId) { const nodeId = nodesIdsListed[idx]; const pricingPlanId = nodePricingUnits.getPricingPlanId(); - promises.push(osparc.study.NodePricingUnits.patchPricingUnitSelection(studyId, nodeId, pricingPlanId, selectedPricingUnitId)); + const selectedUnit = nodePricingUnits.getPricingUnits().getSelectedUnit(); + promises.push(osparc.store.Study.getInstance().updateSelectedPricingUnit(studyId, nodeId, pricingPlanId, selectedUnit)); } }); @@ -806,12 +848,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { const openCB = () => this._hideLoadingPage(); const cancelCB = () => { this._hideLoadingPage(); - const params = { - url: { - studyId - } - }; - osparc.data.Resources.fetch("studies", "delete", params); + osparc.store.Study.getInstance().deleteStudy(studyId); }; const isStudyCreation = true; this._startStudyById(studyId, openCB, cancelCB, isStudyCreation); @@ -836,12 +873,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { const openCB = () => this._hideLoadingPage(); const cancelCB = () => { this._hideLoadingPage(); - const params = { - url: { - studyId - } - }; - osparc.data.Resources.fetch("studies", "delete", params); + osparc.store.Study.getInstance().deleteStudy(studyId); }; const isStudyCreation = true; this._startStudyById(studyId, openCB, cancelCB, isStudyCreation); @@ -909,8 +941,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { }, _getOpenMenuButton: function(resourceData) { - const studyAlias = osparc.product.Utils.getStudyAlias({firstUpperCase: true}); - const openText = (resourceData["resourceType"] === "study") ? 
this.tr("Open") : this.tr("New") + " " + studyAlias; + const openText = osparc.dashboard.ResourceBrowserBase.getOpenText(resourceData); const openButton = new qx.ui.menu.Button(openText); openButton["openResourceButton"] = true; openButton.addListener("execute", () => { @@ -934,19 +965,23 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { }, _openResourceDetails: function(resourceData) { - const resourceDetails = new osparc.dashboard.ResourceDetails(resourceData); - const win = osparc.dashboard.ResourceDetails.popUpInWindow(resourceDetails); + const { + resourceDetails, + window, + } = osparc.dashboard.ResourceDetails.popUpInWindow(resourceData); + resourceDetails.addListener("updateStudy", e => this._updateStudyData(e.getData())); resourceDetails.addListener("updateTemplate", e => this._updateTemplateData(e.getData())); resourceDetails.addListener("updateTutorial", e => this._updateTutorialData(e.getData())); + resourceDetails.addListener("updateFunction", e => this._updateFunctionData(e.getData())); resourceDetails.addListener("updateService", e => this._updateServiceData(e.getData())); resourceDetails.addListener("updateHypertool", e => this._updateHypertoolData(e.getData())); resourceDetails.addListener("publishTemplate", e => { - win.close(); + window.close(); this.fireDataEvent("publishTemplate", e.getData()); }); resourceDetails.addListener("openStudy", e => { - const openCB = () => win.close(); + const openCB = () => window.close(); const studyId = e.getData()["uuid"]; const isStudyCreation = false; this._startStudyById(studyId, openCB, null, isStudyCreation); @@ -957,13 +992,13 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { "openHypertool", ].forEach(eventName => { resourceDetails.addListener(eventName, e => { - win.close(); + window.close(); const templateData = e.getData(); this._createStudyFromTemplate(templateData); }); }); resourceDetails.addListener("openService", e => { - win.close(); + window.close(); const openServiceData = e.getData(); this._createStudyFromService(openServiceData["key"], openServiceData["version"]); }); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserFilter.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserFilter.js index 4b2009279e58..9f1d9528cd2e 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserFilter.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserFilter.js @@ -35,7 +35,8 @@ qx.Class.define("osparc.dashboard.ResourceBrowserFilter", { events: { "templatesContext": "qx.event.type.Event", - "publicContext": "qx.event.type.Event", + "publicTemplatesContext": "qx.event.type.Event", + "functionsContext": "qx.event.type.Event", "trashContext": "qx.event.type.Event", "changeTab": "qx.event.type.Data", "trashStudyRequested": "qx.event.type.Data", @@ -50,13 +51,41 @@ qx.Class.define("osparc.dashboard.ResourceBrowserFilter", { __workspacesAndFoldersTree: null, __templatesButton: null, __publicProjectsButton: null, + __functionsButton: null, __trashButton: null, __sharedWithButtons: null, __tagButtons: null, __appTypeButtons: null, + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "filters-spacer": + control = new qx.ui.core.Spacer(10, 10); + this._add(control); + break; + case "shared-with-layout": + control = this.__createSharedWithFilterLayout(); + this._add(control); + break; + case "app-type-layout": + control = this.__createAppTypeFilterLayout(); + 
this._add(control); + break; + case "tags-layout": { + control = this.__createTagsFilterLayout(); + const scrollView = new qx.ui.container.Scroll(); + scrollView.add(control); + this._add(scrollView, { + flex: 1 + }); + break; + } + } + return control || null; + }, + __buildLayout: function() { - const filtersSpacer = new qx.ui.core.Spacer(10, 10); switch (this.__resourceType) { case "study": { this._add(this.__createWorkspacesAndFoldersTree()); @@ -66,29 +95,22 @@ qx.Class.define("osparc.dashboard.ResourceBrowserFilter", { if (osparc.product.Utils.showPublicProjects()) { this._add(this.__createPublicProjects()); } + if (osparc.product.Utils.showFunctions()) { + this._add(this.__createFunctions()); + } this._add(this.__createTrashBin()); - this._add(filtersSpacer); - const scrollView = new qx.ui.container.Scroll(); - scrollView.add(this.__createTagsFilterLayout()); - this._add(scrollView, { - flex: 1 - }); + this.getChildControl("filters-spacer"); break; } - case "template": { - this._add(filtersSpacer); - this._add(this.__createSharedWithFilterLayout()); - const scrollView = new qx.ui.container.Scroll(); - scrollView.add(this.__createTagsFilterLayout()); - this._add(scrollView, { - flex: 1 - }); + case "template": + this.getChildControl("filters-spacer"); + this.getChildControl("shared-with-layout"); + this.getChildControl("tags-layout"); break; - } case "service": - this._add(filtersSpacer); - this._add(this.__createSharedWithFilterLayout()); - this._add(this.__createAppTypeFilterLayout()); + this.getChildControl("filters-spacer"); + this.getChildControl("shared-with-layout"); + this.getChildControl("app-type-layout"); break; } }, @@ -100,9 +122,18 @@ qx.Class.define("osparc.dashboard.ResourceBrowserFilter", { }); this.__workspacesAndFoldersTree.contextChanged(context); - this.__templatesButton.setValue(context === "templates"); - this.__publicProjectsButton.setValue(context === "public"); - this.__trashButton.setValue(context === "trash"); + if (this.__templatesButton) { + this.__templatesButton.setValue(context === osparc.dashboard.StudyBrowser.CONTEXT.TEMPLATES); + } + if (this.__publicProjectsButton) { + this.__publicProjectsButton.setValue(context === osparc.dashboard.StudyBrowser.CONTEXT.PUBLIC_TEMPLATES); + } + if (this.__functionsButton) { + this.__functionsButton.setValue(context === osparc.dashboard.StudyBrowser.CONTEXT.FUNCTIONS); + } + if (this.__trashButton) { + this.__trashButton.setValue(context === osparc.dashboard.StudyBrowser.CONTEXT.TRASH); + } }, /* WORKSPACES AND FOLDERS */ @@ -136,6 +167,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserFilter", { icon: "@FontAwesome5Solid/copy/16", paddingLeft: 10, // align it with the context }); + osparc.utils.Utils.setIdToWidget(templatesButton, "templatesFilterItem"); templatesButton.addListener("changeValue", e => { const templatesEnabled = e.getData(); if (templatesEnabled) { @@ -153,15 +185,34 @@ qx.Class.define("osparc.dashboard.ResourceBrowserFilter", { icon: "@FontAwesome5Solid/globe/16", paddingLeft: 10, // align it with the context }); + osparc.utils.Utils.setIdToWidget(publicProjectsButton, "publicProjectsFilterItem"); publicProjectsButton.addListener("changeValue", e => { const templatesEnabled = e.getData(); if (templatesEnabled) { - this.fireEvent("publicContext"); + this.fireEvent("publicTemplatesContext"); } }); return publicProjectsButton; }, + __createFunctions: function() { + const functionsButton = this.__functionsButton = new qx.ui.toolbar.RadioButton().set({ + value: false, + appearance: 
"filter-toggle-button", + label: this.tr("Functions"), + icon: "@MaterialIcons/functions/20", + paddingLeft: 10, // align it with the context + }); + osparc.utils.Utils.setIdToWidget(functionsButton, "functionsFilterItem"); + functionsButton.addListener("changeValue", e => { + const functionsEnabled = e.getData(); + if (functionsEnabled) { + this.fireEvent("functionsContext"); + } + }); + return functionsButton; + }, + /* TRASH BIN */ __createTrashBin: function() { const trashButton = this.__trashButton = new qx.ui.toolbar.RadioButton().set({ @@ -245,7 +296,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserFilter", { } }; Promise.all([ - osparc.data.Resources.fetch("studies", "getPageTrashed", studiesParams), + osparc.store.Study.getInstance().getPageTrashed(studiesParams), osparc.data.Resources.fetch("folders", "getPageTrashed", foldersParams), osparc.data.Resources.fetch("workspaces", "getPageTrashed", workspacesParams), ]) @@ -320,12 +371,6 @@ qx.Class.define("osparc.dashboard.ResourceBrowserFilter", { __createTagsFilterLayout: function() { const tagsLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(2)); osparc.utils.Utils.setIdToWidget(tagsLayout, this.__resourceType + "-tagsFilter"); - - this.__populateTags(tagsLayout, []); - osparc.store.Tags.getInstance().addListener("tagsChanged", () => { - this.__populateTags(tagsLayout, this.__getSelectedTagIds()); - }, this); - return tagsLayout; }, @@ -334,11 +379,17 @@ qx.Class.define("osparc.dashboard.ResourceBrowserFilter", { return selectedTagIds; }, - __populateTags: function(tagsLayout, selectedTagIds) { - const maxTags = 5; - this.__tagButtons = []; + populateTags: function(presentTagIds = []) { + const selectedTagIds = this.__getSelectedTagIds(); + const tagsLayout = this.getChildControl("tags-layout"); tagsLayout.removeAll(); - osparc.store.Tags.getInstance().getTags().forEach((tag, idx) => { + const maxTags = 10; + this.__tagButtons = []; + presentTagIds.forEach(tagId => { + const tag = osparc.store.Tags.getInstance().getTag(tagId); + if (!tag) { + return; + } const button = new qx.ui.form.ToggleButton(null, "@FontAwesome5Solid/tag/16"); button.id = tag.getTagId(); tag.bind("name", button, "label"); @@ -357,7 +408,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserFilter", { this.fireDataEvent("changeSelectedTags", selection); }, this); - button.setVisibility(idx >= maxTags ? "excluded" : "visible"); + button.setVisibility(this.__tagButtons.length >= maxTags ? "excluded" : "visible"); this.__tagButtons.push(button); }); @@ -392,6 +443,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserFilter", { myAccountWindow.openTags(); }); tagsLayout.add(editTagsButton); + editTagsButton.exclude(); // excluded for now, they will be used as categories if (this.__resourceType === "study") { tagsLayout.getChildren().forEach(item => item.setPaddingLeft(10)); // align them with the context @@ -421,6 +473,11 @@ qx.Class.define("osparc.dashboard.ResourceBrowserFilter", { // hypertools filter const button = new qx.ui.toolbar.RadioButton("Hypertools", null); + button.exclude(); + osparc.store.Templates.getHypertools() + .then(hypertools => { + button.setVisibility(hypertools.length > 0 ? 
"visible" : "excluded"); + }); osparc.utils.Utils.replaceIconWithThumbnail(button, osparc.data.model.StudyUI.HYPERTOOL_ICON, 26); // align it with the rest of icons button.set({ diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js index 9398d8eba77f..8ff84ed5bce2 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js @@ -31,6 +31,7 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { this.__foldersList = []; this.__resourcesList = []; this.__groupedContainersList = []; + this.__resourceType = resourceType || "study"; if (resourceType === "study") { const workspacesContainer = this.__workspacesContainer = new osparc.dashboard.CardContainer(); @@ -42,6 +43,13 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { this._add(foldersContainer); } + const noResourcesFound = this.__noResourcesFound = new qx.ui.basic.Label("No resources found").set({ + visibility: "excluded", + font: "text-14" + }); + noResourcesFound.exclude(); + this._add(noResourcesFound); + const nonGroupedContainer = this.__nonGroupedContainer = this.__createFlatList(); this._add(nonGroupedContainer); @@ -58,7 +66,7 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { }, groupBy: { - check: [null, "tags", "shared"], + check: [null, "tags", "shared", "groupedServices"], init: null, nullable: true } @@ -68,6 +76,7 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { "updateStudy": "qx.event.type.Data", "updateTemplate": "qx.event.type.Data", "updateTutorial": "qx.event.type.Data", + "updateFunction": "qx.event.type.Data", "updateService": "qx.event.type.Data", "updateHypertool": "qx.event.type.Data", "publishTemplate": "qx.event.type.Data", @@ -110,7 +119,15 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { } } return false; - } + }, + + updateSpacing: function(mode, container) { + const spacing = mode === "grid" ? 
osparc.dashboard.GridButtonBase.SPACING : osparc.dashboard.ListButtonBase.SPACING; + container.getLayout().set({ + spacingX: spacing, + spacingY: spacing + }); + }, }, members: { @@ -122,6 +139,61 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { __workspacesContainer: null, __nonGroupedContainer: null, __groupedContainers: null, + __resourceType: null, + __noResourcesFound: null, + __noResourcesFoundTimer: null, + + __evaluateNoResourcesFoundLabel: function() { + let text = null; + switch (this.__resourceType) { + case "study": { + const studyBrowserContext = osparc.store.Store.getInstance().getStudyBrowserContext(); + switch (studyBrowserContext) { + case osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS: + case osparc.dashboard.StudyBrowser.CONTEXT.TRASH: + text = this.tr("No Projects found"); + break; + case osparc.dashboard.StudyBrowser.CONTEXT.TEMPLATES: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_TEMPLATES: + text = this.tr("No Templates found"); + break; + case osparc.dashboard.StudyBrowser.CONTEXT.PUBLIC_TEMPLATES: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PUBLIC_TEMPLATES: + text = this.tr("No Public Projects found"); + break; + case osparc.dashboard.StudyBrowser.CONTEXT.FUNCTIONS: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_FUNCTIONS: + text = this.tr("No Functions found"); + break; + } + break; + } + case "template": + text = this.tr("No Tutorials found"); + break; + case "service": + text = this.tr("No Apps found"); + break; + default: + text = this.tr("No Resources found"); + break; + } + + this.__noResourcesFound.exclude(); + if (this.__noResourcesFoundTimer) { + clearTimeout(this.__noResourcesFoundTimer); + } + if (text && this.__resourcesList.length === 0) { + // delay it a bit to avoid the initial flickering + this.__noResourcesFoundTimer = setTimeout(() => { + this.__noResourcesFound.set({ + value: text, + visibility: "visible", + }); + }, 2000); + } + }, addNonResourceCard: function(card) { if (osparc.dashboard.CardContainer.isValidCard(card)) { @@ -171,18 +243,6 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { return this.__nonGroupedContainer; }, - __createGroupContainer: function(groupId, headerLabel, headerColor = "text") { - const groupContainer = new osparc.dashboard.GroupedCardContainer().set({ - groupId: groupId.toString(), - headerLabel, - headerIcon: "", - headerColor, - visibility: "excluded" - }); - this.__groupedContainersList.push(groupContainer); - return groupContainer; - }, - areMoreResourcesRequired: function(loadingResourcesBtn) { if (this.__nonGroupedContainer) { return this.__nonGroupedContainer.areMoreResourcesRequired(loadingResourcesBtn); @@ -242,6 +302,7 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { "updateStudy", "updateTemplate", "updateTutorial", + "updateFunction", "updateService", "updateHypertool", "publishTemplate", @@ -275,6 +336,8 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { setResourcesToList: function(resourcesList) { this.__resourcesList = resourcesList; + + this.__evaluateNoResourcesFoundLabel(); }, __cleanAll: function() { @@ -309,8 +372,11 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { case "shared": groupTitle = "Not Shared"; break; + case "groupedServices": + groupTitle = "Misc"; + break; } - const noGroupContainer = this.__createGroupContainer("no-group", groupTitle, "transparent"); + const noGroupContainer = this.__createGroupContainer("no-group", 
groupTitle, "text"); this.__groupedContainers.add(noGroupContainer); this._add(this.__groupedContainers); } else { @@ -322,15 +388,8 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { __createFlatList: function() { const flatList = new osparc.dashboard.CardContainer(); - const setContainerSpacing = () => { - const spacing = this.getMode() === "grid" ? osparc.dashboard.GridButtonBase.SPACING : osparc.dashboard.ListButtonBase.SPACING; - flatList.getLayout().set({ - spacingX: spacing, - spacingY: spacing - }); - }; - setContainerSpacing(); - this.addListener("changeMode", () => setContainerSpacing()); + osparc.dashboard.ResourceContainerManager.updateSpacing(this.getMode(), flatList); + this.addListener("changeMode", () => osparc.dashboard.ResourceContainerManager.updateSpacing(this.getMode(), flatList)); [ "changeSelection", "changeVisibility" @@ -340,6 +399,27 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { return flatList; }, + __createGroupContainer: function(groupId, headerLabel, headerColor = "text") { + const groupContainer = new osparc.dashboard.GroupedCardContainer().set({ + groupId: groupId.toString(), + headerLabel, + headerIcon: "", + headerColor, + visibility: "excluded" + }); + + this.bind("mode", groupContainer, "mode"); + [ + "changeSelection", + "changeVisibility" + ].forEach(signalName => { + groupContainer.addListener(signalName, e => this.fireDataEvent(signalName, e.getData()), this); + }); + + this.__groupedContainersList.push(groupContainer); + return groupContainer; + }, + reloadCards: function(resourceType) { this.__rebuildLayout(resourceType); @@ -500,11 +580,11 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { let icon = ""; if (group.getThumbnail()) { icon = group.getThumbnail(); - } else if (group["collabType"] === 0) { + } else if (group["collabType"] === osparc.store.Groups.COLLAB_TYPE.EVERYONE) { icon = "@FontAwesome5Solid/globe/24"; - } else if (group["collabType"] === 1) { + } else if (group["collabType"] === osparc.store.Groups.COLLAB_TYPE.ORGANIZATION) { icon = "@FontAwesome5Solid/users/24"; - } else if (group["collabType"] === 2) { + } else if (group["collabType"] === osparc.store.Groups.COLLAB_TYPE.USER) { icon = "@FontAwesome5Solid/user/24"; } groupContainer.set({ @@ -525,12 +605,55 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { } }, + __groupByGroupedServices: function(cards, resourceData) { + const groupedServicesConfig = osparc.store.Products.getInstance().getGroupedServicesUiConfig(); + if (groupedServicesConfig == null) { + return; + } + + // create group containers for each category + groupedServicesConfig["categories"].forEach(category => { + if (this.__getGroupContainer(category["id"]) === null) { + const groupContainer = this.__createGroupContainer(category["id"], category["title"], category["color"]); + groupContainer.setHeaderIcon("@FontAwesome5Solid/tag/24"); + this.__groupedContainers.add(groupContainer); + } + }); + + // get the right container + let container = null; + const serviceKey = resourceData["key"]; + if (serviceKey) { + const groupInfo = groupedServicesConfig["services"].find(serviceInfo => serviceInfo["serviceKey"] === serviceKey); + if (groupInfo) { + container = this.__getGroupContainer(groupInfo["category"]); + } + } + if (container === null) { + container = this.__getGroupContainer("no-group"); + container.setHeaderIcon("@FontAwesome5Solid/tag/24"); + } + + // create the card and add it to the container + const card = this.__createCard(resourceData); + 
this.__addCardToContainer(card, container); + cards.push(card); + + this.__moveNoGroupToLast(); + this.__groupedContainersList.forEach(groupedContainer => { + groupedContainer.setExpanded(true); + groupedContainer.getExpandButton().exclude(); + }); + }, + __resourceToCards: function(resourceData) { const cardsCreated = []; if (this.getGroupBy() === "tags") { this.__groupByTags(cardsCreated, resourceData); } else if (this.getGroupBy() === "shared") { this.__groupByShareWith(cardsCreated, resourceData); + } else if (this.getGroupBy() === "groupedServices") { + this.__groupByGroupedServices(cardsCreated, resourceData); } else { const card = this.__createCard(resourceData); this.__addCardToContainer(card, this.__nonGroupedContainer); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js index 2f9454212e38..d617ea8ddd63 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js @@ -21,6 +21,7 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { construct: function(resourceData) { this.base(arguments); + this.__widgets = []; this.__resourceData = resourceData; let latestPromise = null; @@ -29,14 +30,16 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { case "template": case "tutorial": case "hypertool": { - const params = { - url: { - "studyId": resourceData["uuid"] - } - }; - latestPromise = osparc.data.Resources.fetch("studies", "getOne", params); + latestPromise = osparc.store.Study.getInstance().getOne(resourceData["uuid"]); + break; + } + case "functionedTemplate": { + latestPromise = osparc.store.Templates.fetchTemplate(resourceData["uuid"]); break; } + case "function": + latestPromise = osparc.store.Functions.fetchFunction(resourceData["uuid"]); + break; case "service": { latestPromise = osparc.store.Services.getService(resourceData["key"], resourceData["version"]); break; @@ -57,14 +60,44 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { case "template": case "tutorial": case "hypertool": + case "functionedTemplate": + // when getting the latest study data, the debt information was lost + if (osparc.study.Utils.isInDebt(this.__resourceData)) { + const studyStore = osparc.store.Study.getInstance(); + this.__resourceData["debt"] = studyStore.getStudyDebt(this.__resourceData["uuid"]); + } + // prefetch project's services metadata osparc.store.Services.getStudyServicesMetadata(latestResourceData) .finally(() => { this.__resourceModel = new osparc.data.model.Study(latestResourceData); this.__resourceModel["resourceType"] = resourceData["resourceType"]; this.__resourceData["services"] = resourceData["services"]; this.__addPages(); - }) + }); break; + case "function": { + addPages = (functionData, templateData = null) => { + this.__resourceModel = new osparc.data.model.Function(functionData, templateData); + this.__resourceModel["resourceType"] = resourceData["resourceType"]; + this.__addPages(); + } + // use latestResourceData, resourceData doesn't have the functionClass nor the templateId + if (latestResourceData["functionClass"] === osparc.data.model.Function.FUNCTION_CLASS.PROJECT) { + // this is only required for functions that have a template linked + osparc.store.Templates.fetchTemplate(latestResourceData["templateId"]) + .then(templateData => { + // prefetch function's underlying template's services metadata + 
osparc.store.Services.getStudyServicesMetadata(templateData) + .finally(() => { + this.__resourceData["services"] = resourceData["services"]; + addPages(latestResourceData, templateData); + }); + }); + } else { + addPages(latestResourceData); + } + break; + } case "service": { this.__resourceModel = new osparc.data.model.Service(latestResourceData); this.__resourceModel["resourceType"] = resourceData["resourceType"]; @@ -86,9 +119,11 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { "updateStudy": "qx.event.type.Data", "updateTemplate": "qx.event.type.Data", "updateTutorial": "qx.event.type.Data", + "updateFunction": "qx.event.type.Data", "updateService": "qx.event.type.Data", "updateHypertool": "qx.event.type.Data", "publishTemplate": "qx.event.type.Data", + "closeWindow": "qx.event.type.Event", }, @@ -96,18 +131,22 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { WIDTH: 830, HEIGHT: 700, - popUpInWindow: function(resourceDetails) { - // eslint-disable-next-line no-underscore-dangle - const title = resourceDetails.__resourceData.name; - const win = osparc.ui.window.Window.popUpInWindow(resourceDetails, title, this.WIDTH, this.HEIGHT).set({ + popUpInWindow: function(resourceData) { + const resourceDetails = new osparc.dashboard.ResourceDetails(resourceData); + const title = resourceData.name || resourceData.title; // title is used by functions + const window = osparc.ui.window.Window.popUpInWindow(resourceDetails, title, this.WIDTH, this.HEIGHT).set({ layout: new qx.ui.layout.Grow(), + ...osparc.ui.window.TabbedWindow.DEFAULT_PROPS, }); - win.set(osparc.ui.window.TabbedWindow.DEFAULT_PROPS); - win.set({ - width: this.WIDTH, - height: this.HEIGHT, + resourceDetails.addListener("closeWindow", () => window.close()); + window.addListener("close", () => { + // trigger children's destroy functions + resourceDetails.destroy(); }); - return win; + return { + resourceDetails, + window, + }; }, createToolbar: function() { @@ -118,7 +157,14 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { maxHeight: 40 }); return toolbar; - } + }, + + disableIfInUse: function(resourceData, widget) { + if (resourceData["resourceType"] === "study") { + // disable if it's being used + widget.setEnabled(!osparc.study.Utils.state.getCurrentGroupIds(resourceData["state"]).length); + } + }, }, properties: { @@ -133,31 +179,51 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { members: { __resourceData: null, __resourceModel: null, + __widgets: null, __infoPage: null, __servicesUpdatePage: null, + __conversationsPage: null, __permissionsPage: null, __tagsPage: null, __billingSettings: null, __classifiersPage: null, __qualityPage: null, - __addOpenButton: function(page) { + __addToolbarButtons: function(page) { const resourceData = this.__resourceData; + if (osparc.utils.Resources.isFunction(this.__resourceData)) { + return; // no toolbar buttons for functions + } + const toolbar = this.self().createToolbar(); page.addToHeader(toolbar); + if (["study", "template", "tutorial"].includes(resourceData["resourceType"])) { + const cantReadServices = osparc.study.Utils.getCantReadServices(resourceData["services"]); + if (cantReadServices.length) { + const requestAccessButton = new qx.ui.form.Button(this.tr("Request Apps Access")); + osparc.dashboard.resources.pages.BasePage.decorateHeaderButton(requestAccessButton); + requestAccessButton.set({ + minWidth: 170, + maxWidth: 170, + }); + requestAccessButton.addListener("execute", () => { + 
osparc.share.RequestServiceAccess.openRequestAccess(cantReadServices); + }); + toolbar.add(requestAccessButton); + } + } + if (this.__resourceData["resourceType"] === "study") { const payDebtButton = new qx.ui.form.Button(this.tr("Credits required")); page.payDebtButton = payDebtButton; osparc.dashboard.resources.pages.BasePage.decorateHeaderButton(payDebtButton); payDebtButton.addListener("execute", () => this.openBillingSettings()); - if (this.__resourceData["resourceType"] === "study") { - const studyData = this.__resourceData; - payDebtButton.set({ - visibility: osparc.study.Utils.isInDebt(studyData) ? "visible" : "excluded" - }); - } + const studyData = this.__resourceData; + payDebtButton.set({ + visibility: osparc.study.Utils.isInDebt(studyData) ? "visible" : "excluded" + }); toolbar.add(payDebtButton); } @@ -166,8 +232,7 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { toolbar.add(serviceVersionSelector); } - const studyAlias = osparc.product.Utils.getStudyAlias({firstUpperCase: true}); - const openText = (this.__resourceData["resourceType"] === "study") ? this.tr("Open") : this.tr("New") + " " + studyAlias; + const openText = osparc.dashboard.ResourceBrowserBase.getOpenText(this.__resourceData); const openButton = new osparc.ui.form.FetchButton(openText).set({ enabled: true }); @@ -183,10 +248,10 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { }); openButton.addListener("execute", () => this.__openTapped(openButton)); - if (this.__resourceData["resourceType"] === "study") { + if (["study", "template"].includes(this.__resourceData["resourceType"])) { const studyData = this.__resourceData; - const canBeOpened = osparc.study.Utils.canBeOpened(studyData); - openButton.setEnabled(canBeOpened); + const enabled = osparc.study.Utils.canBeOpened(studyData); + openButton.setEnabled(enabled); } toolbar.add(openButton); @@ -199,13 +264,8 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { return; } openButton.setFetching(true); - const params = { - url: { - "studyId": this.__resourceData["uuid"] - } - }; Promise.all([ - osparc.data.Resources.fetch("studies", "getOne", params), + osparc.store.Study.getInstance().getOne(this.__resourceData["uuid"]), osparc.store.Services.getStudyServices(this.__resourceData["uuid"]), ]) .then(values => { @@ -271,6 +331,10 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { this._openPage(this.__servicesUpdatePage); }, + openConversations: function() { + this._openPage(this.__conversationsPage); + }, + openAccessRights: function() { this._openPage(this.__permissionsPage); }, @@ -344,33 +408,39 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { // removeAll osparc.utils.Utils.removeAllChildren(tabsView); - // add Open service button - [ - this.__getInfoPage, - this.__getBillingPage, - this.__getServicesUpdatePage, - this.__getServicesBootOptionsPage, - this.__getConversationsPage, - this.__getPermissionsPage, - this.__getPublishPage, - this.__getCreateTemplatePage, - this.__getCreateFunctionsPage, - this.__getTagsPage, - this.__getQualityPage, - this.__getClassifiersPage, - this.__getPreviewPage - ].forEach(pageCallee => { - if (pageCallee) { - const page = pageCallee.call(this); - if (page) { - tabsView.add(page); - } + if (this.__resourceData["resourceType"] === "functionedTemplate") { + // for now, we only want the preview page + this.__addPreviewPage(); + this.fireEvent("pagesAdded"); + return; + } else if (osparc.utils.Resources.isFunction(this.__resourceData)) { + this.__addInfoPage(); + this.__addPermissionsPage(); + if 
(this.__resourceModel.getFunctionClass() === osparc.data.model.Function.FUNCTION_CLASS.PROJECT) { + this.__addPreviewPage(); } - }); - + this.fireEvent("pagesAdded"); + return; + } - this.__getActivityOverviewPopUp(); - this.__getProjectFilesPopUp(); + this.__addInfoPage(); + this.__addBillingPage(); + this.__addServicesUpdatePage(); + this.__addServicesBootOptionsPage(); + this.__addConversationsPage(); + this.__addPermissionsPage(); + this.__addPublishPage(); + this.__addCreateTemplatePage(); + this.__addCreateFunctionsPage(); + this.__addTagsPage(); + this.__addQualityPage(); + this.__addClassifiersPage(); + this.__addPreviewPage(); + + if (osparc.product.Utils.showComputationalActivity()) { + this.__addActivityOverviewPopUp(); + } + this.__addProjectFilesPopUp(); if (selectedTabId) { const pageFound = tabsView.getChildren().find(page => page.tabId === selectedTabId); @@ -393,6 +463,9 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { case "tutorial": this.fireDataEvent("updateTutorial", updatedData); break; + case "function": + this.fireDataEvent("updateFunction", updatedData); + break; case "hypertool": this.fireDataEvent("updateHypertool", updatedData); break; @@ -402,12 +475,17 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { } }, - __getInfoPage: function() { + __addPage: function(page) { + const tabsView = this.getChildControl("tabs-view"); + tabsView.add(page); + }, + + __addInfoPage: function() { const id = "Information"; const title = this.tr("Overview"); const iconSrc = "@FontAwesome5Solid/info/22"; const page = this.__infoPage = new osparc.dashboard.resources.pages.BasePage(title, iconSrc, id); - this.__addOpenButton(page); + this.__addToolbarButtons(page); const lazyLoadContent = () => { const resourceData = this.__resourceData; @@ -419,6 +497,12 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { const updatedData = e.getData(); this.__fireUpdateEvent(resourceData, updatedData); }); + } else if (osparc.utils.Resources.isFunction(resourceData)) { + infoCard = new osparc.info.FunctionLarge(resourceModel); + infoCard.addListener("updateFunction", e => { + const updatedData = e.getData(); + this.__fireUpdateEvent(resourceData, updatedData); + }); } else { infoCard = new osparc.info.StudyLarge(resourceModel, false); infoCard.addListener("updateStudy", e => { @@ -431,15 +515,19 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { infoCard.addListener("openClassifiers", () => this.openClassifiers()); infoCard.addListener("openQuality", () => this.openQuality()); page.addToContent(infoCard); + this.__widgets.push(infoCard); } page.addListenerOnce("appear", lazyLoadContent, this); - return page; + this.__addPage(page); }, - __getBillingPage: function() { + __addBillingPage: function() { if (!osparc.desktop.credits.Utils.areWalletsEnabled()) { - return null; + return; + } + if (osparc.utils.Resources.isStudyLike(this.__resourceData) && !osparc.data.model.Study.canIWrite(this.__resourceData["accessRights"])) { + return; } const resourceData = this.__resourceData; @@ -448,7 +536,7 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { const title = this.tr("Billing Settings"); const iconSrc = "@FontAwesome5Solid/cogs/22"; const page = this.__billingSettings = new osparc.dashboard.resources.pages.BasePage(title, iconSrc, id); - this.__addOpenButton(page); + this.__addToolbarButtons(page); if (resourceData["resourceType"] === "study") { const canBeOpened = osparc.study.Utils.canShowBillingOptions(resourceData); @@ -457,41 +545,44 @@ 
qx.Class.define("osparc.dashboard.ResourceDetails", { const lazyLoadContent = () => { const billingSettings = new osparc.study.BillingSettings(resourceData); + this.self().disableIfInUse(resourceData, billingSettings); billingSettings.addListener("debtPayed", () => { - if (resourceData["resourceType"] === "study") { - page.payDebtButton.set({ - visibility: osparc.study.Utils.isInDebt(resourceData) ? "visible" : "excluded" - }); - const canBeOpened = osparc.study.Utils.canBeOpened(resourceData); - page.openButton.setEnabled(canBeOpened); - } + page.payDebtButton.set({ + visibility: osparc.study.Utils.isInDebt(resourceData) ? "visible" : "excluded" + }); + const enabled = osparc.study.Utils.canBeOpened(resourceData); + page.openButton.setEnabled(enabled); }) + billingSettings.addListener("closeWindow", () => { + this.fireEvent("closeWindow"); + }, this); const billingScroll = new qx.ui.container.Scroll(billingSettings); page.addToContent(billingScroll); + this.__widgets.push(billingSettings); } page.addListenerOnce("appear", lazyLoadContent, this); - return page; + this.__addPage(page); } else if (osparc.utils.Resources.isService(resourceData)) { const id = "Tiers"; const title = this.tr("Tiers"); const iconSrc = "@FontAwesome5Solid/server/22"; const page = new osparc.dashboard.resources.pages.BasePage(title, iconSrc, id); - this.__addOpenButton(page); + this.__addToolbarButtons(page); const lazyLoadContent = () => { const pricingUnitsList = new osparc.service.PricingUnitsList(resourceData); const pricingUnitsListScroll = new qx.ui.container.Scroll(pricingUnitsList); page.addToContent(pricingUnitsListScroll); + this.__widgets.push(pricingUnitsList); } page.addListenerOnce("appear", lazyLoadContent, this); - return page; + this.__addPage(page); } - return null; }, - __getPreviewPage: function() { + __addPreviewPage: function() { const resourceData = this.__resourceData; if ( osparc.utils.Resources.isService(resourceData) || @@ -499,58 +590,65 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { ["app", "guided", "standalone"].includes(osparc.study.Utils.getUiMode(resourceData)) ) { // there is no pipelining or don't show it - return null; + return; } const id = "Pipeline"; const title = this.tr("Pipeline View"); const iconSrc = "@FontAwesome5Solid/eye/22"; const page = new osparc.dashboard.resources.pages.BasePage(title, iconSrc, id); - this.__addOpenButton(page); + this.__addToolbarButtons(page); - if (this.__resourceData["resourceType"] === "study") { - const studyData = this.__resourceData; - const canBeOpened = osparc.study.Utils.canShowPreview(studyData); - page.setEnabled(canBeOpened); - } + const studyData = osparc.utils.Resources.isFunction(this.__resourceData) ? this.__resourceModel.getTemplate().serialize() : this.__resourceData; + const enabled = osparc.study.Utils.canShowPreview(studyData); + page.setEnabled(enabled); const lazyLoadContent = () => { - const resourceModel = this.__resourceModel; + const resourceModel = osparc.utils.Resources.isFunction(this.__resourceData) ? 
this.__resourceModel.getTemplate() : this.__resourceModel; const preview = new osparc.study.StudyPreview(resourceModel); page.addToContent(preview); + this.__widgets.push(preview); } page.addListenerOnce("appear", lazyLoadContent, this); - return page; + this.__addPage(page); }, - __getConversationsPage: function() { + __addConversationsPage: function() { const resourceData = this.__resourceData; if (osparc.utils.Resources.isService(resourceData)) { - return null; + return; } const id = "Conversations"; const title = this.tr("Conversations"); const iconSrc = "@FontAwesome5Solid/comments/22"; - const page = new osparc.dashboard.resources.pages.BasePage(title, iconSrc, id); - this.__addOpenButton(page); + const page = this.__conversationsPage = new osparc.dashboard.resources.pages.BasePage(title, iconSrc, id); + this.__addToolbarButtons(page); const lazyLoadContent = () => { const conversations = new osparc.study.Conversations(resourceData); page.addToContent(conversations); + this.__widgets.push(conversations); } page.addListenerOnce("appear", lazyLoadContent, this); - return page; + this.__addPage(page); }, - __getPermissionsPage: function() { + __addPermissionsPage: function() { + if (osparc.utils.Resources.isStudyLike(this.__resourceData) && !osparc.data.model.Study.canIWrite(this.__resourceData["accessRights"])) { + return; + } + if (osparc.utils.Resources.isService(this.__resourceData) && !osparc.data.model.Service.canIWrite(this.__resourceData["accessRights"])) { + return; + } + const id = "Permissions"; const title = this.tr("Sharing"); const iconSrc = "@FontAwesome5Solid/share-alt/22"; const page = this.__permissionsPage = new osparc.dashboard.resources.pages.BasePage(title, iconSrc, id); - this.__addOpenButton(page); + this.__addToolbarButtons(page); const lazyLoadContent = () => { const resourceData = this.__resourceData; @@ -561,6 +659,12 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { const updatedData = e.getData(); this.__fireUpdateEvent(resourceData, updatedData); }, this); + } else if (osparc.utils.Resources.isFunction(resourceData)) { + collaboratorsView = new osparc.share.CollaboratorsFunction(resourceData); + collaboratorsView.addListener("updateAccessRights", e => { + const updatedData = e.getData(); + this.__fireUpdateEvent(resourceData, updatedData); + }, this); } else { collaboratorsView = new osparc.share.CollaboratorsStudy(resourceData); if (osparc.utils.Resources.isStudy(resourceData)) { @@ -576,31 +680,32 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { }, this); } page.addToContent(collaboratorsView); + this.__widgets.push(collaboratorsView); } page.addListenerOnce("appear", lazyLoadContent, this); - return page; + this.__addPage(page); }, - __getClassifiersPage: function() { + __addClassifiersPage: function() { if (!osparc.product.Utils.showClassifiers()) { - return null; + return; } const id = "Classifiers"; if (!osparc.data.Permissions.getInstance().canDo("study.classifier")) { - return null; + return; } const title = this.tr("Classifiers"); const iconSrc = "@FontAwesome5Solid/search/22"; const page = this.__classifiersPage = new osparc.dashboard.resources.pages.BasePage(title, iconSrc, id); - this.__addOpenButton(page); + this.__addToolbarButtons(page); const lazyLoadContent = () => { const resourceData = this.__resourceData; let classifiers = null; if ( (osparc.utils.Resources.isStudy(resourceData) || osparc.utils.Resources.isTemplate(resourceData)) && osparc.data.model.Study.canIWrite(resourceData["accessRights"]) || - 
osparc.utils.Resources.isService(resourceData) && osparc.service.Utils.canIWrite(resourceData["accessRights"]) + osparc.utils.Resources.isService(resourceData) && osparc.data.model.Service.canIWrite(resourceData["accessRights"]) ) { classifiers = new osparc.metadata.ClassifiersEditor(resourceData); classifiers.addListener("updateClassifiers", e => { @@ -611,15 +716,16 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { classifiers = new osparc.metadata.ClassifiersViewer(resourceData); } page.addToContent(classifiers); + this.__widgets.push(classifiers); } page.addListenerOnce("appear", lazyLoadContent, this); - return page; + this.__addPage(page); }, - __getQualityPage: function() { + __addQualityPage: function() { if (!osparc.product.Utils.showQuality()) { - return null; + return; } const resourceData = this.__resourceData; @@ -631,7 +737,7 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { const title = this.tr("Quality"); const iconSrc = "@FontAwesome5Solid/star-half/22"; const page = this.__qualityPage = new osparc.dashboard.resources.pages.BasePage(title, iconSrc, id); - this.__addOpenButton(page); + this.__addToolbarButtons(page); const lazyLoadContent = () => { const qualityEditor = new osparc.metadata.QualityEditor(resourceData); @@ -640,28 +746,28 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { this.__fireUpdateEvent(updatedData); }); page.addToContent(qualityEditor); + this.__widgets.push(qualityEditor); } page.addListenerOnce("appear", lazyLoadContent, this); - return page; + this.__addPage(page); } - return null; }, - __getTagsPage: function() { + __addTagsPage: function() { const resourceData = this.__resourceData; if (osparc.utils.Resources.isService(resourceData)) { - return null; + return; } if (!osparc.data.model.Study.canIWrite(resourceData["accessRights"])) { - return null; + return; } const id = "Tags"; const title = this.tr("Tags"); const iconSrc = "@FontAwesome5Solid/tags/22"; const page = this.__tagsPage = new osparc.dashboard.resources.pages.BasePage(title, iconSrc, id); - this.__addOpenButton(page); + this.__addToolbarButtons(page); const lazyLoadContent = () => { const tagManager = new osparc.form.tag.TagManager(resourceData); @@ -671,71 +777,78 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { this.__fireUpdateEvent(resourceData, updatedData); }, this); page.addToContent(tagManager); + this.__widgets.push(tagManager); } page.addListenerOnce("appear", lazyLoadContent, this); - return page; + this.__addPage(page); }, - __getServicesUpdatePage: function() { + __addServicesUpdatePage: function() { const resourceData = this.__resourceData; if (osparc.utils.Resources.isService(resourceData)) { - return null; + return; + } + if (osparc.utils.Resources.isStudyLike(this.__resourceData) && !osparc.data.model.Study.canIWrite(this.__resourceData["accessRights"])) { + return; } const id = "ServicesUpdate"; const title = this.tr("Services Updates"); const iconSrc = "@MaterialIcons/update/24"; const page = this.__servicesUpdatePage = new osparc.dashboard.resources.pages.BasePage(title, iconSrc, id); - this.__addOpenButton(page); + this.__addToolbarButtons(page); - if (this.__resourceData["resourceType"] === "study") { - const studyData = this.__resourceData; - const canBeOpened = osparc.study.Utils.canShowServiceUpdates(studyData); - page.setEnabled(canBeOpened); - } + const studyData = this.__resourceData; + const enabled = osparc.study.Utils.canShowServiceUpdates(studyData); + page.setEnabled(enabled); const lazyLoadContent = () => { const 
servicesUpdate = new osparc.metadata.ServicesInStudyUpdate(resourceData); + this.self().disableIfInUse(resourceData, servicesUpdate); servicesUpdate.addListener("updateService", e => { const updatedData = e.getData(); this.__fireUpdateEvent(resourceData, updatedData); }); page.addToContent(servicesUpdate); + this.__widgets.push(servicesUpdate); } page.addListenerOnce("appear", lazyLoadContent, this); - return page; + this.__addPage(page); }, - __getServicesBootOptionsPage: function() { + __addServicesBootOptionsPage: function() { const resourceData = this.__resourceData; if ( osparc.utils.Resources.isService(resourceData) || !osparc.data.Permissions.getInstance().canDo("study.node.bootOptions.read") ) { - return null; + return; + } + if (osparc.utils.Resources.isStudyLike(this.__resourceData) && !osparc.data.model.Study.canIWrite(this.__resourceData["accessRights"])) { + return; } const id = "ServicesBootOptions"; const title = this.tr("Boot Options"); const iconSrc = "@FontAwesome5Solid/play-circle/22"; const page = new osparc.dashboard.resources.pages.BasePage(title, iconSrc, id); - this.__addOpenButton(page); + this.__addToolbarButtons(page); - if (this.__resourceData["resourceType"] === "study") { - const studyData = this.__resourceData; - const canBeOpened = osparc.study.Utils.canShowServiceBootOptions(studyData); - page.setEnabled(canBeOpened); - } + const studyData = this.__resourceData; + const enabled = osparc.study.Utils.canShowServiceBootOptions(studyData); + page.setEnabled(enabled); const lazyLoadContent = () => { const servicesBootOpts = new osparc.metadata.ServicesInStudyBootOpts(resourceData); + this.self().disableIfInUse(resourceData, servicesBootOpts); servicesBootOpts.addListener("updateService", e => { const updatedData = e.getData(); this.__fireUpdateEvent(resourceData, updatedData); }); page.addToContent(servicesBootOpts); + this.__widgets.push(servicesBootOpts); if ( osparc.utils.Resources.isStudy(resourceData) || @@ -757,15 +870,15 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { } page.addListenerOnce("appear", lazyLoadContent, this); - return page; + this.__addPage(page); }, - __getPublishPage: function() { + __addPublishPage: function() { if ( !osparc.utils.Resources.isStudy(this.__resourceData) || !osparc.product.Utils.showPublicProjects() ) { - return null; + return; } const canIWrite = osparc.data.model.Study.canIWrite(this.__resourceData["accessRights"]); @@ -776,11 +889,9 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { const title = this.tr("Publish"); const page = new osparc.dashboard.resources.pages.BasePage(title, iconSrc, id); - if (this.__resourceData["resourceType"] === "study") { - const studyData = this.__resourceData; - const canBeOpened = osparc.study.Utils.canBeDuplicated(studyData); - page.setEnabled(canBeOpened); - } + const studyData = this.__resourceData; + const enabled = osparc.study.Utils.canBeDuplicated(studyData); + page.setEnabled(enabled); const lazyLoadContent = () => { const makeItPublic = true; @@ -793,20 +904,20 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { toolbar.add(publishTemplateButton); page.addToHeader(toolbar); page.addToContent(saveAsTemplate); + this.__widgets.push(saveAsTemplate); } page.addListenerOnce("appear", lazyLoadContent, this); - return page; + this.__addPage(page); } - return null; }, - __getCreateTemplatePage: function() { + __addCreateTemplatePage: function() { if ( !osparc.utils.Resources.isStudy(this.__resourceData) || - osparc.product.Utils.showTemplates() + 
!osparc.product.Utils.showTemplates() ) { - return null; + return; } const canIWrite = osparc.data.model.Study.canIWrite(this.__resourceData["accessRights"]); @@ -817,11 +928,9 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { const title = this.tr("Template"); const page = new osparc.dashboard.resources.pages.BasePage(title, iconSrc, id); - if (this.__resourceData["resourceType"] === "study") { - const studyData = this.__resourceData; - const canBeOpened = osparc.study.Utils.canBeDuplicated(studyData); - page.setEnabled(canBeOpened); - } + const studyData = this.__resourceData; + const enabled = osparc.study.Utils.canBeDuplicated(studyData); + page.setEnabled(enabled); const lazyLoadContent = () => { const makeItPublic = false; @@ -834,42 +943,45 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { toolbar.add(createTemplateButton); page.addToHeader(toolbar); page.addToContent(saveAsTemplate); + this.__widgets.push(saveAsTemplate); } page.addListenerOnce("appear", lazyLoadContent, this); - return page; + this.__addPage(page); } - return null; }, - __getCreateFunctionsPage: function() { - if (osparc.utils.DisabledPlugins.isFunctionsDisabled()) { - return null; + __addCreateFunctionsPage: function() { + if (!osparc.data.Permissions.getInstance().checkFunctionPermissions("writeFunctions")) { + return; } if (!osparc.utils.Resources.isStudy(this.__resourceData)) { - return null; + return; } - if (!osparc.study.Utils.canCreateFunction(this.__resourceData["workbench"])) { - return null; - } + const isPotentialFunction = osparc.study.CreateFunction.isPotentialFunction(this.__resourceData["workbench"]); const id = "CreateFunction"; const iconSrc = "@MaterialIcons/functions/24"; const title = this.tr("Create Function"); const page = new osparc.dashboard.resources.pages.BasePage(title, iconSrc, id); + page.setEnabled(isPotentialFunction); + osparc.utils.Utils.toolTipTextOnDisabledWidget(page.getChildControl("button"), osparc.study.CreateFunction.CREATE_FUNCTION_TEXT); const createFunction = new osparc.study.CreateFunction(this.__resourceData); const createFunctionButton = createFunction.getCreateFunctionButton(); + osparc.utils.Utils.setIdToWidget(createFunctionButton, "create_function_page_btn"); osparc.dashboard.resources.pages.BasePage.decorateHeaderButton(createFunctionButton); const toolbar = this.self().createToolbar(); toolbar.add(createFunctionButton); page.addToHeader(toolbar); page.addToContent(createFunction); - return page; + this.__widgets.push(createFunction); + + this.__addPage(page); }, - __getProjectFilesPopUp: function() { + __addProjectFilesPopUp: function() { const resourceData = this.__resourceData; if (!osparc.utils.Resources.isService(resourceData)) { const title = osparc.product.Utils.resourceTypeToAlias(resourceData["resourceType"], {firstUpperCase: true}) + this.tr(" Files"); @@ -883,7 +995,7 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { gap: 14, cursor: "pointer", }); - dataAccess.addListener("tap", () => osparc.widget.StudyDataManager.popUpInWindow(resourceData["uuid"], null, title)); + dataAccess.addListener("tap", () => osparc.widget.StudyDataManager.popUpInWindow(resourceData, null, title)); this.addWidgetToTabs(dataAccess); if (resourceData["resourceType"] === "study") { @@ -893,7 +1005,7 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { } }, - __getActivityOverviewPopUp: function() { + __addActivityOverviewPopUp: function() { const resourceData = this.__resourceData; if (osparc.utils.Resources.isStudy(resourceData)) { const title = 
this.tr("Activity Overview..."); @@ -911,5 +1023,11 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { this.addWidgetToTabs(dataAccess); } }, + + // overridden + destroy: function() { + this.__widgets.forEach(w => w.destroy()); + this.base(arguments); + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js b/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js index 131838fd18ef..c18900a3b317 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js @@ -26,9 +26,10 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { this._setLayout(new qx.ui.layout.HBox(5)); this.set({ - backgroundColor: "input_background", + backgroundColor: this.self().BG_COLOR, paddingLeft: 6, height: this.self().HEIGHT, + maxHeight: this.self().HEIGHT, decorator: "rounded", }); @@ -41,8 +42,27 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { this.__currentFilter = null; }, + properties: { + showFilterMenu: { + check: "Boolean", + init: true, + event: "changeShowFilterMenu", + } + }, + statics: { HEIGHT: 36, + BG_COLOR: "input_background", + + getInitialFilterData: function() { + return { + tags: [], + classifiers: [], + sharedWith: null, + appType: null, + text: "" + }; + }, getSharedWithOptions: function(resourceType) { if (resourceType === "template") { @@ -70,11 +90,24 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { label: qx.locale.Manager.tr("Public") + " " + resourceAlias, icon: "@FontAwesome5Solid/globe/20" }]; - } + }, + + createChip: function(chipType, chipId, chipLabel) { + const chipButton = new qx.ui.form.Button().set({ + label: osparc.utils.Utils.capitalize(chipType) + " = '" + chipLabel + "'", + icon: "@MaterialIcons/close/12", + toolTipText: chipLabel, + appearance: "chip-button" + }); + chipButton.type = chipType; + chipButton.id = chipId; + return chipButton; + }, }, events: { - "filterChanged": "qx.event.type.Data" + "filterChanged": "qx.event.type.Data", + "resetButtonPressed": "qx.event.type.Event", }, members: { @@ -100,7 +133,7 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { break; case "text-field": control = new qx.ui.form.TextField().set({ - backgroundColor: "input_background", + backgroundColor: this.self().BG_COLOR, font: "text-16", placeholder: this.tr("search"), alignY: "bottom", @@ -175,16 +208,16 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { this.__hideFilterMenu(); } }, this); - textField.addListener("changeValue", () => this.__filter(), this); + textField.addListener("focusout", () => this.__filter(), this); const resetButton = this.getChildControl("reset-button"); - resetButton.addListener("execute", () => this.__resetFilters(), this); + resetButton.addListener("execute", () => this.resetButtonPressed(), this); osparc.store.Store.getInstance().addListener("changeTags", () => this.__buildFiltersMenu(), this); }, getTextFilterValue: function() { - return this.getChildControl("text-field").getValue(); + return this.getChildControl("text-field").getValue() ? 
this.getChildControl("text-field").getValue().trim() : null; }, __showFilterMenu: function() { @@ -203,7 +236,9 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { left: left }); - this.__filtersMenu.show(); + if (this.getShowFilterMenu()) { + this.__filtersMenu.show(); + } }, __hideFilterMenu: function() { @@ -278,6 +313,11 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { // hypertools filter const hypertoolTypeButton = new qx.ui.menu.Button("Hypertools", null); + hypertoolTypeButton.exclude(); + osparc.store.Templates.getHypertools() + .then(hypertools => { + hypertoolTypeButton.setVisibility(hypertools.length > 0 ? "visible" : "excluded"); + }); osparc.utils.Utils.replaceIconWithThumbnail(hypertoolTypeButton, osparc.data.model.StudyUI.HYPERTOOL_ICON, 18); serviceTypeMenu.add(hypertoolTypeButton); hypertoolTypeButton.addListener("execute", () => this.__addChip("app-type", "hypertool", "Hypertools"), this); @@ -299,7 +339,6 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { }); }, - setSharedWithActiveFilter: function(optionId, optionLabel) { this.__removeChips("shared-with"); if (optionId === "show-all") { @@ -318,30 +357,37 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { } }, + // this widget pops up a larger widget with all filters visible + // and lets users search between projects, templates, public projects and, eventually, files + popUpSearchBarFilter: function() { + const initFilterData = this.getFilterData(); + const searchBarFilterExtended = new osparc.dashboard.SearchBarFilterExtended(this.__resourceType, initFilterData); + const bounds = osparc.utils.Utils.getBounds(this); + searchBarFilterExtended.setLayoutProperties({ + left: bounds.left, + top: bounds.top, + }); + searchBarFilterExtended.set({ + width: bounds.width, + }); + searchBarFilterExtended.addListener("resetButtonPressed", () => { + this.resetButtonPressed(); + }, this); + return searchBarFilterExtended; + }, + __addChip: function(type, id, label) { const activeFilter = this.getChildControl("active-filters"); const chipFound = activeFilter.getChildren().find(chip => chip.type === type && chip.id === id); if (chipFound) { return; } - const chip = this.__createChip(type, id, label); + const chip = this.self().createChip(type, id, label); + chip.addListener("execute", () => this.__removeChip(type, id), this); activeFilter.add(chip); this.__filter(); }, - __createChip: function(chipType, chipId, chipLabel) { - const chipButton = new qx.ui.form.Button().set({ - label: osparc.utils.Utils.capitalize(chipType) + " = '" + chipLabel + "'", - icon: "@MaterialIcons/close/12", - toolTipText: chipLabel, - appearance: "chip-button" - }); - chipButton.type = chipType; - chipButton.id = chipId; - chipButton.addListener("execute", () => this.__removeChip(chipType, chipId), this); - return chipButton; - }, - __removeChip: function(type, id) { const activeFilter = this.getChildControl("active-filters"); const chipFound = activeFilter.getChildren().find(chip => chip.type === type && chip.id === id); @@ -368,19 +414,14 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { this.getChildControl("text-field").resetValue(); }, - __resetFilters: function() { + resetButtonPressed: function() { this.resetFilters(); - this.__filter(); + this._filterChange(this.self().getInitialFilterData()); + this.fireEvent("resetButtonPressed"); }, getFilterData: function() { - const filterData = { - tags: [], - classifiers: [], - sharedWith: null, - appType: null, - text: "" - }; + const filterData = 
this.self().getInitialFilterData(); const textFilter = this.getTextFilterValue(); filterData["text"] = textFilter ? textFilter : ""; this.getChildControl("active-filters").getChildren().forEach(chip => { diff --git a/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilterExtended.js b/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilterExtended.js new file mode 100644 index 000000000000..5f127fc8fd46 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilterExtended.js @@ -0,0 +1,359 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.dashboard.SearchBarFilterExtended", { + extend: qx.ui.core.Widget, + + construct: function(resourceType, initFilterData = {}) { + this.base(arguments, "searchBarFilter-"+resourceType, "searchBarFilter"); + + this._setLayout(new qx.ui.layout.VBox(10)); + + this.set({ + backgroundColor: osparc.dashboard.SearchBarFilter.BG_COLOR, + padding: 8, + decorator: "rounded", + }); + osparc.utils.Utils.addBorder(this, 1, qx.theme.manager.Color.getInstance().resolve("product-color")); + + this.__resourceType = resourceType; + this.__initFilterData = initFilterData; + + this.__buildLayout(); + + this.setCurrentContext(osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS); + + qx.core.Init.getApplication().getRoot().add(this); + + this.__attachHideHandlers(); + }, + + events: { + "filterChanged": "qx.event.type.Data", + "resetButtonPressed": "qx.event.type.Event", + }, + + properties: { + currentContext: { + check: [ + "searchProjects", // osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS, + "searchTemplates", // osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_TEMPLATES, + "searchPublicTemplates", // osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PUBLIC_TEMPLATES, + "searchFunctions" // osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_FUNCTIONS + ], + init: null, + nullable: false, + event: "changeCurrentContext", + apply: "__applyCurrentContext", + }, + }, + + statics: { + decorateListItem: function(listItem) { + listItem.set({ + gap: 8, + backgroundColor: osparc.dashboard.SearchBarFilter.BG_COLOR, + }); + }, + + createListItem: function(label, icon, model) { + const listItem = new qx.ui.form.ListItem(label, icon, model); + this.self().decorateListItem(listItem); + return listItem; + }, + }, + + members: { + __resourceType: null, + + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "search-bar-filter": { + control = new osparc.dashboard.SearchBarFilter(this.__resourceType).set({ + showFilterMenu: false, + }); + const textField = control.getChildControl("text-field"); + textField.addListener("appear", () => { + textField.focus(); + textField.activate(); + }); + const resetButton = control.getChildControl("reset-button"); + resetButton.set({ + paddingRight: 2, // 10-8 + opacity: 0.7, + backgroundColor: "transparent", + }); + osparc.utils.Utils.hideBorder(resetButton); + this._add(control); + break; + } + case "context-drop-down": { + control = new qx.ui.form.SelectBox().set({ + minWidth: 150, + }); + control.getChildControl("arrow").syncAppearance(); // force sync to show the arrow + 
this.self().decorateListItem(control.getChildControl("atom")); + const searchBarFilter = this.getChildControl("search-bar-filter"); + searchBarFilter._addAt(control, 3); //"search-icon", "active-filters", "text-field", "reset-button" + break; + } + case "my-projects-button": { + control = this.self().createListItem( + this.tr("My Projects"), + "@FontAwesome5Solid/file/14", + "myProjects" + ); + const contextDropDown = this.getChildControl("context-drop-down"); + contextDropDown.add(control); + break; + } + case "templates-button": { + control = this.self().createListItem( + this.tr("Templates"), + "@FontAwesome5Solid/copy/14", + "templates" + ); + const contextDropDown = this.getChildControl("context-drop-down"); + contextDropDown.add(control); + break; + } + case "public-projects-button": { + control = this.self().createListItem( + this.tr("Public Projects"), + "@FontAwesome5Solid/globe/14", + "publicProjects" + ); + const contextDropDown = this.getChildControl("context-drop-down"); + contextDropDown.add(control); + break; + } + case "functions-button": { + control = this.self().createListItem( + this.tr("Functions"), + "@MaterialIcons/functions/18", + "functions" + ); + const contextDropDown = this.getChildControl("context-drop-down"); + contextDropDown.add(control); + break; + } + case "filter-buttons": + control = new qx.ui.toolbar.ToolBar().set({ + backgroundColor: osparc.dashboard.SearchBarFilter.BG_COLOR, + }); + this._add(control); + break; + case "shared-with-button": + control = new qx.ui.toolbar.MenuButton(this.tr("Shared with"), "@FontAwesome5Solid/share-alt/12"); + this.__addSharedWithMenu(control); + this.getChildControl("filter-buttons").add(control); + break; + case "tags-button": + control = new qx.ui.toolbar.MenuButton(this.tr("Tags"), "@FontAwesome5Solid/tags/12"); + this.__addTagsMenu(control); + this.getChildControl("filter-buttons").add(control); + break; + } + return control || this.base(arguments, id); + }, + + __buildLayout: function() { + const searchBarFilter = this.getChildControl("search-bar-filter"); + + const contextDropDown = this.getChildControl("context-drop-down"); + this.getChildControl("my-projects-button"); + if (osparc.product.Utils.showTemplates()) { + this.getChildControl("templates-button"); + } + if (osparc.product.Utils.showPublicProjects()) { + this.getChildControl("public-projects-button"); + } + if (osparc.product.Utils.showFunctions()) { + this.getChildControl("functions-button"); + } + if (contextDropDown.getChildren().length === 1) { + contextDropDown.hide(); + } + contextDropDown.addListener("changeSelection", e => { + const selection = e.getData(); + if (selection.length) { + const selectedContext = selection[0].getModel(); + switch (selectedContext) { + case "myProjects": + this.setCurrentContext(osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS); + break; + case "templates": + this.setCurrentContext(osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_TEMPLATES); + break; + case "publicProjects": + this.setCurrentContext(osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PUBLIC_TEMPLATES); + break; + case "functions": + this.setCurrentContext(osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_FUNCTIONS); + break; + } + } + }); + + // Set initial state based on the provided initFilterData + const activeFilters = searchBarFilter.getChildControl("active-filters"); + const textField = searchBarFilter.getChildControl("text-field"); + if ("sharedWith" in this.__initFilterData && this.__initFilterData["sharedWith"]) { + const sharedWithOptions = 
osparc.dashboard.SearchBarFilter.getSharedWithOptions(this.__resourceType); + const optionsFound = sharedWithOptions.find(option => option.id === this.__initFilterData["sharedWith"]); + if (optionsFound) { + const chip = osparc.dashboard.SearchBarFilter.createChip("sharedWith", optionsFound.id, optionsFound.label); + activeFilters.add(chip); + } + } + if ("tags" in this.__initFilterData && this.__initFilterData["tags"]) { + const tags = osparc.store.Tags.getInstance().getTags(); + this.__initFilterData["tags"].forEach(tagId => { + const tagFound = tags.find(tag => tag.getTagId() === tagId); + if (tagFound) { + const chip = osparc.dashboard.SearchBarFilter.createChip("tag", tagId, tagFound.getName()); + activeFilters.add(chip); + } + }); + } + if ("text" in this.__initFilterData && this.__initFilterData["text"]) { + textField.setValue(this.__initFilterData["text"]); + } + + // Add listeners + textField.addListener("keypress", e => { + if (e.getKeyIdentifier() === "Enter") { + this.__filter("text", textField.getValue()); + } + }, this); + textField.addListener("unfocus", () => { + this.__filter("text", textField.getValue()); + }, this); + + const resetButton = searchBarFilter.getChildControl("reset-button"); + resetButton.addListener("tap", () => { + this.fireEvent("resetButtonPressed"); + this.exclude(); + }); + }, + + __applyCurrentContext: function(value, old) { + if (value === old) { + return; + } + const contextDropDown = this.getChildControl("context-drop-down"); + const searchBarFilter = this.getChildControl("search-bar-filter"); + const sharedWithButton = this.getChildControl("shared-with-button"); + const tagsButton = this.getChildControl("tags-button"); + switch (value) { + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS: + contextDropDown.setSelection([this.getChildControl("my-projects-button")]); + searchBarFilter.getChildControl("text-field").setPlaceholder(this.tr("Search in My projects")); + sharedWithButton.setVisibility("visible"); + tagsButton.setVisibility("visible"); + break; + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_TEMPLATES: + contextDropDown.setSelection([this.getChildControl("templates-button")]); + searchBarFilter.getChildControl("text-field").setPlaceholder(this.tr("Search in Templates")); + sharedWithButton.setVisibility("excluded"); + tagsButton.setVisibility("visible"); + break; + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PUBLIC_TEMPLATES: + contextDropDown.setSelection([this.getChildControl("public-projects-button")]); + searchBarFilter.getChildControl("text-field").setPlaceholder(this.tr("Search in Public Projects")); + sharedWithButton.setVisibility("excluded"); + tagsButton.setVisibility("visible"); + break; + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_FUNCTIONS: + contextDropDown.setSelection([this.getChildControl("functions-button")]); + searchBarFilter.getChildControl("text-field").setPlaceholder(this.tr("Search in Functions")); + sharedWithButton.setVisibility("excluded"); + tagsButton.setVisibility("excluded"); + break; + } + }, + + __filter: function(filterType, filterData) { + this.fireDataEvent("filterChanged", { + searchContext: this.getCurrentContext(), + filterType, + filterData, + }); + this.exclude(); + }, + + __addSharedWithMenu: function(menuButton) { + const menu = this.__sharedWithMenu = new qx.ui.menu.Menu(); + + const sharedWithRadioGroup = new qx.ui.form.RadioGroup(); + const options = osparc.dashboard.SearchBarFilter.getSharedWithOptions(this.__resourceType); + options.forEach((option, idx) => { + 
const button = new qx.ui.menu.RadioButton(option.label); + menu.add(button); + button.addListener("execute", () => this.__filter("sharedWith", option)); + sharedWithRadioGroup.add(button); + // preselect show-all + if (idx === 0) { + sharedWithRadioGroup.setSelection([button]); + } + }); + menuButton.setMenu(menu); + }, + + __addTagsMenu: function(menuButton) { + const tags = osparc.store.Tags.getInstance().getTags(); + menuButton.setVisibility(tags.length ? "visible" : "excluded"); + if (tags.length) { + const menu = this.__tagsMenu = new qx.ui.menu.Menu(); + osparc.utils.Utils.setIdToWidget(menu, "searchBarFilter-tags-menu"); + tags.forEach(tag => { + const tagButton = new qx.ui.menu.Button(tag.getName(), "@FontAwesome5Solid/tag/12"); + tagButton.getChildControl("icon").setTextColor(tag.getColor()); + menu.add(tagButton); + tagButton.addListener("execute", () => this.__filter("tag", tag)); + }); + menuButton.setMenu(menu); + } + }, + + __attachHideHandlers: function() { + const tapListener = e => { + const excludeElements = [ + this, + this.__sharedWithMenu, + this.__tagsMenu, + ]; + // handle clicks on the drop down menu that might go out of bounds + const contextDropDown = this.getChildControl("context-drop-down"); + const popup = contextDropDown.getChildControl("popup"); + if (popup.isVisible()) { + excludeElements.push(popup); + } + for (let i = 0; i < excludeElements.length; i++) { + if (excludeElements[i] && osparc.utils.Utils.isMouseOnElement(excludeElements[i], e)) { + return; + } + } + + this.exclude(); + document.removeEventListener("mousedown", tapListener); + }; + document.addEventListener("mousedown", tapListener); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/SortedByMenuButton.js b/services/static-webserver/client/source/class/osparc/dashboard/SortedByMenuButton.js index 7bb0bcb8d4a2..472d528ab40e 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/SortedByMenuButton.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/SortedByMenuButton.js @@ -34,38 +34,21 @@ qx.Class.define("osparc.dashboard.SortedByMenuButton", { this.setMenu(sortedByMenu); const options = this.self().getSortByOptions(); - options.forEach((option, idx) => { - const btn = new qx.ui.menu.Button(); - btn.btnId = option.id; - btn.set({ - label: option.label, - icon: null - }); + const btn = new qx.ui.menu.Button(option.label); + btn.field = option.id; // Sort by last modified date if (idx === options.length -1) { - this.__menuButton = btn; + this.__selectedMenuButton = btn; btn.setIcon("@FontAwesome5Solid/arrow-down/14"); } sortedByMenu.add(btn); - btn.addListener("execute", () => { - this.__buttonExecuted(btn) - }); + btn.addListener("execute", () => this.__buttonExecuted(btn)); }); - - this.addListener("changeSort", e => { - const sort = e.getData(); - this.__handelSortEvent(sort) - }, this); }, statics: { - DefaultSorting: { - field: "last_change_date", - direction: "desc" - }, - getSortByOptions: function() { return [{ id: "name", @@ -96,45 +79,40 @@ qx.Class.define("osparc.dashboard.SortedByMenuButton", { }, nullable: false, event: "changeSort", - apply: "__applySort" + apply: "__handelSortEvent", } }, members: { - __menuButton: null, + __selectedMenuButton: null, + __buttonExecuted: function(btn) { - if (this.__menuButton) { - this.__menuButton.setIcon(null); + if (this.__selectedMenuButton) { + this.__selectedMenuButton.setIcon(null); } - this.__menuButton = btn; + this.__selectedMenuButton = btn; this.set({ label: 
btn.getLabel(), icon: "@FontAwesome5Solid/chevron-down/10" }); - const data = { - "id": btn.btnId, - }; - this.__handelSort(data.id); - }, - - __handelSort: function(field) { + const field = btn.field; if (field === this.getSort().field) { const { direction } = this.getSort(); this.setSort({ field, direction: !direction - }) - return; + }); + } else { + this.setSort({ + field, + direction: true + }); } - this.setSort({ - field, - direction: true - }) }, __handelSortEvent: function({field, direction}) { - this.__menuButton.setIcon(direction ? "@FontAwesome5Solid/arrow-down/14" : "@FontAwesome5Solid/arrow-up/14") + this.__selectedMenuButton.setIcon(direction ? "@FontAwesome5Solid/arrow-down/14" : "@FontAwesome5Solid/arrow-up/14") this.setIcon(direction ? "@FontAwesome5Solid/arrow-down/14" : "@FontAwesome5Solid/arrow-up/14") const sort = { field: field, @@ -143,8 +121,15 @@ qx.Class.define("osparc.dashboard.SortedByMenuButton", { this.fireDataEvent("sortByChanged", sort); }, - __applySort: function(value, old) { + hideOptionButton: function(field) { + const btn = this.getMenu().getChildren().find(btn => btn.field === field); + if (btn) { + btn.exclude(); + } + }, - } + showAllOptions: function() { + this.getMenu().getChildren().forEach(btn => btn.show()); + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js index 6ca86566385f..8c5e5e7c3c05 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js @@ -47,15 +47,34 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { "publishTemplate": "qx.event.type.Data", }, + statics: { + CONTEXT: { + PROJECTS: "studiesAndFolders", + WORKSPACES: "workspaces", + TEMPLATES: "templates", + PUBLIC_TEMPLATES: "publicTemplates", + FUNCTIONS: "functions", + TRASH: "trash", + SEARCH_PROJECTS: "searchProjects", + SEARCH_TEMPLATES: "searchTemplates", + SEARCH_PUBLIC_TEMPLATES: "searchPublicTemplates", + SEARCH_FUNCTIONS: "searchFunctions", + } + }, + properties: { currentContext: { check: [ - "studiesAndFolders", - "workspaces", - "search", - "templates", - "public", - "trash", + "studiesAndFolders", // osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS, + "workspaces", // osparc.dashboard.StudyBrowser.CONTEXT.WORKSPACES, + "templates", // osparc.dashboard.StudyBrowser.CONTEXT.TEMPLATES, + "publicTemplates", // osparc.dashboard.StudyBrowser.CONTEXT.PUBLIC_TEMPLATES, + "functions", // osparc.dashboard.StudyBrowser.CONTEXT.FUNCTIONS, + "trash", // osparc.dashboard.StudyBrowser.CONTEXT.TRASH, + "searchProjects", // osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS, + "searchTemplates", // osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_TEMPLATES, + "searchPublicTemplates", // osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PUBLIC_TEMPLATES, + "searchFunctions", // osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_FUNCTIONS, ], nullable: false, init: "studiesAndFolders", @@ -89,13 +108,14 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, members: { - __dontShowTutorial: null, + __dontQuickStart: null, __header: null, + __sortByButton: null, __workspacesList: null, __foldersList: null, __loadingFolders: null, __loadingWorkspaces: null, - __dragWidget: null, + __lastUrlParams: null, // overridden initResources: function() { @@ -104,6 +124,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { } this._resourcesInitialized = true; + 
this._showLoadingPage(this.tr("Loading Projects...")); this._resourcesList = []; this.__getActiveStudy() .then(() => { @@ -118,8 +139,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { } else { this.reloadResources(); } - // "Starting..." page - this._hideLoadingPage(); // since all the resources (templates, users, orgs...) were already loaded, notifications can be built osparc.data.Resources.get("notifications") @@ -131,12 +150,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __getActiveStudy: function() { - const params = { - url: { - tabId: osparc.utils.Utils.getClientSessionID() - } - }; - return osparc.data.Resources.fetch("studies", "getActive", params) + return osparc.store.Study.getInstance().getActive(osparc.utils.Utils.getClientSessionID()) .then(studyData => { if (studyData) { osparc.store.Store.getInstance().setCurrentStudyId(studyData["uuid"]); @@ -150,10 +164,14 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { osparc.data.Permissions.getInstance().canDo("studies.user.read") && osparc.auth.Manager.getInstance().isLoggedIn() ) { - this.__reloadFolders(); - this.__reloadStudies(); + Promise.all([ + this.__reloadFolders(), + this.__reloadStudies(), + ]) + .finally(() => this._hideLoadingPage()); } else { this.__resetStudiesList(); + this._hideLoadingPage(); } }, @@ -164,35 +182,43 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { __reloadWorkspaces: function() { if ( !osparc.auth.Manager.getInstance().isLoggedIn() || - ["studiesAndFolders", "templates", "public"].includes(this.getCurrentContext()) || + ![ + osparc.dashboard.StudyBrowser.CONTEXT.WORKSPACES, + osparc.dashboard.StudyBrowser.CONTEXT.TRASH, + osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS, + ].includes(this.getCurrentContext()) || this.__loadingWorkspaces ) { return; } + let filterEnabled = false; let request = null; switch (this.getCurrentContext()) { - case "search": { + case osparc.dashboard.StudyBrowser.CONTEXT.WORKSPACES: + request = osparc.store.Workspaces.getInstance().fetchWorkspaces(); + break; + case osparc.dashboard.StudyBrowser.CONTEXT.TRASH: + request = osparc.store.Workspaces.getInstance().fetchAllTrashedWorkspaces(); + break; + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS: { const filterData = this._searchBarFilter.getFilterData(); const text = filterData.text ? 
encodeURIComponent(filterData.text) : ""; + filterEnabled = filterData.tags.length || filterData.sharedWith; request = osparc.store.Workspaces.getInstance().searchWorkspaces(text, this.getOrderBy()); break; } - case "workspaces": { - request = osparc.store.Workspaces.getInstance().fetchWorkspaces(); - break; - } - case "trash": - request = osparc.store.Workspaces.getInstance().fetchAllTrashedWorkspaces(); - break; } this.__loadingWorkspaces = true; this.__setWorkspacesToList([]); request .then(workspaces => { + if (filterEnabled) { + return Promise.resolve(); + } this.__setWorkspacesToList(workspaces); - if (this.getCurrentContext() === "trash") { + if (this.getCurrentContext() === osparc.dashboard.StudyBrowser.CONTEXT.TRASH) { if (workspaces.length) { this.__header.getChildControl("empty-trash-button").show(); } @@ -208,37 +234,46 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { __reloadFolders: function() { if ( !osparc.auth.Manager.getInstance().isLoggedIn() || - ["workspaces", "templates", "public"].includes(this.getCurrentContext()) || + ![ + osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS, + osparc.dashboard.StudyBrowser.CONTEXT.TRASH, + osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS, + ].includes(this.getCurrentContext()) || this.__loadingFolders ) { return; } + let filterEnabled = false; let request = null; switch (this.getCurrentContext()) { - case "search": { - const filterData = this._searchBarFilter.getFilterData(); - const text = filterData.text ? encodeURIComponent(filterData.text) : ""; // name, description and uuid - request = osparc.store.Folders.getInstance().searchFolders(text, this.getOrderBy()); - break; - } - case "studiesAndFolders": { + case osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS: { const workspaceId = this.getCurrentWorkspaceId(); const folderId = this.getCurrentFolderId(); request = osparc.store.Folders.getInstance().fetchFolders(folderId, workspaceId, this.getOrderBy()); break; } - case "trash": + case osparc.dashboard.StudyBrowser.CONTEXT.TRASH: request = osparc.store.Folders.getInstance().fetchAllTrashedFolders(this.getOrderBy()); break; + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS: { + const filterData = this._searchBarFilter.getFilterData(); + const text = filterData.text ? 
encodeURIComponent(filterData.text) : ""; // name, description and uuid + filterEnabled = filterData.tags.length || filterData.sharedWith; + request = osparc.store.Folders.getInstance().searchFolders(text, this.getOrderBy()); + break; + } } this.__loadingFolders = true; this.__setFoldersToList([]); - request + return request .then(folders => { + if (filterEnabled) { + return Promise.resolve(); + } this.__setFoldersToList(folders); - if (this.getCurrentContext() === "trash") { + if (this.getCurrentContext() === osparc.dashboard.StudyBrowser.CONTEXT.TRASH) { if (folders.length) { this.__header.getChildControl("empty-trash-button").show(); } @@ -257,7 +292,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { __reloadStudies: function() { if ( !osparc.auth.Manager.getInstance().isLoggedIn() || - this.getCurrentContext() === "workspaces" || + this.getCurrentContext() === osparc.dashboard.StudyBrowser.CONTEXT.WORKSPACES || // all but workspaces this._loadingResourcesBtn.isFetching() ) { return; @@ -267,7 +302,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this._loadingResourcesBtn.setFetching(true); this._loadingResourcesBtn.setVisibility("visible"); - this.__getNextStudiesRequest() + return this.__getNextStudiesRequest() .then(resp => { // Context might have been changed while waiting for the response. // The new call is on the way, therefore this response can be ignored. @@ -276,55 +311,84 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { return; } - if (["templates", "public"].includes(this.getCurrentContext())) { - const templates = resp["data"]; - templates.forEach(template => template["resourceType"] = "template"); - // For now, filtered in the frontend - const groupsStore = osparc.store.Groups.getInstance(); - const everyoneGid = groupsStore.getEveryoneGroup().getGroupId(); - const productEveryoneGid = groupsStore.getEveryoneProductGroup().getGroupId(); - const filteredTemplates = templates.filter(template => { - const publicAccess = everyoneGid in template["accessRights"] || productEveryoneGid in template["accessRights"]; - if (this.getCurrentContext() === "public") { - return publicAccess; - } - return !publicAccess; - }); - this.__addResourcesToList(filteredTemplates); - } else { - const studies = resp["data"]; - studies.forEach(study => study["resourceType"] = "study"); - this.__addResourcesToList(studies); + this.__lastUrlParams = osparc.utils.Utils.deepCloneObject(resp["params"]["url"]); + if (this.__lastUrlParams["text"]) { + this.__lastUrlParams["text"] = decodeURIComponent(this.__lastUrlParams["text"]); + } + + switch (this.getCurrentContext()) { + case osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS: + case osparc.dashboard.StudyBrowser.CONTEXT.TRASH: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS: { + const studies = resp["data"]; + studies.forEach(study => study["resourceType"] = "study"); + this.__addResourcesToList(studies); + break; + } + case osparc.dashboard.StudyBrowser.CONTEXT.TEMPLATES: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_TEMPLATES: + case osparc.dashboard.StudyBrowser.CONTEXT.PUBLIC_TEMPLATES: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PUBLIC_TEMPLATES: { + const templates = resp["data"]; + templates.forEach(template => template["resourceType"] = "template"); + // For now, filtered in the frontend + const groupsStore = osparc.store.Groups.getInstance(); + const everyoneGroupIds = groupsStore.getEveryoneGroupIds(); + const filteredTemplates = templates.filter(template => { + const templateGroupIds = 
Object.keys(template["accessRights"]); + const publicAccess = templateGroupIds.some(gid => everyoneGroupIds.includes(parseInt(gid))); + if ([ + osparc.dashboard.StudyBrowser.CONTEXT.PUBLIC_TEMPLATES, + osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PUBLIC_TEMPLATES, + ].includes(this.getCurrentContext())) { + return publicAccess; + } + return !publicAccess; + }); + this.__addResourcesToList(filteredTemplates); + break; + } + case osparc.dashboard.StudyBrowser.CONTEXT.FUNCTIONS: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_FUNCTIONS: { + const functions = resp["data"]; + functions.forEach(func => func["resourceType"] = "function"); + this.__addResourcesToList(functions); + break; + } } if (this._resourcesContainer.getFlatList()) { this._resourcesContainer.getFlatList().nextRequest = resp["_links"]["next"]; } - if (this.getCurrentContext() === "trash") { + if (this.getCurrentContext() === osparc.dashboard.StudyBrowser.CONTEXT.TRASH) { if (this._resourcesList.length) { this.__header.getChildControl("empty-trash-button").show(); } } - // Show Quick Start if there are no studies in the root folder of the personal workspace - const quickStartInfo = osparc.product.quickStart.Utils.getQuickStart(); - if (quickStartInfo) { - const dontShow = osparc.utils.Utils.localCache.getLocalStorageItem(quickStartInfo.localStorageStr); - if (dontShow === "true" || this.__dontShowTutorial) { - return; - } - const nStudies = "_meta" in resp ? resp["_meta"]["total"] : 0; - if ( - nStudies === 0 && - this.getCurrentContext() === "studiesAndFolders" && - this.getCurrentWorkspaceId() === null && - this.getCurrentFolderId() === null - ) { + // Check if this is the first time the user logged in + const nStudies = "_meta" in resp ? resp["_meta"]["total"] : 0; + if ( + nStudies === 0 && + this.getCurrentContext() === osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS && + this.getCurrentWorkspaceId() === null && + this.getCurrentFolderId() === null + ) { + // It is! + // Open Support Center + osparc.support.SupportCenter.openWindow(); + // and open the Introductory Quick Start if any + const quickStartInfo = osparc.product.quickStart.Utils.getQuickStart(); + if (quickStartInfo) { + const dontShowQuickStart = osparc.utils.Utils.localCache.getLocalStorageItem(quickStartInfo.localStorageStr); + if (dontShowQuickStart === "true" || this.__dontQuickStart) { + return; + } const quickStartWindow = quickStartInfo.tutorial(); quickStartWindow.center(); quickStartWindow.open(); quickStartWindow.addListener("close", () => { - this.__dontShowTutorial = true; + this.__dontQuickStart = true; }, this); } } @@ -341,7 +405,8 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { if (this._resourcesContainer.getFlatList()) { this._loadingResourcesBtn.setVisibility(this._resourcesContainer.getFlatList().nextRequest === null ? "excluded" : "visible"); } - this._moreResourcesRequired(); + // delay the next request to avoid flooding the server + setTimeout(() => this._moreResourcesRequired(), 100); }); }, @@ -362,18 +427,14 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { resourcesList.forEach(study => { const state = study["state"]; - if (state && "locked" in state && state["locked"]["value"] && state["locked"]["status"] === "CLOSING") { + const projectStatus = osparc.study.Utils.state.getProjectStatus(state); + if (projectStatus === "CLOSING") { // websocket might have already notified that the state was closed. // But the /projects calls response got after the ws message. 
Ask again to make sure const delay = 2000; const studyId = study["uuid"]; setTimeout(() => { - const params = { - url: { - studyId - } - }; - osparc.data.Resources.fetch("studies", "getOne", params) + osparc.store.Study.getInstance().getOne(studyId) .then(studyData => { this.__studyStateReceived(study["uuid"], studyData["state"]); }); @@ -432,7 +493,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __addNewWorkspaceButton: function() { - if (this.getCurrentContext() !== "workspaces") { + if (this.getCurrentContext() !== osparc.dashboard.StudyBrowser.CONTEXT.WORKSPACES) { return; } @@ -450,7 +511,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, _workspaceSelected: function(workspaceId) { - this._changeContext("studiesAndFolders", workspaceId, null); + this._changeContext(osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS, workspaceId, null); }, _workspaceUpdated: function() { @@ -498,7 +559,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __addNewFolderButton: function() { - if (this.getCurrentContext() !== "studiesAndFolders") { + if (this.getCurrentContext() !== osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS) { return; } const currentWorkspaceId = this.getCurrentWorkspaceId(); @@ -530,7 +591,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, _folderSelected: function(folderId) { - this._changeContext("studiesAndFolders", this.getCurrentWorkspaceId(), folderId); + this._changeContext(osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS, this.getCurrentWorkspaceId(), folderId); }, _folderUpdated: function() { @@ -595,7 +656,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, _trashFolderRequested: function(folderId) { - const trashDays = osparc.store.StaticInfo.getInstance().getTrashRetentionDays(); + const trashDays = osparc.store.StaticInfo.getTrashRetentionDays(); let msg = this.tr("Are you sure you want to delete the Folder and all its content?"); msg += "
<br>
" + this.tr("It will be permanently deleted after ") + trashDays + " days."; const confirmationWin = new osparc.ui.window.Confirmation(msg).set({ @@ -641,7 +702,10 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { card.addListener("tap", e => this.__studyCardClicked(card, e.getNativeEvent().shiftKey), this); this._populateCardMenu(card); - if (["studiesAndFolders", "search"].includes(this.getCurrentContext())) { + if ([ + osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS, + osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS, + ].includes(this.getCurrentContext())) { this.__attachDragHandlers(card); } }); @@ -711,15 +775,15 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this.__reloadStudies(); }, this); - const store = osparc.store.Store.getInstance(); - store.addListener("studyStateChanged", e => { + const studyStore = osparc.store.Study.getInstance(); + studyStore.addListener("studyStateChanged", e => { const { studyId, state, } = e.getData(); this.__studyStateChanged(studyId, state); }); - store.addListener("studyDebtChanged", e => { + studyStore.addListener("studyDebtChanged", e => { const { studyId, debt, @@ -738,13 +802,34 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { delete reqParams["limit"]; delete reqParams["offset"]; delete reqParams["filters"]; + if ("text" in reqParams) { + if (reqParams["text"] === "") { + delete reqParams["text"]; + } else { + // decodeURIComponent the text to compare it with the currentParams + reqParams["text"] = decodeURIComponent(reqParams["text"]); + } + } const cParams = this.__getRequestParams(); const currentParams = {}; Object.entries(cParams).forEach(([snakeKey, value]) => { const key = osparc.utils.Utils.snakeToCamel(snakeKey); - currentParams[key] = value === "null" ? null : value; + if (value === "null") { + currentParams[key] = null; + } else if (key === "text") { + // decodeURIComponent the text to compare it with the reqParams + currentParams[key] = decodeURIComponent(value); + } else { + currentParams[key] = value; + } }); + if ([ + osparc.dashboard.StudyBrowser.CONTEXT.FUNCTIONS, + osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_FUNCTIONS, + ].includes(this.getCurrentContext())) { + currentParams.orderBy = osparc.store.Functions.curateOrderBy(currentParams.orderBy); + } // check the entries in currentParams are the same as the reqParams let sameContext = true; @@ -757,6 +842,11 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { // loose equality: will do a Number to String conversion if necessary sameContext &= key in currentParams && currentParams[key] == value; }); + + if (!sameContext) { + console.log("not sameContext", currentParams, reqParams); + } + return !sameContext; }, @@ -775,10 +865,12 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { // keep this until the backend implements it switch (this.getCurrentContext()) { - case "templates": + case osparc.dashboard.StudyBrowser.CONTEXT.TEMPLATES: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_TEMPLATES: urlParams.accessRights = "non-public"; break; - case "public": + case osparc.dashboard.StudyBrowser.CONTEXT.PUBLIC_TEMPLATES: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PUBLIC_TEMPLATES: urlParams.accessRights = "public"; break; } @@ -803,30 +895,39 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { requestParams.orderBy = JSON.stringify(this.getOrderBy()); switch (this.getCurrentContext()) { - case "studiesAndFolders": + case osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS: + requestParams.type = "user"; requestParams.workspaceId = 
this.getCurrentWorkspaceId(); requestParams.folderId = this.getCurrentFolderId(); - requestParams.type = "user"; break; - case "templates": + case osparc.dashboard.StudyBrowser.CONTEXT.TEMPLATES: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_TEMPLATES: requestParams.type = "template"; requestParams.templateType = osparc.data.model.StudyUI.TEMPLATE_TYPE; requestParams.accessRights = "non-public"; break; - case "public": + case osparc.dashboard.StudyBrowser.CONTEXT.PUBLIC_TEMPLATES: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PUBLIC_TEMPLATES: requestParams.type = "template"; requestParams.templateType = osparc.data.model.StudyUI.TEMPLATE_TYPE; requestParams.accessRights = "public"; break; - case "search": { - // Use the ``search`` functionality only if the user types some text - // tags should only be used to filter the current context (search context ot workspace/folder context) - const filterData = this._searchBarFilter.getFilterData(); - if (filterData.text) { - requestParams.text = filterData.text ? encodeURIComponent(filterData.text) : ""; // name, description and uuid - requestParams["tagIds"] = filterData.tags.length ? filterData.tags.join(",") : ""; - } + case osparc.dashboard.StudyBrowser.CONTEXT.FUNCTIONS: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_FUNCTIONS: + requestParams.includeExtras = "true"; break; + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS: { + requestParams.type = "user"; + break; + } + } + + if (this.getCurrentContext().includes("search")) { + // Use the ``search`` functionality only if the user types some text + // tags should only be used to filter the current context (search context ot workspace/folder context) + const filterData = this._searchBarFilter.getFilterData(); + if (filterData.text) { + requestParams.text = filterData.text ? 
encodeURIComponent(filterData.text) : ""; // name, description and uuid } } @@ -856,27 +957,44 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { let request = null; switch (this.getCurrentContext()) { - case "studiesAndFolders": - request = osparc.data.Resources.fetch("studies", "getPage", params, options); + case osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS: + request = osparc.store.Study.getInstance().getPage(params, options); break; - case "search": - request = osparc.data.Resources.fetch("studies", "getPageSearch", params, options); + case osparc.dashboard.StudyBrowser.CONTEXT.TEMPLATES: + case osparc.dashboard.StudyBrowser.CONTEXT.PUBLIC_TEMPLATES: + // The distinction is done in the frontend + request = osparc.store.Templates.fetchTemplatesPaginated(params, options); break; - case "templates": - request = osparc.store.Templates.fetchTemplatesNonPublicPaginated(params, options); + case osparc.dashboard.StudyBrowser.CONTEXT.TRASH: + request = osparc.store.Study.getInstance().getPageTrashed(params, options); break; - case "public": - request = osparc.store.Templates.fetchTemplatesPublicPaginated(params, options); + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS: + request = osparc.store.Study.getInstance().getPageSearch(params, options); break; - case "trash": - request = osparc.data.Resources.fetch("studies", "getPageTrashed", params, options); + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_TEMPLATES: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PUBLIC_TEMPLATES: + // The distinction is done in the frontend + request = osparc.store.Templates.searchTemplatesPaginated(params, options); + break; + case osparc.dashboard.StudyBrowser.CONTEXT.FUNCTIONS: + request = osparc.store.Functions.fetchFunctionsPaginated(params, options); + break; + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_FUNCTIONS: + request = osparc.store.Functions.searchFunctionsPaginated(params, options); break; } return request; }, invalidateStudies: function() { - osparc.store.Store.getInstance().invalidate("studies"); + osparc.store.Study.getInstance().invalidateStudies(); + this.__resetStudiesList(); + if (this._resourcesContainer.getFlatList()) { + this._resourcesContainer.getFlatList().nextRequest = null; + } + }, + + invalidateFunctions: function() { this.__resetStudiesList(); if (this._resourcesContainer.getFlatList()) { this._resourcesContainer.getFlatList().nextRequest = null; @@ -925,7 +1043,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __addNewStudyButtons: function() { - if (this.getCurrentContext() !== "studiesAndFolders") { + if (this.getCurrentContext() !== osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS) { return; } const currentWorkspaceId = this.getCurrentWorkspaceId(); @@ -966,13 +1084,13 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { osparc.store.Templates.getHypertools() .then(hypertools => { if (hypertools) { - const newStudies = new osparc.dashboard.NewStudies(newStudiesConfig); + const newPlans = new osparc.dashboard.NewPlans(newStudiesConfig); const winTitle = this.tr("New Plan"); - const win = osparc.ui.window.Window.popUpInWindow(newStudies, winTitle, osparc.dashboard.NewStudies.WIDTH+40, 300).set({ + const win = osparc.ui.window.Window.popUpInWindow(newPlans, winTitle, osparc.dashboard.NewPlans.WIDTH+40, 300).set({ clickAwayClose: false, resizable: true }); - newStudies.addListener("newStudyClicked", e => { + newPlans.addListener("newPlanClicked", e => { win.close(); const templateInfo = e.getData(); const templateData = hypertools.find(t => 
t.name === templateInfo.expectedTemplateLabel); @@ -990,6 +1108,15 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { // LAYOUT // _createLayout: function() { this._createSearchBar(); + this._searchBarFilter.set({ + showFilterMenu: false, + }); + this._searchBarFilter.addListener("resetButtonPressed", () => this.__filterChanged()); + const searchBarTextField = this._searchBarFilter.getChildControl("text-field"); + searchBarTextField.set({ + cursor: "pointer", + }); + searchBarTextField.addListener("tap", () => this.__extendSearchBar()); const header = this.__header = new osparc.dashboard.StudyBrowserHeader(); this.__header.addListener("trashEmptied", () => this.reloadResources(), this); @@ -1051,17 +1178,17 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const selection = e.getData(); studiesMoveButton.set({ - visibility: selection.length && currentContext === "studiesAndFolders" ? "visible" : "excluded", + visibility: selection.length && currentContext === osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS ? "visible" : "excluded", label: this.tr("Move") + (selection.length > 1 ? ` (${selection.length})` : ""), }); studiesTrashButton.set({ - visibility: selection.length && currentContext === "studiesAndFolders" ? "visible" : "excluded", + visibility: selection.length && currentContext === osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS ? "visible" : "excluded", label: this.tr("Delete") + (selection.length > 1 ? ` (${selection.length})` : ""), }); studiesDeleteButton.set({ - visibility: selection.length && currentContext === "trash" ? "visible" : "excluded", + visibility: selection.length && currentContext === osparc.dashboard.StudyBrowser.CONTEXT.TRASH ? "visible" : "excluded", label: this.tr("Delete permanently") + (selection.length > 1 ? ` (${selection.length})` : ""), }); }); @@ -1071,12 +1198,69 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { return this._resourcesContainer; }, + __extendSearchBar: function() { + const searchBarFilterExtended = this._searchBarFilter.popUpSearchBarFilter(); + let curatedContext = null; + switch (this.getCurrentContext()) { + case osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS: + case osparc.dashboard.StudyBrowser.CONTEXT.TRASH: + curatedContext = osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS; + break; + case osparc.dashboard.StudyBrowser.CONTEXT.TEMPLATES: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_TEMPLATES: + curatedContext = osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_TEMPLATES; + break; + case osparc.dashboard.StudyBrowser.CONTEXT.PUBLIC_TEMPLATES: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PUBLIC_TEMPLATES: + curatedContext = osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PUBLIC_TEMPLATES; + break; + case osparc.dashboard.StudyBrowser.CONTEXT.FUNCTIONS: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_FUNCTIONS: + curatedContext = osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_FUNCTIONS; + break; + default: + curatedContext = osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS; + break; + } + searchBarFilterExtended.set({ + currentContext: curatedContext, + }); + searchBarFilterExtended.addListener("filterChanged", e => { + const data = e.getData(); + // first update the filters + const filterType = data["filterType"]; + const filterData = data["filterData"]; + switch (filterType) { + case "text": + this._searchBarFilter.getChildControl("text-field").setValue(filterData); + break; + case "sharedWith": + 
this._searchBarFilter.setSharedWithActiveFilter(filterData.id, filterData.label); + break; + case "tag": + this._searchBarFilter.addTagActiveFilter(filterData); + break; + } + // then update the search context this will trigger the search + const searchContext = data["searchContext"]; + if ([ + osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS, + osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_TEMPLATES, + osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PUBLIC_TEMPLATES, + osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_FUNCTIONS, + ].includes(searchContext)) { + this._changeContext(searchContext); + } + }); + }, + __connectContexts: function() { const header = this.__header; header.addListener("locationChanged", () => { const workspaceId = header.getCurrentWorkspaceId(); const folderId = header.getCurrentFolderId(); - this._changeContext("studiesAndFolders", workspaceId, folderId); + this._changeContext(osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS, workspaceId, folderId); }, this); const workspacesAndFoldersTree = this._resourceFilter.getWorkspacesAndFoldersTree(); @@ -1084,32 +1268,82 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const context = e.getData(); const workspaceId = context["workspaceId"]; if (workspaceId === -1) { - this._changeContext("workspaces"); + this._changeContext(osparc.dashboard.StudyBrowser.CONTEXT.WORKSPACES); } else { const folderId = context["folderId"]; - this._changeContext("studiesAndFolders", workspaceId, folderId); + this._changeContext(osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS, workspaceId, folderId); } }, this); - this._resourceFilter.addListener("templatesContext", () => this._changeContext("templates")); - this._resourceFilter.addListener("publicContext", () => this._changeContext("public")); - this._resourceFilter.addListener("trashContext", () => this._changeContext("trash")); + this._resourceFilter.addListener("templatesContext", () => this._changeContext(osparc.dashboard.StudyBrowser.CONTEXT.TEMPLATES)); + this._resourceFilter.addListener("publicTemplatesContext", () => this._changeContext(osparc.dashboard.StudyBrowser.CONTEXT.PUBLIC_TEMPLATES)); + this._resourceFilter.addListener("functionsContext", () => this._changeContext(osparc.dashboard.StudyBrowser.CONTEXT.FUNCTIONS)); + this._resourceFilter.addListener("trashContext", () => this._changeContext(osparc.dashboard.StudyBrowser.CONTEXT.TRASH)); this._searchBarFilter.addListener("filterChanged", e => { const filterData = e.getData(); - if (filterData.text) { - this._changeContext("search"); - } else { - const workspaceId = this.getCurrentWorkspaceId(); - const folderId = this.getCurrentFolderId(); - this._changeContext("studiesAndFolders", workspaceId, folderId); - } + this.__filterChanged(filterData); }); }, + __filterChanged: function(filterData) { + let searchContext = null; + let backToContext = null; + const isSearchContext = filterData && filterData.text; + switch (this.getCurrentContext()) { + case osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS: + case osparc.dashboard.StudyBrowser.CONTEXT.TRASH: + if (isSearchContext) { + searchContext = osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS; + } else { + backToContext = osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS; + } + break; + case osparc.dashboard.StudyBrowser.CONTEXT.TEMPLATES: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_TEMPLATES: + if (isSearchContext) { + searchContext = osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_TEMPLATES; + } else { + 
backToContext = osparc.dashboard.StudyBrowser.CONTEXT.TEMPLATES; + } + break; + case osparc.dashboard.StudyBrowser.CONTEXT.PUBLIC_TEMPLATES: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PUBLIC_TEMPLATES: + if (isSearchContext) { + searchContext = osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PUBLIC_TEMPLATES; + } else { + backToContext = osparc.dashboard.StudyBrowser.CONTEXT.PUBLIC_TEMPLATES; + } + break; + case osparc.dashboard.StudyBrowser.CONTEXT.FUNCTIONS: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_FUNCTIONS: + if (isSearchContext) { + searchContext = osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_FUNCTIONS; + } else { + backToContext = osparc.dashboard.StudyBrowser.CONTEXT.FUNCTIONS; + } + break; + default: + if (isSearchContext) { + searchContext = osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS; + } else { + backToContext = osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS; + } + break; + } + if (searchContext) { + this._changeContext(searchContext); + } else if (backToContext) { + const workspaceId = this.getCurrentWorkspaceId(); + const folderId = this.getCurrentFolderId(); + this._changeContext(backToContext, workspaceId, folderId); + } + }, + _changeContext: function(context, workspaceId = null, folderId = null) { if ( - context !== "search" && // reload studies for a new search + !context.includes("search") && // move on if it's a search context, it will be handled later context === this.getCurrentContext() && workspaceId === this.getCurrentWorkspaceId() && folderId === this.getCurrentFolderId() @@ -1118,6 +1352,17 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { return; } + if ( + context.includes("search") && + context === this.getCurrentContext() && + this.__lastUrlParams && + "text" in this.__lastUrlParams && + this.__lastUrlParams["text"] === this._searchBarFilter.getTextFilterValue() + ) { + // context and text search didn't change + return; + } + osparc.store.Store.getInstance().setStudyBrowserContext(context); this.set({ currentContext: context, @@ -1133,46 +1378,73 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this._resourcesList = []; this._resourcesContainer.setResourcesToList(this._resourcesList); this._resourcesContainer.reloadCards("studies"); + // functions will disable it + this._searchBarFilter.setEnabled(true); + // workspaces will exclude it + this._toolbar.show(); + // functions will hide some option + this.__sortByButton.showAllOptions(); switch (this.getCurrentContext()) { - case "studiesAndFolders": + case osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS: this._searchBarFilter.resetFilters(); - this._toolbar.show(); + this._searchBarFilter.getChildControl("text-field").setPlaceholder("Search in My Projects"); this.__reloadFolders(); this._loadingResourcesBtn.setFetching(false); this.invalidateStudies(); this.__reloadStudies(); break; - case "workspaces": + case osparc.dashboard.StudyBrowser.CONTEXT.WORKSPACES: this._searchBarFilter.resetFilters(); + this._searchBarFilter.getChildControl("text-field").setPlaceholder("Search in My Projects"); + // workspaces can't be sorted and don't support list view this._toolbar.exclude(); this.__reloadWorkspaces(); break; - case "search": - this._toolbar.show(); + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS: + this._searchBarFilter.getChildControl("text-field").setPlaceholder("Search in My Projects"); this.__reloadWorkspaces(); this.__reloadFolders(); this._loadingResourcesBtn.setFetching(false); this.invalidateStudies(); this.__reloadStudies(); break; - case "templates": - 
this._searchBarFilter.resetFilters(); - this._toolbar.show(); + case osparc.dashboard.StudyBrowser.CONTEXT.TEMPLATES: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_TEMPLATES: + if (this.getCurrentContext() === osparc.dashboard.StudyBrowser.CONTEXT.TEMPLATES) { + this._searchBarFilter.resetFilters(); + } + this._searchBarFilter.getChildControl("text-field").setPlaceholder("Search in Templates"); this._loadingResourcesBtn.setFetching(false); this.invalidateStudies(); this.__reloadStudies(); break; - case "public": - this._searchBarFilter.resetFilters(); - this._toolbar.show(); + case osparc.dashboard.StudyBrowser.CONTEXT.PUBLIC_TEMPLATES: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PUBLIC_TEMPLATES: + if (this.getCurrentContext() === osparc.dashboard.StudyBrowser.CONTEXT.PUBLIC_TEMPLATES) { + this._searchBarFilter.resetFilters(); + } + this._searchBarFilter.getChildControl("text-field").setPlaceholder("Search in Public Projects"); this._loadingResourcesBtn.setFetching(false); this.invalidateStudies(); this.__reloadStudies(); break; - case "trash": + case osparc.dashboard.StudyBrowser.CONTEXT.FUNCTIONS: + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_FUNCTIONS: + if (this.getCurrentContext() === osparc.dashboard.StudyBrowser.CONTEXT.FUNCTIONS) { + this._searchBarFilter.resetFilters(); + } + this._searchBarFilter.getChildControl("text-field").setPlaceholder("Search in Functions"); + // functions don't support all options yet + this.__sortByButton.hideOptionButton("name"); + this.__sortByButton.hideOptionButton("prj_owner"); + this._loadingResourcesBtn.setFetching(false); + this.invalidateFunctions(); + this.__reloadStudies(); + break; + case osparc.dashboard.StudyBrowser.CONTEXT.TRASH: this._searchBarFilter.resetFilters(); - this._toolbar.show(); + this._searchBarFilter.getChildControl("text-field").setPlaceholder("Search in My Projects"); this.__reloadWorkspaces(); this.__reloadFolders(); this._loadingResourcesBtn.setFetching(false); @@ -1242,7 +1514,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __addSortByButton: function() { - const sortByButton = new osparc.dashboard.SortedByMenuButton(); + const sortByButton = this.__sortByButton = new osparc.dashboard.SortedByMenuButton(); sortByButton.set({ appearance: "form-button-outlined" }); @@ -1386,8 +1658,6 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { return deleteButton; }, - - __createSelectButton: function() { const selectButton = new qx.ui.form.ToggleButton().set({ appearance: "form-button-outlined", @@ -1402,7 +1672,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }); this.bind("multiSelection", selectButton, "value"); this.bind("currentContext", selectButton, "visibility", { - converter: currentContext => currentContext === "studiesAndFolders" ? "visible" : "excluded" + converter: currentContext => currentContext === osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS ? 
"visible" : "excluded" }); return selectButton; }, @@ -1421,7 +1691,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { // LAYOUT // __studyStateReceived: function(studyId, state, errors) { - osparc.store.Store.getInstance().setStudyState(studyId, state); + osparc.store.Study.getInstance().setStudyState(studyId, state); if (errors && errors.length) { console.error(errors); } @@ -1432,9 +1702,9 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { if (idx > -1) { this._resourcesList[idx]["state"] = state; } - const studyItem = this._resourcesContainer.getCards().find(card => osparc.dashboard.ResourceBrowserBase.isCardButtonItem(card) && card.getUuid() === studyId); - if (studyItem) { - studyItem.setState(state); + const studyCard = this._resourcesContainer.getCards().find(card => osparc.dashboard.ResourceBrowserBase.isCardButtonItem(card) && card.getUuid() === studyId); + if (studyCard) { + studyCard.setState(state); } }, @@ -1457,10 +1727,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { minStudyData["workspaceId"] = this.getCurrentWorkspaceId(); minStudyData["folderId"] = this.getCurrentFolderId(); this._showLoadingPage(this.tr("Creating ") + (minStudyData.name || osparc.product.Utils.getStudyAlias())); - const params = { - data: minStudyData - }; - osparc.study.Utils.createStudyAndPoll(params) + osparc.study.Utils.createStudyAndPoll(minStudyData) .then(studyData => this.__startStudyAfterCreating(studyData["uuid"])) .catch(err => { this._hideLoadingPage(); @@ -1505,12 +1772,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const openCB = () => this._hideLoadingPage(); const cancelCB = () => { this._hideLoadingPage(); - const params = { - url: { - studyId - } - }; - osparc.data.Resources.fetch("studies", "delete", params); + osparc.store.Study.getInstance().deleteStudy(studyId); }; const isStudyCreation = true; this._startStudyById(studyId, openCB, cancelCB, isStudyCreation); @@ -1534,21 +1796,40 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this.base(arguments, templateData); }, - __removeFromStudyList: function(studyId) { - const idx = this._resourcesList.findIndex(study => study["uuid"] === studyId); + _updateFunctionData: function(functionData) { + functionData["resourceType"] = "function"; + + const index = this._resourcesList.findIndex(func => func["uuid"] === functionData["uuid"]); + if (index === -1) { + // add it in first position, most likely it's a new study + this._resourcesList.unshift(functionData); + } else { + this._resourcesList[index] = functionData; + } + // it will render the studies in the right order + this._reloadCards(); + }, + + __removeFromList: function(resourceUuid) { + const idx = this._resourcesList.findIndex(resource => resource["uuid"] === resourceUuid); if (idx > -1) { this._resourcesList.splice(idx, 1); } - this._resourcesContainer.removeCard(studyId); + this._resourcesContainer.removeCard(resourceUuid); }, _populateCardMenu: function(card) { const studyData = card.getResourceData(); - if (studyData["resourceType"] === "template") { - // The Study Browser can also list templates - this._populateTemplateCardMenu(card); - } else { - this.__populateStudyCardMenu(card); + switch (studyData["resourceType"]) { + case "study": + this.__populateStudyCardMenu(card); + break; + case "template": + this._populateTemplateCardMenu(card); + break; + case "function": + this.__populateFunctionCardMenu(card); + break; } }, @@ -1559,7 +1840,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const writeAccess = 
osparc.data.model.Study.canIWrite(studyData["accessRights"]); const deleteAccess = osparc.data.model.Study.canIDelete(studyData["accessRights"]); - if (this.getCurrentContext() === "trash") { + if (this.getCurrentContext() === osparc.dashboard.StudyBrowser.CONTEXT.TRASH) { const trashed = Boolean(studyData["trashedAt"]); if (trashed) { if (writeAccess) { @@ -1580,9 +1861,9 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { menu.add(openButton); } - if (this.getCurrentContext() === "search") { - const renameStudyButton = this.__getOpenLocationMenuButton(studyData); - menu.add(renameStudyButton); + if (this.getCurrentContext() === osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS) { + const openLocationButton = this.__getOpenLocationMenuButton(studyData); + menu.add(openLocationButton); } if (writeAccess) { @@ -1598,10 +1879,12 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const duplicateStudyButton = this.__getDuplicateMenuButton(studyData); menu.add(duplicateStudyButton); - const convertToPipelineButton = this.__getConvertToPipelineMenuButton(studyData); - menu.add(convertToPipelineButton); + if (writeAccess && osparc.product.Utils.showConvertToPipeline()) { + const convertToPipelineButton = this.__getConvertToPipelineMenuButton(studyData); + menu.add(convertToPipelineButton); + } - if (osparc.product.Utils.hasExportCMisEnabled()) { + if (osparc.product.Utils.showExportCMis()) { const exportStudyButton = this.__getExportCMisMenuButton(studyData); menu.add(exportStudyButton); } @@ -1652,10 +1935,18 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { card.evaluateMenuButtons(); }, + __populateFunctionCardMenu: function(card) { + const menu = card.getMenu(); + const functionData = card.getResourceData(); + + const deleteButton = this.__getDeleteFunctionMenuButton(functionData); + menu.add(deleteButton); + }, + __getOpenLocationMenuButton: function(studyData) { const openLocationButton = new qx.ui.menu.Button(this.tr("Open location"), "@FontAwesome5Solid/external-link-alt/12"); openLocationButton.addListener("execute", () => { - this._changeContext("studiesAndFolders", studyData["workspaceId"], studyData["folderId"]); + this._changeContext(osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS, studyData["workspaceId"], studyData["folderId"]); }, this); return openLocationButton; }, @@ -1697,13 +1988,13 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __updateName: function(studyData, name) { - osparc.store.Study.patchStudyData(studyData, "name", name) + osparc.store.Study.getInstance().patchStudyData(studyData, "name", name) .then(() => this._updateStudyData(studyData)) .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while renaming"))); }, __updateThumbnail: function(studyData, url) { - osparc.store.Study.patchStudyData(studyData, "thumbnail", url) + osparc.store.Study.getInstance().patchStudyData(studyData, "thumbnail", url) .then(() => this._updateStudyData(studyData)) .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while updating the thumbnail"))); }, @@ -1727,7 +2018,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { __doMoveStudy: function(studyData, destWorkspaceId, destFolderId) { this.__moveStudyToWorkspace(studyData, destWorkspaceId) // first move to workspace .then(() => this.__moveStudyToFolder(studyData, destFolderId)) // then move to folder - .then(() => this.__removeFromStudyList(studyData["uuid"])) + .then(() => this.__removeFromList(studyData["uuid"])) .catch(err => 
osparc.FlashMessenger.logError(err)); }, @@ -1770,13 +2061,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { // resolve right away return new Promise(resolve => resolve()); } - const params = { - url: { - studyId: studyData["uuid"], - workspaceId: destWorkspaceId, - } - }; - return osparc.data.Resources.fetch("studies", "moveToWorkspace", params) + return osparc.store.Study.getInstance().moveStudyToWorkspace(studyData["uuid"], destWorkspaceId) .then(() => studyData["workspaceId"] = destWorkspaceId); }, @@ -1785,13 +2070,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { // resolve right away return new Promise(resolve => resolve()); } - const params = { - url: { - studyId: studyData["uuid"], - folderId: destFolderId, - } - }; - return osparc.data.Resources.fetch("studies", "moveToFolder", params) + return osparc.store.Study.getInstance().moveStudyToFolder(studyData["uuid"], destFolderId) .then(() => studyData["folderId"] = destFolderId); }, @@ -1852,7 +2131,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { __updateUIMode: function(studyData, uiMode) { const studyUI = osparc.utils.Utils.deepCloneObject(studyData["ui"]); studyUI["mode"] = uiMode; - return osparc.store.Study.patchStudyData(studyData, "ui", studyUI) + return osparc.store.Study.getInstance().patchStudyData(studyData, "ui", studyUI) .then(() => this._updateStudyData(studyData)) }, @@ -1866,7 +2145,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, _deleteResourceRequested: function(studyId) { - if (this.getCurrentContext() === "trash") { + if (this.getCurrentContext() === osparc.dashboard.StudyBrowser.CONTEXT.TRASH) { this.__deleteStudyRequested(this.__getStudyData(studyId)); } else { this.__trashStudyRequested(this.__getStudyData(studyId)); @@ -1937,6 +2216,54 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { return deleteButton; }, + __getDeleteFunctionMenuButton: function(functionData) { + const deleteButton = new qx.ui.menu.Button(this.tr("Delete"), "@FontAwesome5Solid/trash/12"); + deleteButton.set({ + appearance: "menu-button" + }); + osparc.utils.Utils.setIdToWidget(deleteButton, "functionItemMenuDelete"); + deleteButton.addListener("execute", () => { + this.__popUpDeleteFunctionWindow(functionData, false); + }, this); + return deleteButton; + }, + + __popUpDeleteFunctionWindow: function(functionData, force, message) { + const win = this.__createConfirmDeleteWindow([functionData.title]); + win.setCaption(this.tr("Delete function")); + if (force) { + if (message) { + win.setMessage(message); + } else { + const msg = this.tr("The function has associated jobs. 
Are you sure you want to delete it?"); + win.setMessage(msg); + } + } + win.center(); + win.open(); + win.addListener("close", () => { + if (win.getConfirmed()) { + this.__doDeleteFunction(functionData, force); + } + }, this); + }, + + __doDeleteFunction: function(functionData, force = false) { + osparc.store.Functions.deleteFunction(functionData.uuid, force) + .then(() => { + this.__removeFromList(functionData.uuid); + const msg = this.tr("Successfully deleted"); + osparc.FlashMessenger.logAs(msg, "INFO"); + }) + .catch(err => { + if (err && err.status && err.status === 409) { + this.__popUpDeleteFunctionWindow(functionData, true, err.message); + } else { + osparc.FlashMessenger.logError(err); + } + }); + }, + __getStudyData: function(id) { return this._resourcesList.find(study => study.uuid === id); }, @@ -2017,12 +2344,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { importTaskUI.setSubtitle(processingLabel); importingStudyCard.getChildControl("progress-bar").exclude(); const data = JSON.parse(req.responseText); - const params = { - url: { - "studyId": data["data"]["uuid"] - } - }; - osparc.data.Resources.fetch("studies", "getOne", params) + osparc.store.Study.getInstance().getOne(data["data"]["uuid"]) .then(studyData => this._updateStudyData(studyData)) .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while fetching the study"))) .finally(() => { @@ -2051,9 +2373,9 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __untrashStudy: function(studyData) { - osparc.store.Store.getInstance().untrashStudy(studyData.uuid) + osparc.store.Study.getInstance().untrashStudy(studyData.uuid) .then(() => { - this.__removeFromStudyList(studyData.uuid); + this.__removeFromList(studyData.uuid); const msg = this.tr("Successfully restored"); osparc.FlashMessenger.logAs(msg, "INFO"); this._resourceFilter.evaluateTrashEmpty(); @@ -2063,9 +2385,9 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __trashStudy: function(studyData) { - osparc.store.Store.getInstance().trashStudy(studyData.uuid) + osparc.store.Study.getInstance().trashStudy(studyData.uuid) .then(() => { - this.__removeFromStudyList(studyData.uuid); + this.__removeFromList(studyData.uuid); const msg = this.tr("Successfully deleted"); osparc.FlashMessenger.logAs(msg, "INFO"); this._resourceFilter.setTrashEmpty(false); @@ -2091,7 +2413,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { // remove me from collaborators const myGid = osparc.auth.Data.getInstance().getGroupId(); delete arCopy[myGid]; - return osparc.store.Study.patchStudyData(studyData, "accessRights", arCopy); + return osparc.store.Study.getInstance().patchStudyData(studyData, "accessRights", arCopy); }, __doDeleteStudy: function(studyData) { @@ -2100,10 +2422,10 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { operationPromise = this.__removeMeFromCollaborators(studyData); } else { // delete study - operationPromise = osparc.store.Store.getInstance().deleteStudy(studyData.uuid); + operationPromise = osparc.store.Study.getInstance().deleteStudy(studyData.uuid); } operationPromise - .then(() => this.__removeFromStudyList(studyData.uuid)) + .then(() => this.__removeFromList(studyData.uuid)) .catch(err => osparc.FlashMessenger.logError(err)) .finally(() => this.resetSelection()); }, @@ -2120,7 +2442,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { } else { msg += ` '${studyNames[0]}'?`; } - const trashDays = osparc.store.StaticInfo.getInstance().getTrashRetentionDays(); + const trashDays = 
osparc.store.StaticInfo.getTrashRetentionDays(); msg += "<br><br>
" + (studyNames.length > 1 ? "They" : "It") + this.tr(` will be permanently deleted after ${trashDays} days.`); const confirmationWin = new osparc.ui.window.Confirmation(msg).set({ caption: this.tr("Delete"), diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowserHeader.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowserHeader.js index d4c99138587e..421479da04c6 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowserHeader.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowserHeader.js @@ -234,7 +234,7 @@ qx.Class.define("osparc.dashboard.StudyBrowserHeader", { }, __titleTapped: function() { - if (osparc.store.Store.getInstance().getStudyBrowserContext() === "studiesAndFolders") { + if (osparc.store.Store.getInstance().getStudyBrowserContext() === osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS) { const workspaceId = this.getCurrentWorkspaceId(); const folderId = null; this.setCurrentFolderId(folderId); @@ -272,7 +272,7 @@ qx.Class.define("osparc.dashboard.StudyBrowserHeader", { const currentContext = osparc.store.Store.getInstance().getStudyBrowserContext(); switch (currentContext) { - case "studiesAndFolders": { + case osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS: { const workspaceId = this.getCurrentWorkspaceId(); title.setCursor("pointer"); title.addListener("tap", this.__titleTapped, this); @@ -293,34 +293,50 @@ qx.Class.define("osparc.dashboard.StudyBrowserHeader", { } break; } - case "workspaces": + case osparc.dashboard.StudyBrowser.CONTEXT.WORKSPACES: this.__setIcon(osparc.store.Workspaces.iconPath(32)); title.setValue(this.tr("Shared Workspaces")); break; - case "search": - this.__setIcon("@FontAwesome5Solid/search/24"); - title.setValue(this.tr("Search results")); - break; - case "templates": { + case osparc.dashboard.StudyBrowser.CONTEXT.TEMPLATES: { this.__setIcon("@FontAwesome5Solid/copy/24"); title.setValue(this.tr("Templates")); break; } - case "public": { + case osparc.dashboard.StudyBrowser.CONTEXT.PUBLIC_TEMPLATES: { this.__setIcon("@FontAwesome5Solid/globe/24"); title.setValue(this.tr("Public Projects")); break; } - case "trash": { + case osparc.dashboard.StudyBrowser.CONTEXT.FUNCTIONS: { + this.__setIcon("@MaterialIcons/functions/26"); + title.setValue(this.tr("Functions")); + break; + } + case osparc.dashboard.StudyBrowser.CONTEXT.TRASH: { this.__setIcon("@FontAwesome5Solid/trash/24"); title.setValue(this.tr("Recently Deleted")); - const trashDays = osparc.store.StaticInfo.getInstance().getTrashRetentionDays(); + const trashDays = osparc.store.StaticInfo.getTrashRetentionDays(); description.set({ value: this.tr(`Items here will be permanently deleted after ${trashDays} days.`), visibility: "visible", }); break; } + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS: + this.__setIcon("@FontAwesome5Solid/search/24"); + title.setValue(this.tr("My Projects results")); + break; + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_TEMPLATES: + this.__setIcon("@FontAwesome5Solid/search/24"); + title.setValue(this.tr("Templates results")); + break; + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PUBLIC_TEMPLATES: + this.__setIcon("@FontAwesome5Solid/search/24"); + title.setValue(this.tr("Public Projects results")); + break; + case osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_FUNCTIONS: + this.__setIcon("@FontAwesome5Solid/search/24"); + title.setValue(this.tr("Functions results")); } }, @@ -383,7 +399,7 @@ 
qx.Class.define("osparc.dashboard.StudyBrowserHeader", { const roleText = this.getChildControl("role-text"); const roleIcon = this.getChildControl("role-icon"); const currentContext = osparc.store.Store.getInstance().getStudyBrowserContext(); - if (currentContext === "studiesAndFolders" && value && Object.keys(value).length) { + if (currentContext === osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS && value && Object.keys(value).length) { editButton.setVisibility(value["delete"] ? "visible" : "excluded"); const menu = new qx.ui.menu.Menu().set({ position: "bottom-right" diff --git a/services/static-webserver/client/source/class/osparc/dashboard/TutorialBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/TutorialBrowser.js index 78ae0e458a01..2b83c072355a 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/TutorialBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/TutorialBrowser.js @@ -34,21 +34,23 @@ qx.Class.define("osparc.dashboard.TutorialBrowser", { } this._resourcesInitialized = true; + this._showLoadingPage(this.tr("Loading Tutorials...")); osparc.store.Templates.getTutorials() .then(() => { this._resourcesList = []; this.getChildControl("resources-layout"); this.reloadResources(); this.__attachEventHandlers(); - this._hideLoadingPage(); }); }, reloadResources: function(useCache = true) { if (osparc.data.Permissions.getInstance().canDo("studies.templates.read")) { - this.__reloadTutorials(useCache); + this.__reloadTutorials(useCache) + .finally(() => this._hideLoadingPage()); } else { this.__setResourcesToList([]); + this._hideLoadingPage(); } }, @@ -65,8 +67,6 @@ qx.Class.define("osparc.dashboard.TutorialBrowser", { }, __tutorialStateReceived: function(templateId, state, errors) { - osparc.store.Templates.getTutorials() - // OM follow here const idx = this._resourcesList.findIndex(study => study["uuid"] === templateId); if (idx > -1) { this._resourcesList[idx]["state"] = state; @@ -83,9 +83,9 @@ qx.Class.define("osparc.dashboard.TutorialBrowser", { __reloadTutorials: function(useCache) { this.__tasksToCards(); - osparc.store.Templates.getTutorials(useCache) - .then(tutorials => this.__setResourcesToList(tutorials)) - .catch(() => this.__setResourcesToList([])); + return osparc.store.Templates.getTutorials(useCache) + .then(tutorials => this.__setResourcesToList(tutorials)) + .catch(() => this.__setResourcesToList([])); }, _updateTutorialData: function(templateData) { @@ -106,7 +106,7 @@ qx.Class.define("osparc.dashboard.TutorialBrowser", { } this._resourcesContainer.setResourcesToList(this._resourcesList); - const cards = this._resourcesContainer.reloadCards("templates"); + const cards = this._resourcesContainer.reloadCards(osparc.dashboard.StudyBrowser.CONTEXT.TEMPLATES); cards.forEach(card => { card.setMultiSelectionMode(this.getMultiSelection()); card.addListener("tap", () => this.__itemClicked(card), this); @@ -116,6 +116,18 @@ qx.Class.define("osparc.dashboard.TutorialBrowser", { }); this.__evaluateUpdateAllButton(); osparc.filter.UIFilterController.dispatch("searchBarFilter"); + + this.__populateTags(); + }, + + __populateTags: function() { + if (this._resourceFilter) { + const presentTags = new Set(); + this._resourcesList.forEach(template => { + (template["tags"] || []).forEach(tagId => presentTags.add(tagId)); + }); + this._resourceFilter.populateTags(Array.from(presentTags)); + } }, __itemClicked: function(card) { @@ -143,6 +155,8 @@ qx.Class.define("osparc.dashboard.TutorialBrowser", { 
this._addViewModeButton(); this._addResourceFilter(); + this.__populateTags(); + osparc.store.Tags.getInstance().addListener("tagsChanged", () => this.__populateTags(), this); this._resourcesContainer.addListener("changeVisibility", () => this.__evaluateUpdateAllButton()); @@ -218,7 +232,7 @@ qx.Class.define("osparc.dashboard.TutorialBrowser", { if (node["version"] !== latestCompatible["version"]) { patchData["version"] = latestCompatible["version"]; } - templatePromises.push(osparc.store.Study.patchNodeData(uniqueTemplateData, nodeId, patchData)); + templatePromises.push(osparc.store.Study.getInstance().patchNodeData(uniqueTemplateData, nodeId, patchData)); } } Promise.all(templatePromises) diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js index 7d8fb3b5a951..d1c3f78b40e0 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js @@ -195,8 +195,8 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonItem", { const studyBrowserContext = osparc.store.Store.getInstance().getStudyBrowserContext(); if ( - studyBrowserContext === "search" || - studyBrowserContext === "workspaces" + studyBrowserContext === osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS || + studyBrowserContext === osparc.dashboard.StudyBrowser.CONTEXT.WORKSPACES ) { const editButton = new qx.ui.menu.Button(this.tr("Edit..."), "@FontAwesome5Solid/pencil-alt/12"); editButton.addListener("execute", () => { @@ -221,7 +221,7 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonItem", { const trashButton = new qx.ui.menu.Button(this.tr("Delete"), "@FontAwesome5Solid/trash/12"); trashButton.addListener("execute", () => this.__trashWorkspaceRequested(), this); menu.add(trashButton); - } else if (studyBrowserContext === "trash") { + } else if (studyBrowserContext === osparc.dashboard.StudyBrowser.CONTEXT.TRASH) { const restoreButton = new qx.ui.menu.Button(this.tr("Restore"), "@MaterialIcons/restore_from_trash/16"); restoreButton.addListener("execute", () => this.fireDataEvent("untrashWorkspaceRequested", this.getWorkspace()), this); menu.add(restoreButton); @@ -287,7 +287,7 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonItem", { __itemSelected: function() { const studyBrowserContext = osparc.store.Store.getInstance().getStudyBrowserContext(); // do not allow selecting workspace - if (studyBrowserContext !== "trash") { + if (studyBrowserContext !== osparc.dashboard.StudyBrowser.CONTEXT.TRASH) { this.fireDataEvent("workspaceSelected", this.getWorkspaceId()); } }, @@ -300,7 +300,7 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonItem", { }, __trashWorkspaceRequested: function() { - const trashDays = osparc.store.StaticInfo.getInstance().getTrashRetentionDays(); + const trashDays = osparc.store.StaticInfo.getTrashRetentionDays(); let msg = this.tr("Are you sure you want to delete the Workspace and all its content?"); msg += "
<br><br>
" + this.tr("It will be permanently deleted after ") + trashDays + " days."; const confirmationWin = new osparc.ui.window.Confirmation(msg).set({ diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js index 604d5e2e7b07..0d5301915a5c 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js @@ -46,7 +46,7 @@ qx.Class.define("osparc.dashboard.WorkspacesAndFoldersTree", { this.__initTree(); // preselect "My Workspace" - this.contextChanged("studiesAndFolders"); + this.contextChanged(osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS); osparc.store.Folders.getInstance().addListener("folderAdded", e => { const folder = e.getData(); @@ -323,8 +323,11 @@ qx.Class.define("osparc.dashboard.WorkspacesAndFoldersTree", { if (selection) { selection.removeAll(); } - if (context === "studiesAndFolders" || context === "workspaces") { - const workspaceId = context === "workspaces" ? -1 : this.getCurrentWorkspaceId(); + if ([ + osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS, + osparc.dashboard.StudyBrowser.CONTEXT.WORKSPACES, + ].includes(context)) { + const workspaceId = context === osparc.dashboard.StudyBrowser.CONTEXT.WORKSPACES ? -1 : this.getCurrentWorkspaceId(); const folderId = this.getCurrentFolderId(); const locationModel = this.__getModel(workspaceId, folderId); if (locationModel) { diff --git a/services/static-webserver/client/source/class/osparc/data/Job.js b/services/static-webserver/client/source/class/osparc/data/Job.js index e6eb948c5215..0572c4df9567 100644 --- a/services/static-webserver/client/source/class/osparc/data/Job.js +++ b/services/static-webserver/client/source/class/osparc/data/Job.js @@ -22,27 +22,33 @@ qx.Class.define("osparc.data.Job", { this.base(arguments); this.set({ - projectUuid: jobData["projectUuid"], - projectName: jobData["rootProjectName"] || "", + collectionRunId: jobData["collectionRunId"], + projectIds: jobData["projectIds"], + name: jobData["name"] || "", state: jobData["state"] || "UNKNOWN", submittedAt: jobData["submittedAt"] ? new Date(jobData["submittedAt"]) : null, startedAt: jobData["startedAt"] ? new Date(jobData["startedAt"]) : null, endedAt: jobData["endedAt"] ? 
new Date(jobData["endedAt"]) : null, info: jobData["info"] || null, - customMetadata: jobData["projectCustomMetadata"] || null, }); this.__subJobs = []; }, properties: { - projectUuid: { + collectionRunId: { check: "String", nullable: false, init: null, }, - projectName: { + projectIds: { + check: "Array", + nullable: false, + init: null, + }, + + name: { check: "String", nullable: false, init: null, @@ -77,12 +83,6 @@ qx.Class.define("osparc.data.Job", { nullable: true, init: null, }, - - customMetadata: { - check: "Object", - nullable: true, - init: null, - }, }, statics: { @@ -104,14 +104,14 @@ qx.Class.define("osparc.data.Job", { members: { __subJobs: null, - addSubJob: function(subJobData) { + addSubJob: function(collectionRunId, subJobData) { const subJobFound = this.__subJobs.find(subJb => subJb.getNodeId() === subJobData["nodeId"]); if (subJobFound) { subJobFound.updateSubJob(subJobData); return subJobFound; } - const subJob = new osparc.data.SubJob(subJobData); + const subJob = new osparc.data.SubJob(collectionRunId, subJobData); this.__subJobs.push(subJob); return subJob; }, diff --git a/services/static-webserver/client/source/class/osparc/data/Permissions.js b/services/static-webserver/client/source/class/osparc/data/Permissions.js index 4310b232c811..89f11821defa 100644 --- a/services/static-webserver/client/source/class/osparc/data/Permissions.js +++ b/services/static-webserver/client/source/class/osparc/data/Permissions.js @@ -192,6 +192,9 @@ qx.Class.define("osparc.data.Permissions", { }, members: { + __permissions: null, + __functionPermissions: null, + arePermissionsReady() { return this.getRole() !== null; }, @@ -276,19 +279,44 @@ qx.Class.define("osparc.data.Permissions", { return canDo; }, - checkCanDo: function(action) { - return new Promise((resolve, reject) => { - osparc.data.Resources.get("permissions") - .then(permissions => { - const found = permissions.find(permission => permission["name"] === action); - if (found) { - resolve(found["allowed"]); - } else { - resolve(false); - } - }) - .catch(err => reject(err)); - }); + fetchPermissions: function() { + osparc.data.Resources.get("permissions") + .then(permissions => { + this.__permissions = permissions; + }) + .catch(err => console.error(err)); + }, + + checkMyGroupCanDo: function(action) { + if (this.__permissions) { + const found = this.__permissions.find(permission => permission["name"] === action); + if (found) { + return found["allowed"]; + } + } + return false; + }, + + fetchFunctionPermissions: function() { + osparc.data.Resources.get("functionPermissions") + .then(functionPermissions => { + this.__functionPermissions = functionPermissions; + }) + .catch(err => console.error(err)); + }, + + checkFunctionPermissions: function(action) { + if (osparc.utils.DisabledPlugins.isFunctionsDisabled()) { + return false; + } + + if ( + this.__functionPermissions && + action in this.__functionPermissions + ) { + return this.__functionPermissions[action]; + } + return false; }, isTester: function() { @@ -296,11 +324,11 @@ qx.Class.define("osparc.data.Permissions", { }, isProductOwner: function() { - return this.getRole() === "product_owner"; + return ["admin", "product_owner"].includes(this.getRole()); }, isAdmin: function() { - return this.getRole() === "admin"; + return ["admin"].includes(this.getRole()); }, } }); diff --git a/services/static-webserver/client/source/class/osparc/data/Resources.js b/services/static-webserver/client/source/class/osparc/data/Resources.js index 919af166a0b2..60e1415d3f7d 100644 --- 
a/services/static-webserver/client/source/class/osparc/data/Resources.js +++ b/services/static-webserver/client/source/class/osparc/data/Resources.js @@ -102,7 +102,7 @@ qx.Class.define("osparc.data.Resources", { endpoints: { get: { method: "GET", - url: "/{productName}/app-summary.json", + url: `/{productName}/app-summary.json?no-cache=${new Date().getTime()}`, isJsonFile: true } } @@ -142,20 +142,12 @@ qx.Class.define("osparc.data.Resources", { getPageSearch: { useCache: false, method: "GET", - url: statics.API + "/projects:search?offset={offset}&limit={limit}&text={text}&tag_ids={tagIds}&order_by={orderBy}" + url: statics.API + "/projects:search?offset={offset}&limit={limit}&text={text}&order_by={orderBy}&type=user" }, getPageTrashed: { useCache: false, method: "GET", - url: statics.API + "/projects:search?filters={%22trashed%22:%22true%22}&offset={offset}&limit={limit}&order_by={orderBy}" - }, - postToTemplate: { - method: "POST", - url: statics.API + "/projects?from_study={study_id}&as_template=true©_data={copy_data}" - }, - open: { - method: "POST", - url: statics.API + "/projects/{studyId}:open" + url: statics.API + "/projects:search?filters={%22trashed%22:%22true%22}&offset={offset}&limit={limit}&order_by={orderBy}&type=user" }, getWallet: { useCache: false, @@ -170,14 +162,6 @@ qx.Class.define("osparc.data.Resources", { method: "POST", url: statics.API + "/projects/{studyId}/wallet/{walletId}:pay-debt" }, - openDisableAutoStart: { - method: "POST", - url: statics.API + "/projects/{studyId}:open?disable_service_auto_start={disableServiceAutoStart}" - }, - close: { - method: "POST", - url: statics.API + "/projects/{studyId}:close" - }, duplicate: { method: "POST", // url: statics.API + "/projects/{studyId}:duplicate" @@ -201,14 +185,6 @@ qx.Class.define("osparc.data.Resources", { method: "PATCH", url: statics.API + "/projects/{studyId}" }, - trash: { - method: "POST", - url: statics.API + "/projects/{studyId}:trash" - }, - untrash: { - method: "POST", - url: statics.API + "/projects/{studyId}:untrash" - }, delete: { method: "DELETE", url: statics.API + "/projects/{studyId}" @@ -256,11 +232,6 @@ qx.Class.define("osparc.data.Resources", { method: "PUT", url: statics.API + "/projects/{studyId}/nodes/{nodeId}/pricing-plan/{pricingPlanId}/pricing-unit/{pricingUnitId}" }, - checkShareePermissions: { - useCache: false, - method: "GET", - url: statics.API + "/projects/{studyId}/nodes/-/services:access?for_gid={gid}" - }, postAccessRights: { useCache: false, method: "POST", @@ -276,10 +247,6 @@ qx.Class.define("osparc.data.Resources", { method: "PUT", url: statics.API + "/projects/{studyId}/groups/{gId}" }, - shareWithEmail: { - method: "POST", - url: statics.API + "/projects/{studyId}:share" - }, addTag: { useCache: false, method: "POST", @@ -303,12 +270,45 @@ qx.Class.define("osparc.data.Resources", { method: "POST", url: statics.API + "/projects/{studyId}/workspaces/{workspaceId}:move" }, + updateMetadata: { + method: "PATCH", + url: statics.API + "/projects/{studyId}/metadata" + }, + open: { + method: "POST", + url: statics.API + "/projects/{studyId}:open" + }, + openDisableAutoStart: { + method: "POST", + url: statics.API + "/projects/{studyId}:open?disable_service_auto_start={disableServiceAutoStart}" + }, + close: { + method: "POST", + url: statics.API + "/projects/{studyId}:close" + }, + shareWithEmail: { + method: "POST", + url: statics.API + "/projects/{studyId}:share" + }, + checkShareePermissions: { + useCache: false, + method: "GET", + url: statics.API + 
"/projects/{studyId}/nodes/-/services:access?for_gid={gid}" + }, + trash: { + method: "POST", + url: statics.API + "/projects/{studyId}:trash" + }, + untrash: { + method: "POST", + url: statics.API + "/projects/{studyId}:untrash" + }, } }, - "conversations": { - useCache: false, + "conversationsStudies": { + useCache: false, // It has its own cache handler endpoints: { - addConversation: { + postConversation: { method: "POST", url: statics.API + "/projects/{studyId}/conversations" }, @@ -316,6 +316,10 @@ qx.Class.define("osparc.data.Resources", { method: "GET", url: statics.API + "/projects/{studyId}/conversations?offset={offset}&limit={limit}" }, + getConversation: { + method: "GET", + url: statics.API + "/projects/{studyId}/conversations/{conversationId}" + }, renameConversation: { method: "PUT", url: statics.API + "/projects/{studyId}/conversations/{conversationId}" @@ -324,10 +328,18 @@ qx.Class.define("osparc.data.Resources", { method: "DELETE", url: statics.API + "/projects/{studyId}/conversations/{conversationId}" }, - addMessage: { + postMessage: { method: "POST", url: statics.API + "/projects/{studyId}/conversations/{conversationId}/messages" }, + editMessage: { + method: "PUT", + url: statics.API + "/projects/{studyId}/conversations/{conversationId}/messages/{messageId}" + }, + deleteMessage: { + method: "DELETE", + url: statics.API + "/projects/{studyId}/conversations/{conversationId}/messages/{messageId}" + }, getMessagesPage: { method: "GET", url: statics.API + "/projects/{studyId}/conversations/{conversationId}/messages?offset={offset}&limit={limit}" @@ -352,11 +364,11 @@ qx.Class.define("osparc.data.Resources", { endpoints: { getPageLatest: { method: "GET", - url: statics.API + "/computations/-/iterations/latest?offset={offset}&limit={limit}&order_by={orderBy}&filter_only_running={runningOnly}&filters={filters}" + url: statics.API + "/computation-collection-runs?offset={offset}&limit={limit}&order_by={orderBy}&filter_only_running={runningOnly}" }, getPageHistory: { method: "GET", - url: statics.API + "/computations/{studyId}/iterations?offset={offset}&limit={limit}&order_by={orderBy}&include_children={includeChildren}" + url: statics.API + "/computation-collection-runs?offset={offset}&limit={limit}&order_by={orderBy}&filter_by_root_project_id={projectId}" }, } }, @@ -365,7 +377,7 @@ qx.Class.define("osparc.data.Resources", { endpoints: { getPageLatest: { method: "GET", - url: statics.API + "/computations/{studyId}/iterations/latest/tasks?offset={offset}&limit={limit}&order_by={orderBy}&include_children={includeChildren}" + url: statics.API + "/computation-collection-runs/{collectionRunId}/tasks?offset={offset}&limit={limit}&order_by={orderBy}" }, } }, @@ -598,7 +610,15 @@ qx.Class.define("osparc.data.Resources", { }, getPageFilteredSorted: { method: "GET", - url: statics.API + "/projects?type=template&template_type={templateType}&offset={offset}&limit={limit}&order_by={orderBy}" + url: statics.API + "/projects?type=template&offset={offset}&limit={limit}&order_by={orderBy}&template_type={templateType}" + }, + getPageSearchFilteredSorted: { + method: "GET", + url: statics.API + "/projects:search?type=template&offset={offset}&limit={limit}&order_by={orderBy}&template_type={templateType}&text={text}" + }, + postToTemplate: { + method: "POST", + url: statics.API + "/projects?from_study={study_id}&as_template=true©_data={copy_data}&hidden={hidden}" }, } }, @@ -608,10 +628,40 @@ qx.Class.define("osparc.data.Resources", { "functions": { useCache: false, endpoints: { + getOne: { + 
method: "GET", + url: statics.API + "/functions/{functionId}?include_extras=true" + }, + getPage: { + method: "GET", + url: statics.API + "/functions?include_extras=true&offset={offset}&limit={limit}&order_by={orderBy}" + }, + getPageSearch: { + method: "GET", + url: statics.API + "/functions?include_extras=true&offset={offset}&limit={limit}&search={text}&order_by={orderBy}" + }, create: { method: "POST", url: statics.API + "/functions" - } + }, + delete: { + method: "DELETE", + url: statics.API + "/functions/{functionId}?force={force}" + }, + patch: { + method: "PATCH", + url: statics.API + "/functions/{functionId}?include_extras=true" + }, + putAccessRights: { + useCache: false, + method: "PUT", + url: statics.API + "/functions/{functionId}/groups/{gId}" + }, + deleteAccessRights: { + useCache: false, + method: "DELETE", + url: statics.API + "/functions/{functionId}/groups/{gId}" + }, } }, /* @@ -808,6 +858,18 @@ qx.Class.define("osparc.data.Resources", { method: "PATCH", url: statics.API + "/me" }, + phoneRegister: { + method: "POST", + url: statics.API + "/me/phone:register" + }, + phoneResendCode: { + method: "POST", + url: statics.API + "/me/phone:resend" + }, + phoneConfirm: { + method: "POST", + url: statics.API + "/me/phone:confirm" + }, } }, /* @@ -833,6 +895,18 @@ qx.Class.define("osparc.data.Resources", { } } }, + /* + * FUNCTION PERMISSIONS + */ + "functionPermissions": { + useCache: true, + endpoints: { + get: { + method: "GET", + url: statics.API + "/me/function-permissions" + } + } + }, /* * API-KEYS */ @@ -1057,10 +1131,14 @@ qx.Class.define("osparc.data.Resources", { }, "poUsers": { endpoints: { - search: { + searchByEmail: { method: "GET", url: statics.API + "/admin/user-accounts:search?email={email}" }, + searchByGroupId: { + method: "GET", + url: statics.API + "/admin/user-accounts:search?primary_group_id={gId}" + }, getPendingUsers: { method: "GET", url: statics.API + "/admin/user-accounts?review_status=PENDING" @@ -1425,7 +1503,52 @@ qx.Class.define("osparc.data.Resources", { url: statics.API + "/wallets/{walletId}/licensed-items-checkouts?offset={offset}&limit={limit}" }, } - } + }, + + /* + * SUPPORT CONVERSATIONS + */ + "conversationsSupport": { + useCache: false, // It has its own cache handler + endpoints: { + postConversation: { + method: "POST", + url: statics.API + "/conversations" + }, + getConversationsPage: { + method: "GET", + url: statics.API + "/conversations?type=SUPPORT&offset={offset}&limit={limit}" + }, + getConversation: { + method: "GET", + url: statics.API + "/conversations/{conversationId}" + }, + patchConversation: { + method: "PATCH", + url: statics.API + "/conversations/{conversationId}" + }, + deleteConversation: { + method: "DELETE", + url: statics.API + "/conversations/{conversationId}" + }, + postMessage: { + method: "POST", + url: statics.API + "/conversations/{conversationId}/messages" + }, + editMessage: { + method: "PUT", + url: statics.API + "/conversations/{conversationId}/messages/{messageId}" + }, + deleteMessage: { + method: "DELETE", + url: statics.API + "/conversations/{conversationId}/messages/{messageId}" + }, + getMessagesPage: { + method: "GET", + url: statics.API + "/conversations/{conversationId}/messages?offset={offset}&limit={limit}" + }, + } + }, }; }, @@ -1528,6 +1651,7 @@ qx.Class.define("osparc.data.Resources", { let message = null; let status = null; let supportId = null; + let errors = []; if (e.getData().error) { const errorData = e.getData().error; if (errorData.message) { @@ -1537,13 +1661,15 @@ 
qx.Class.define("osparc.data.Resources", { if (message === null && logs && logs.length) { message = logs[0].message; } - const errors = errorData.errors || []; + errors = errorData.errors || []; if (message === null && errors && errors.length) { message = errors[0].message; } status = errorData.status; if (errorData["support_id"]) { supportId = errorData["support_id"]; + } else if (errorData["supportId"]) { + supportId = errorData["supportId"]; } } else { const req = e.getRequest(); @@ -1576,6 +1702,9 @@ qx.Class.define("osparc.data.Resources", { if (status) { err.status = status; } + if (errors.length) { + err.errors = errors; + } if (supportId) { err.supportId = supportId; } diff --git a/services/static-webserver/client/source/class/osparc/data/Roles.js b/services/static-webserver/client/source/class/osparc/data/Roles.js index d5abf35add2c..aacb89f0bea1 100644 --- a/services/static-webserver/client/source/class/osparc/data/Roles.js +++ b/services/static-webserver/client/source/class/osparc/data/Roles.js @@ -26,7 +26,7 @@ qx.Class.define("osparc.data.Roles", { label: qx.locale.Manager.tr("Restricted Member"), longLabel: qx.locale.Manager.tr("Restricted member: no Read access"), canDo: [ - qx.locale.Manager.tr("- can access content shared within the Organization") + qx.locale.Manager.tr("- Can access content shared within the Organization") ], accessRights: { "read": false, @@ -39,8 +39,8 @@ qx.Class.define("osparc.data.Roles", { label: qx.locale.Manager.tr("Member"), longLabel: qx.locale.Manager.tr("Member: Read access"), canDo: [ - qx.locale.Manager.tr("- can see other members"), - qx.locale.Manager.tr("- can share with other members") + qx.locale.Manager.tr("- Can see other members"), + qx.locale.Manager.tr("- Can share with other members") ], accessRights: { "read": true, @@ -53,9 +53,9 @@ qx.Class.define("osparc.data.Roles", { label: qx.locale.Manager.tr("Manager"), longLabel: qx.locale.Manager.tr("Manager: Read/Write access"), canDo: [ - qx.locale.Manager.tr("- can Add/Delete members"), - qx.locale.Manager.tr("- can Promote/Demote members"), - qx.locale.Manager.tr("- can Edit Organization details") + qx.locale.Manager.tr("- Can Add/Delete members"), + qx.locale.Manager.tr("- Can Promote/Demote members"), + qx.locale.Manager.tr("- Can Edit Organization details") ], accessRights: { "read": true, @@ -68,7 +68,7 @@ qx.Class.define("osparc.data.Roles", { label: qx.locale.Manager.tr("Administrator"), longLabel: qx.locale.Manager.tr("Admin: Read/Write/Delete access"), canDo: [ - qx.locale.Manager.tr("- can Delete the Organization") + qx.locale.Manager.tr("- Can Delete the Organization") ], accessRights: { "read": true, @@ -84,7 +84,7 @@ qx.Class.define("osparc.data.Roles", { label: qx.locale.Manager.tr("User"), longLabel: qx.locale.Manager.tr("User: Read access"), canDo: [ - qx.locale.Manager.tr("- can open it without making changes") + qx.locale.Manager.tr("- Can open it without making changes") ], accessRights: { "read": true, @@ -97,8 +97,8 @@ qx.Class.define("osparc.data.Roles", { label: qx.locale.Manager.tr("Editor"), longLabel: qx.locale.Manager.tr("Editor: Read/Write access"), canDo: [ - qx.locale.Manager.tr("- can make changes"), - qx.locale.Manager.tr("- can share it") + qx.locale.Manager.tr("- Can make changes"), + qx.locale.Manager.tr("- Can share it") ], accessRights: { "read": true, @@ -111,7 +111,7 @@ qx.Class.define("osparc.data.Roles", { label: qx.locale.Manager.tr("Owner"), longLabel: qx.locale.Manager.tr("Owner: Read/Write/Delete access"), canDo: [ - 
qx.locale.Manager.tr("- can delete it") + qx.locale.Manager.tr("- Can delete it") ], accessRights: { "read": true, @@ -120,13 +120,42 @@ qx.Class.define("osparc.data.Roles", { }, } }, + FUNCTION: { + "read": { + id: "read", + label: qx.locale.Manager.tr("User"), + longLabel: qx.locale.Manager.tr("User: Read access"), + canDo: [ + qx.locale.Manager.tr("- Can use it") + ], + accessRights: { + "execute": true, + "read": true, + "write": false + }, + }, + "write": { + id: "write", + label: qx.locale.Manager.tr("Owner"), + longLabel: qx.locale.Manager.tr("Owner: Read/Write access"), + canDo: [ + qx.locale.Manager.tr("- Can make changes"), + qx.locale.Manager.tr("- Can share it") + ], + accessRights: { + "execute": true, + "read": true, + "write": true + }, + }, + }, SERVICES: { "read": { id: "read", label: qx.locale.Manager.tr("User"), longLabel: qx.locale.Manager.tr("User: Read access"), canDo: [ - qx.locale.Manager.tr("- can use it") + qx.locale.Manager.tr("- Can use it") ], accessRights: { "execute": true, @@ -138,8 +167,8 @@ qx.Class.define("osparc.data.Roles", { label: qx.locale.Manager.tr("Editor"), longLabel: qx.locale.Manager.tr("Editor: Read/Write access"), canDo: [ - qx.locale.Manager.tr("- can make changes"), - qx.locale.Manager.tr("- can share it") + qx.locale.Manager.tr("- Can make changes"), + qx.locale.Manager.tr("- Can share it") ], accessRights: { "execute": true, @@ -153,7 +182,7 @@ qx.Class.define("osparc.data.Roles", { label: qx.locale.Manager.tr("User"), longLabel: qx.locale.Manager.tr("User: Read access"), canDo: [ - qx.locale.Manager.tr("- can use the credits") + qx.locale.Manager.tr("- Can use the credits") ], accessRights: { "read": true, @@ -166,8 +195,8 @@ qx.Class.define("osparc.data.Roles", { label: qx.locale.Manager.tr("Accountant"), longLabel: qx.locale.Manager.tr("Accountant: Read/Write access"), canDo: [ - qx.locale.Manager.tr("- can Add/Delete members"), - qx.locale.Manager.tr("- can Edit Credit Account details") + qx.locale.Manager.tr("- Can Add/Delete members"), + qx.locale.Manager.tr("- Can Edit Credit Account details") ], accessRights: { "read": true, @@ -182,27 +211,42 @@ qx.Class.define("osparc.data.Roles", { label: qx.locale.Manager.tr("Viewer"), longLabel: qx.locale.Manager.tr("Viewer: Read access"), canDo: [ - qx.locale.Manager.tr("- can inspect the content and open ") + osparc.product.Utils.getStudyAlias({plural: true}) + qx.locale.Manager.tr(" without making changes") - ] + qx.locale.Manager.tr("- Can inspect the content and open ") + osparc.product.Utils.getStudyAlias({plural: true}) + qx.locale.Manager.tr(" without making changes") + ], + accessRights: { + "read": true, + "write": false, + "delete": false + }, }, "write": { id: "write", label: qx.locale.Manager.tr("Editor"), longLabel: qx.locale.Manager.tr("Editor: Read/Write access"), canDo: [ - qx.locale.Manager.tr("- can add ") + osparc.product.Utils.getStudyAlias({plural: true}), - qx.locale.Manager.tr("- can add folders"), - ] + qx.locale.Manager.tr("- Can add ") + osparc.product.Utils.getStudyAlias({plural: true}), + qx.locale.Manager.tr("- Can add folders"), + ], + accessRights: { + "read": true, + "write": true, + "delete": false + }, }, "delete": { id: "delete", label: qx.locale.Manager.tr("Owner"), longLabel: qx.locale.Manager.tr("Owner: Read/Write/Delete access"), canDo: [ - qx.locale.Manager.tr("- can rename workspace"), - qx.locale.Manager.tr("- can share it"), - qx.locale.Manager.tr("- can delete it") - ] + qx.locale.Manager.tr("- Can rename workspace"), + qx.locale.Manager.tr("- Can 
share it"), + qx.locale.Manager.tr("- Can delete it") + ], + accessRights: { + "read": true, + "write": true, + "delete": true + }, } }, @@ -254,6 +298,10 @@ qx.Class.define("osparc.data.Roles", { return this.__createRolesLayout(osparc.data.Roles.STUDY); }, + createRolesFunctionInfo: function() { + return this.__createRolesLayout(osparc.data.Roles.FUNCTION); + }, + createRolesServicesInfo: function() { return this.__createRolesLayout(osparc.data.Roles.SERVICES); }, diff --git a/services/static-webserver/client/source/class/osparc/data/SubJob.js b/services/static-webserver/client/source/class/osparc/data/SubJob.js index 9ee1f0174632..862a89280a5e 100644 --- a/services/static-webserver/client/source/class/osparc/data/SubJob.js +++ b/services/static-webserver/client/source/class/osparc/data/SubJob.js @@ -18,10 +18,11 @@ qx.Class.define("osparc.data.SubJob", { extend: qx.core.Object, - construct: function(subJobData) { + construct: function(collectionRunId, subJobData) { this.base(arguments); this.set({ + collectionRunId, projectUuid: subJobData["projectUuid"], nodeId: subJobData["nodeId"], }); @@ -30,6 +31,12 @@ qx.Class.define("osparc.data.SubJob", { }, properties: { + collectionRunId: { + check: "String", + nullable: false, + init: null, + }, + projectUuid: { check: "String", nullable: false, @@ -42,7 +49,7 @@ qx.Class.define("osparc.data.SubJob", { init: null, }, - nodeName: { + name: { check: "String", nullable: false, init: null, @@ -94,7 +101,7 @@ qx.Class.define("osparc.data.SubJob", { members: { updateSubJob: function(subJobData) { this.set({ - nodeName: subJobData["nodeName"], + name: subJobData["name"], state: subJobData["state"], progress: subJobData["progress"], startedAt: subJobData["startedAt"] ? new Date(subJobData["startedAt"]) : null, diff --git a/services/static-webserver/client/source/class/osparc/data/model/Conversation.js b/services/static-webserver/client/source/class/osparc/data/model/Conversation.js new file mode 100644 index 000000000000..6f59640c9e14 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/data/model/Conversation.js @@ -0,0 +1,341 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +/** + * Class that stores Conversation data. 
+ */ + +qx.Class.define("osparc.data.model.Conversation", { + extend: qx.core.Object, + + /** + * @param conversationData {Object} Object containing the serialized Conversation Data + * @param studyId {String} ID of the Study + * */ + construct: function(conversationData, studyId) { + this.base(arguments); + + this.set({ + conversationId: conversationData.conversationId, + name: conversationData.name, + userGroupId: conversationData.userGroupId, + type: conversationData.type, + created: new Date(conversationData.created), + modified: new Date(conversationData.modified), + projectId: conversationData.projectUuid || null, + extraContext: conversationData.extraContext || null, + studyId: studyId || null, + }); + + this.__messages = []; + this.__listenToConversationMessageWS(); + + if (conversationData.type === "SUPPORT") { + this.__fetchLastMessage(); + } + }, + + statics: { + CHANNELS: { + CONVERSATION_CREATED: "conversation:created", + CONVERSATION_UPDATED: "conversation:updated", + CONVERSATION_DELETED: "conversation:deleted", + CONVERSATION_MESSAGE_CREATED: "conversation:message:created", + CONVERSATION_MESSAGE_UPDATED: "conversation:message:updated", + CONVERSATION_MESSAGE_DELETED: "conversation:message:deleted", + }, + + MAX_TITLE_LENGTH: 50, + MAX_CONTENT_LENGTH: 4096, + }, + + properties: { + conversationId: { + check: "String", + nullable: false, + init: null, + event: "changeConversationId", + }, + + name: { + check: "String", + nullable: false, + init: null, + event: "changeName", + apply: "__applyName", + }, + + userGroupId: { + check: "Number", + nullable: false, + init: null, + event: "changeUserGroupId", + }, + + type: { + check: [ + "PROJECT_STATIC", + "PROJECT_ANNOTATION", + "SUPPORT", + ], + nullable: false, + init: null, + event: "changeType", + }, + + created: { + check: "Date", + nullable: false, + init: null, + event: "changeCreated", + }, + + modified: { + check: "Date", + nullable: false, + init: null, + event: "changeModified", + }, + + projectId: { + check: "String", + nullable: true, + init: null, + event: "changeProjectId", + }, + + extraContext: { + check: "Object", + nullable: true, + init: null, + event: "changeExtraContext", + }, + + nameAlias: { + check: "String", + nullable: false, + init: "", + event: "changeNameAlias", + }, + + lastMessage: { + check: "Object", + nullable: true, + init: null, + event: "changeLastMessage", + apply: "__applyLastMessage", + }, + + studyId: { + check: "String", + nullable: true, + init: null, + }, + }, + + events: { + "messageAdded": "qx.event.type.Data", + "messageUpdated": "qx.event.type.Data", + "messageDeleted": "qx.event.type.Data", + }, + + members: { + __fetchLastMessagePromise: null, + __nextRequestParams: null, + __messages: null, + + __applyName: function(name) { + if (name && name !== "null") { + this.setNameAlias(name); + } + }, + + __applyLastMessage: function(lastMessage) { + const name = this.getName(); + if (!name || name === "null") { + this.setNameAlias(lastMessage ? 
lastMessage.content : "");
+      }
+    },
+
+    __listenToConversationMessageWS: function() {
+      const socket = osparc.wrapper.WebSocket.getInstance();
+      [
+        this.self().CHANNELS.CONVERSATION_MESSAGE_CREATED,
+        this.self().CHANNELS.CONVERSATION_MESSAGE_UPDATED,
+        this.self().CHANNELS.CONVERSATION_MESSAGE_DELETED,
+      ].forEach(eventName => {
+        const eventHandler = message => {
+          if (message) {
+            const conversationId = message["conversationId"];
+            if (conversationId === this.getConversationId()) {
+              switch (eventName) {
+                case this.self().CHANNELS.CONVERSATION_MESSAGE_CREATED:
+                  this.addMessage(message);
+                  break;
+                case this.self().CHANNELS.CONVERSATION_MESSAGE_UPDATED:
+                  this.updateMessage(message);
+                  break;
+                case this.self().CHANNELS.CONVERSATION_MESSAGE_DELETED:
+                  this.deleteMessage(message);
+                  break;
+              }
+            }
+          }
+        };
+        socket.on(eventName, eventHandler, this);
+      });
+    },
+
+    __fetchLastMessage: function() {
+      if (this.__fetchLastMessagePromise) {
+        return this.__fetchLastMessagePromise;
+      }
+
+      let promise = osparc.store.ConversationsSupport.getInstance().fetchLastMessage(this.getConversationId());
+      promise
+        .then(lastMessage => {
+          this.addMessage(lastMessage);
+          promise = null;
+          return lastMessage;
+        })
+        .finally(() => {
+          this.__fetchLastMessagePromise = null;
+        });
+
+      this.__fetchLastMessagePromise = promise;
+      return promise;
+    },
+
+    amIOwner: function() {
+      return this.getUserGroupId() === osparc.auth.Data.getInstance().getGroupId();
+    },
+
+    getNextMessages: function() {
+      const params = {
+        url: {
+          conversationId: this.getConversationId(),
+          offset: 0,
+          limit: 42
+        }
+      };
+      if (this.getStudyId()) {
+        params.url.studyId = this.getStudyId();
+      }
+
+      const nextRequestParams = this.__nextRequestParams;
+      if (nextRequestParams) {
+        params.url.offset = nextRequestParams.offset;
+        params.url.limit = nextRequestParams.limit;
+      }
+      const options = {
+        resolveWResponse: true
+      };
+      const promise = this.getStudyId() ?
+        osparc.data.Resources.fetch("conversationsStudies", "getMessagesPage", params, options) :
+        osparc.data.Resources.fetch("conversationsSupport", "getMessagesPage", params, options);
+      return promise
+        .then(resp => {
+          const messages = resp["data"];
+          messages.forEach(message => this.addMessage(message));
+          this.__nextRequestParams = resp["_links"]["next"];
+          return resp;
+        })
+        .catch(err => osparc.FlashMessenger.logError(err));
+    },
+
+    renameConversation: function(newName) {
+      osparc.store.ConversationsSupport.getInstance().renameConversation(this.getConversationId(), newName)
+        .then(() => this.setName(newName))
+        .catch(err => osparc.FlashMessenger.logError(err));
+    },
+
+    patchExtraContext: function(extraContext) {
+      osparc.store.ConversationsSupport.getInstance().patchExtraContext(this.getConversationId(), extraContext)
+        .then(() => {
+          this.setExtraContext(extraContext);
+        });
+    },
+
+    addMessage: function(message) {
+      if (message) {
+        const found = this.__messages.find(msg => msg["messageId"] === message["messageId"]);
+        if (!found) {
+          this.__messages.push(message);
+          this.fireDataEvent("messageAdded", message);
+        }
+        // latest first
+        this.__messages.sort((a, b) => new Date(b.created) - new Date(a.created));
+        this.setLastMessage(this.__messages[0]);
+      }
+    },
+
+    updateMessage: function(message) {
+      if (message) {
+        const found = this.__messages.find(msg => msg["messageId"] === message["messageId"]);
+        if (found) {
+          Object.assign(found, message);
+          this.fireDataEvent("messageUpdated", found);
+        }
+      }
+    },
+
+    deleteMessage: function(message) {
+      if (message) {
+        const found = this.__messages.find(msg => msg["messageId"] === message["messageId"]);
+        if (found) {
+          this.__messages.splice(this.__messages.indexOf(found), 1);
+          this.fireDataEvent("messageDeleted", found);
+        }
+      }
+    },
+
+    getContextProjectId: function() {
+      if (this.getExtraContext() && "projectId" in this.getExtraContext()) {
+        return this.getExtraContext()["projectId"];
+      }
+      return null;
+    },
+
+    getFogbugzLink: function() {
+      if (this.getExtraContext() && "fogbugz_case_url" in this.getExtraContext()) {
+        return this.getExtraContext()["fogbugz_case_url"];
+      }
+      return null;
+    },
+
+    getAppointment: function() {
+      if (this.getExtraContext() && "appointment" in this.getExtraContext()) {
+        return this.getExtraContext()["appointment"];
+      }
+      return null;
+    },
+
+    setAppointment: function(appointment) {
+      const extraContext = this.getExtraContext() || {};
+      extraContext["appointment"] = appointment ? appointment.toISOString() : null;
+      // OM: Supporters are not allowed to patch the conversation metadata yet
+      const backendAllowsPatch = osparc.store.Groups.getInstance().amIASupportUser() ?
false : true; + if (backendAllowsPatch) { + return osparc.store.ConversationsSupport.getInstance().patchExtraContext(this.getConversationId(), extraContext) + .then(() => { + this.setExtraContext(Object.assign({}, extraContext)); + }); + } + return Promise.resolve(this.setExtraContext(Object.assign({}, extraContext))); + }, + }, +}); diff --git a/services/static-webserver/client/source/class/osparc/data/model/Function.js b/services/static-webserver/client/source/class/osparc/data/model/Function.js new file mode 100644 index 000000000000..3b0e6c1fd24a --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/data/model/Function.js @@ -0,0 +1,203 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +/** + * Class that stores Function data. + */ + +qx.Class.define("osparc.data.model.Function", { + extend: qx.core.Object, + + /** + * @param functionData {Object} Object containing the serialized Function Data + * @param templateData {Object} Object containing the underlying serialized Template Data + */ + construct: function(functionData, templateData = null) { + this.base(arguments); + + this.set({ + uuid: functionData.uuid, + functionClass: functionData.functionClass, + title: functionData.title, + description: functionData.description, + inputSchema: functionData.inputSchema || this.getInputSchema(), + outputSchema: functionData.outputSchema || this.getOutputSchema(), + defaultInputs: functionData.defaultInputs || this.getDefaultInputs(), + accessRights: functionData.accessRights || this.getAccessRights(), + creationDate: functionData.creationDate ? new Date(functionData.creationDate) : this.getCreationDate(), + lastChangeDate: functionData.lastChangeDate ? 
new Date(functionData.lastChangeDate) : this.getLastChangeDate(), + thumbnail: functionData.thumbnail || this.getThumbnail(), + templateId: functionData.templateId || this.getTemplateId(), + }); + + if (templateData) { + const template = new osparc.data.model.Study(templateData); + this.setTemplate(template); + } + }, + + properties: { + uuid: { + check: "String", + nullable: false, + event: "changeUuid", + init: "" + }, + + functionClass: { + check: [ + "PROJECT", // osparc.data.model.Function.FUNCTION_CLASS.PROJECT + "SOLVER", // osparc.data.model.Function.FUNCTION_CLASS.SOLVER + "PYTHON_CODE", // osparc.data.model.Function.FUNCTION_CLASS.PYTHON + ], + nullable: false, + event: "changeFunctionClass", + init: null + }, + + title: { + check: "String", + nullable: false, + event: "changeTitle", + init: "Function" + }, + + description: { + check: "String", + nullable: true, + event: "changeDescription", + init: null + }, + + inputSchema: { + check: "Object", + nullable: false, + event: "changeInputSchema", + init: {} + }, + + outputSchema: { + check: "Object", + nullable: false, + event: "changeOutputSchema", + init: {} + }, + + defaultInputs: { + check: "Object", + nullable: false, + event: "changeDefaultInputs", + init: {} + }, + + accessRights: { + check: "Object", + nullable: false, + event: "changeAccessRights", + init: {} + }, + + creationDate: { + check: "Date", + nullable: false, + event: "changeCreationDate", + init: new Date() + }, + + lastChangeDate: { + check: "Date", + nullable: false, + event: "changeLastChangeDate", + init: new Date() + }, + + thumbnail: { + check: "String", + nullable: true, + event: "changeThumbnail", + init: null + }, + + templateId: { + check: "String", + nullable: true, + init: null, + }, + + template: { + check: "osparc.data.model.Study", + nullable: true, + init: null, + }, + }, + + statics: { + FUNCTION_CLASS: { + PROJECT: "PROJECT", + SOLVER: "SOLVER", + PYTHON_CODE: "PYTHON_CODE" + }, + + getProperties: function() { + return Object.keys(qx.util.PropertyUtil.getProperties(osparc.data.model.Function)); + }, + + canIWrite: function(accessRights) { + const groupsStore = osparc.store.Groups.getInstance(); + const orgIDs = groupsStore.getOrganizationIds(); + orgIDs.push(groupsStore.getMyGroupId()); + if (orgIDs.length) { + return osparc.share.CollaboratorsFunction.canGroupsWrite(accessRights, (orgIDs)); + } + return false; + }, + }, + + members: { + serialize: function() { + let jsonObject = {}; + const propertyKeys = this.self().getProperties(); + propertyKeys.forEach(key => { + if (key === "template") { + return; // template is not serialized + } + jsonObject[key] = this.get(key); + }); + return jsonObject; + }, + + canIWrite: function() { + const accessRights = this.getAccessRights(); + return this.self().canIWrite(accessRights); + }, + + patchFunction: function(functionChanges) { + return osparc.store.Functions.patchFunction(this.getUuid(), functionChanges) + .then(functionData => { + Object.keys(functionChanges).forEach(fieldKey => { + const upKey = qx.lang.String.firstUp(fieldKey); + const setter = "set" + upKey; + this[setter](functionChanges[fieldKey]); + }); + this.set({ + lastChangeDate: new Date(functionData.lastChangeDate) + }); + return functionData; + }); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/data/model/Group.js b/services/static-webserver/client/source/class/osparc/data/model/Group.js index e345265ba689..3153c95e0dc1 100644 --- 
a/services/static-webserver/client/source/class/osparc/data/model/Group.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Group.js @@ -82,7 +82,7 @@ qx.Class.define("osparc.data.model.Group", { }, groupType: { - check: ["me", "organization", "productEveryone", "everyone"], + check: ["me", "organization", "support", "productEveryone", "everyone"], nullable: false, init: null, }, @@ -96,7 +96,7 @@ qx.Class.define("osparc.data.model.Group", { statics: { getProperties: function() { return Object.keys(qx.util.PropertyUtil.getProperties(osparc.data.model.Group)); - } + }, }, members: { @@ -104,8 +104,8 @@ qx.Class.define("osparc.data.model.Group", { return Object.values(this.getGroupMembers()).find(user => user.getUserId() === userId); }, - getGroupMemberByUsername: function(username) { - return Object.values(this.getGroupMembers()).find(user => user.getUsername() === username); + getGroupMemberByUserName: function(userName) { + return Object.values(this.getGroupMembers()).find(user => user.getUserName() === userName); }, getGroupMemberByLogin: function(userEmail) { diff --git a/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js b/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js index 850672e96572..2b085ece0ff4 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js +++ b/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js @@ -30,6 +30,7 @@ qx.Class.define("osparc.data.model.IframeHandler", { }); this.__initLoadingPage(); + this.__initLockedPage(); this.__initIFrame(); }, @@ -44,7 +45,9 @@ qx.Class.define("osparc.data.model.IframeHandler", { node: { check: "osparc.data.model.Node", init: null, - nullable: false + nullable: false, + event: "changeNode", + apply: "__applyNode", }, loadingPage: { @@ -53,6 +56,12 @@ qx.Class.define("osparc.data.model.IframeHandler", { nullable: true }, + lockedPage: { + check: "osparc.ui.message.NodeLockedPage", + init: null, + nullable: true + }, + iFrame: { check: "osparc.widget.PersistentIframe", init: null, @@ -61,7 +70,19 @@ qx.Class.define("osparc.data.model.IframeHandler", { }, events: { - "iframeChanged": "qx.event.type.Event" + "iframeStateChanged": "qx.event.type.Event" + }, + + statics: { + evalShowToolbar: function(loadingPage, study) { + if (osparc.product.Utils.isProduct("s4llite")) { + loadingPage.setShowToolbar(false); + } else { + study.getUi().bind("mode", loadingPage, "showToolbar", { + converter: mode => mode !== "standalone" + }); + } + }, }, members: { @@ -89,16 +110,17 @@ qx.Class.define("osparc.data.model.IframeHandler", { } }, + __applyNode: function(node) { + node.getStatus().getLockState().addListener("changeLocked", () => this.fireEvent("iframeStateChanged"), this); + }, + __initIFrame: function() { const iframe = new osparc.widget.PersistentIframe(); - osparc.utils.Utils.setIdToWidget(iframe.getIframe(), "iframe_"+this.getNode().getNodeId()); - if (osparc.product.Utils.isProduct("s4llite")) { - iframe.setShowToolbar(false); - } else { - this.getStudy().getUi().bind("mode", iframe, "showToolbar", { - converter: mode => mode !== "standalone" - }); + if (this.getNode().getKey().includes("s4l-ui")) { + iframe.getIframe().setAppearance("iframe-no-border"); } + osparc.utils.Utils.setIdToWidget(iframe.getIframe(), "iframe_"+this.getNode().getNodeId()); + this.self().evalShowToolbar(iframe, this.getStudy()); iframe.addListener("restart", () => this.restartIFrame(), this); 
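IframeHandler now builds three interchangeable views (loading page, locked page and the iframe itself), applies the same toolbar policy to all of them through evalShowToolbar, and funnels every "show something else" trigger, including the node lock state, through a single "iframeStateChanged" event. A short consumer sketch using only accessors shown above (showWidget is a hypothetical helper of the container):

// Sketch: a container decides which view to show whenever the handler signals a change.
const handler = node.getIframeHandler();
handler.addListener("iframeStateChanged", () => {
  const lockState = node.getStatus().getLockState();
  if (lockState.isLocked()) {
    showWidget(handler.getLockedPage());     // another user has the service open
  } else if (node.getServiceUrl()) {
    showWidget(handler.getIFrame());         // service is up, show its UI
  } else {
    showWidget(handler.getLoadingPage());    // still starting
  }
}, this);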
iframe.getDiskUsageIndicator().setCurrentNode(this.getNode()) this.setIFrame(iframe); @@ -108,16 +130,11 @@ qx.Class.define("osparc.data.model.IframeHandler", { const loadingPage = new osparc.ui.message.Loading().set({ header: this.__getLoadingPageHeader() }); - if (osparc.product.Utils.isProduct("s4llite")) { - loadingPage.setShowToolbar(false); - } else { - this.getStudy().getUi().bind("mode", loadingPage, "showToolbar", { - converter: mode => mode !== "standalone" - }); - } + + this.self().evalShowToolbar(loadingPage, this.getStudy()); const node = this.getNode(); - const thumbnail = node.getMetaData()["thumbnail"]; + const thumbnail = node.getMetadata()["thumbnail"]; if (thumbnail) { loadingPage.setLogo(thumbnail); } @@ -141,11 +158,18 @@ qx.Class.define("osparc.data.model.IframeHandler", { status = node.getStatus().getInteractive(); } const statusText = status ? (status.charAt(0).toUpperCase() + status.slice(1)) : this.tr("Starting"); - const metadata = node.getMetaData(); + const metadata = node.getMetadata(); const versionDisplay = osparc.service.Utils.extractVersionDisplay(metadata); return statusText + " " + node.getLabel() + " v" + versionDisplay + ""; }, + __initLockedPage: function() { + const lockedPage = new osparc.ui.message.NodeLockedPage(); + this.self().evalShowToolbar(lockedPage, this.getStudy()); + this.bind("node", lockedPage, "node"); + this.setLockedPage(lockedPage); + }, + __nodeState: function() { // Check if study is still there if (this.getStudy() === null || this.__stopRequestingStatus === true) { @@ -381,7 +405,7 @@ qx.Class.define("osparc.data.model.IframeHandler", { if (this.getIFrame()) { this.getIFrame().resetSource(); } - this.fireEvent("iframeChanged"); + this.fireEvent("iframeStateChanged"); } }, @@ -418,7 +442,7 @@ qx.Class.define("osparc.data.model.IframeHandler", { // fire event to force switching to iframe's content: // it is required in those cases where the native 'load' event isn't triggered (voila) - this.fireEvent("iframeChanged"); + this.fireEvent("iframeStateChanged"); } } } diff --git a/services/static-webserver/client/source/class/osparc/data/model/Node.js b/services/static-webserver/client/source/class/osparc/data/model/Node.js index 048f69ebfc0c..0426f16e8532 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Node.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Node.js @@ -41,14 +41,13 @@ qx.Class.define("osparc.data.model.Node", { /** * @param study {osparc.data.model.Study} Study or Serialized Study Object - * @param metadata {Object} service's metadata + * @param key {String} unique key of the service represented by the node + * @param version {String} version of the service represented by the node * @param nodeId {String} uuid of the service represented by the node (not needed for new Nodes) */ - construct: function(study, metadata, nodeId) { + construct: function(study, key, version, nodeId) { this.base(arguments); - this.__metaData = metadata; - this.setOutputs({}); this.__inputNodes = []; this.__inputsRequired = []; @@ -57,12 +56,10 @@ qx.Class.define("osparc.data.model.Node", { } this.set({ nodeId: nodeId || osparc.utils.Utils.uuidV4(), - key: metadata["key"], - version: metadata["version"], + key, + version, status: new osparc.data.model.NodeStatus(this) }); - - this.populateWithMetadata(); }, properties: { @@ -76,14 +73,12 @@ qx.Class.define("osparc.data.model.Node", { key: { check: "String", nullable: true, - apply: "__applyNewMetaData" }, version: { check: "String", nullable: 
true, event: "changeVersion", - apply: "__applyNewMetaData" }, nodeId: { @@ -91,6 +86,14 @@ qx.Class.define("osparc.data.model.Node", { nullable: false }, + metadata: { + check: "Object", + init: null, + nullable: false, + event: "changeMetadata", + apply: "__applyMetadata", + }, + label: { check: "String", init: "Node", @@ -98,11 +101,6 @@ qx.Class.define("osparc.data.model.Node", { event: "changeLabel" }, - inputAccess: { - check: "Object", - nullable: true - }, - dynamicV2: { check: "Boolean", init: false, @@ -171,7 +169,8 @@ qx.Class.define("osparc.data.model.Node", { check: "qx.core.Object", init: null, nullable: true, - event: "changeMarker" + event: "changeMarker", + apply: "__applyMarker", }, inputConnected: { @@ -197,10 +196,13 @@ qx.Class.define("osparc.data.model.Node", { }, events: { - "updateStudyDocument": "qx.event.type.Event", + "projectDocumentChanged": "qx.event.type.Data", "reloadModel": "qx.event.type.Event", "retrieveInputs": "qx.event.type.Data", "keyChanged": "qx.event.type.Event", + "changePosition": "qx.event.type.Data", + "edgeCreated": "qx.event.type.Data", + "edgeRemoved": "qx.event.type.Data", "fileRequested": "qx.event.type.Data", "parameterRequested": "qx.event.type.Data", "filePickerRequested": "qx.event.type.Data", @@ -213,6 +215,37 @@ qx.Class.define("osparc.data.model.Node", { }, statics: { + // Properties of the Node class that should not be listened to + ListenChangesProps: [ + // "study", // immutable + "key", + "version", + // "nodeId", // immutable + "label", + "inputs", // own listener + "inputsUnits", // own listener + // "dynamicV2", // frontend only + // "serviceUrl", // frontend only + // "portsConnected", // frontend only + "outputs", // listen to changes only if this is a frontend node + // "status", // backend driven + // "errors", // frontend only + "bootOptions", + // "propsForm", // frontend only + // "outputsForm", // frontend only + // "marker", // own listener + // "inputConnected", // frontend only + // "outputConnected", // frontend only + // "logger", // frontend only + "inputNodes", // !! not a property but goes into the model + "inputsRequired", // !! not a property but goes into the model + "progress", // !! 
not a property but goes into the model + ], + + getProperties: function() { + return Object.keys(qx.util.PropertyUtil.getProperties(osparc.data.model.Node)); + }, + isFrontend: function(metadata) { return (metadata && metadata.key && metadata.key.includes("/frontend/")); }, @@ -241,6 +274,10 @@ qx.Class.define("osparc.data.model.Node", { return (metadata && metadata.type && metadata.type === "computational"); }, + isUnknown: function(metadata) { + return (metadata && metadata.key && metadata.key === osparc.store.Services.UNKNOWN_SERVICE_KEY); + }, + isUpdatable: function(metadata) { return osparc.service.Utils.isUpdatable(metadata); }, @@ -322,7 +359,6 @@ qx.Class.define("osparc.data.model.Node", { }, members: { - __metaData: null, __inputNodes: null, __inputsRequired: null, __settingsForm: null, @@ -340,7 +376,7 @@ qx.Class.define("osparc.data.model.Node", { }, isInKey: function(str) { - if (this.getMetaData() === null) { + if (this.getMetadata() === null) { return false; } if (this.getKey() === null) { @@ -350,71 +386,66 @@ qx.Class.define("osparc.data.model.Node", { }, isFilePicker: function() { - return osparc.data.model.Node.isFilePicker(this.getMetaData()); + return osparc.data.model.Node.isFilePicker(this.getMetadata()); }, isParameter: function() { - return osparc.data.model.Node.isParameter(this.getMetaData()); + return osparc.data.model.Node.isParameter(this.getMetadata()); }, isIterator: function() { - return osparc.data.model.Node.isIterator(this.getMetaData()); + return osparc.data.model.Node.isIterator(this.getMetadata()); }, isProbe: function() { - return osparc.data.model.Node.isProbe(this.getMetaData()); + return osparc.data.model.Node.isProbe(this.getMetadata()); }, isDynamic: function() { - return osparc.data.model.Node.isDynamic(this.getMetaData()); + return osparc.data.model.Node.isDynamic(this.getMetadata()); }, isComputational: function() { - return osparc.data.model.Node.isComputational(this.getMetaData()); + return osparc.data.model.Node.isComputational(this.getMetadata()); + }, + + isUnknown: function() { + return osparc.data.model.Node.isUnknown(this.getMetadata()); }, isUpdatable: function() { - return osparc.data.model.Node.isUpdatable(this.getMetaData()); + return osparc.data.model.Node.isUpdatable(this.getMetadata()); }, isDeprecated: function() { - return osparc.data.model.Node.isDeprecated(this.getMetaData()); + return osparc.data.model.Node.isDeprecated(this.getMetadata()); }, isRetired: function() { - return osparc.data.model.Node.isRetired(this.getMetaData()); + return osparc.data.model.Node.isRetired(this.getMetadata()); }, hasBootModes: function() { - return osparc.data.model.Node.hasBootModes(this.getMetaData()); + return osparc.data.model.Node.hasBootModes(this.getMetadata()); }, getMinVisibleInputs: function() { - return osparc.data.model.Node.getMinVisibleInputs(this.getMetaData()); + return osparc.data.model.Node.getMinVisibleInputs(this.getMetadata()); }, - __applyNewMetaData: function(newV, oldV) { - if (oldV !== null) { - const metadata = osparc.store.Services.getMetadata(this.getKey(), this.getVersion()); - if (metadata) { - this.__metaData = metadata; - } - } - }, - - getMetaData: function() { - return this.__metaData; + hasPropsForm: function() { + return this.isPropertyInitialized("propsForm") && this.getPropsForm(); }, __getInputData: function() { - if (this.isPropertyInitialized("propsForm") && this.getPropsForm()) { + if (this.hasPropsForm()) { return this.getPropsForm().getValues(); } return {}; }, __getInputUnits: function() { - if 
(this.isPropertyInitialized("propsForm") && this.getPropsForm()) { + if (this.hasPropsForm()) { const changedUnits = this.getPropsForm().getChangedXUnits(); if (Object.keys(changedUnits).length) { return changedUnits; @@ -447,8 +478,27 @@ qx.Class.define("osparc.data.model.Node", { return Object.keys(this.getOutputs()).length; }, - populateWithMetadata: function() { - const metadata = this.__metaData; + fetchMetadataAndPopulate: function(nodeData, nodeUiData) { + this.__initNodeData = nodeData; + this.__initNodeUiData = nodeUiData; + return osparc.store.Services.getService(this.getKey(), this.getVersion()) + .then(serviceMetadata => { + this.setMetadata(serviceMetadata); + this.populateNodeData(nodeData); + // old place to store the position + this.populateNodeUIData(nodeData); + // new place to store the position and marker + this.populateNodeUIData(nodeUiData); + this.listenToChanges(); + }) + .catch(err => { + console.log(err); + const errorMsg = qx.locale.Manager.tr("Service metadata missing"); + osparc.FlashMessenger.logError(errorMsg); + }); + }, + + __applyMetadata: function(metadata) { if (metadata) { if (metadata.name) { this.setLabel(metadata.name); @@ -461,9 +511,13 @@ qx.Class.define("osparc.data.model.Node", { if (this.getPropsForm()) { this.getPropsForm().makeInputsDynamic(); } + } else { + this.setInputs({}); } if (metadata.outputs) { this.setOutputs(metadata.outputs); + } else { + this.setOutputs({}); } } }, @@ -473,17 +527,16 @@ qx.Class.define("osparc.data.model.Node", { if (nodeData.label) { this.setLabel(nodeData.label); } - this.populateInputOutputData(nodeData); - this.populateStates(nodeData); + this.__populateInputOutputData(nodeData); + this.populateProgress(nodeData); + this.populateState(nodeData); if (nodeData.bootOptions) { this.setBootOptions(nodeData.bootOptions); } } - if (this.isParameter()) { this.__initParameter(); } - if (osparc.store.Store.getInstance().getCurrentStudy()) { // do not initialize the logger and iframe if the study isn't open this.__initLogger(); @@ -492,19 +545,24 @@ qx.Class.define("osparc.data.model.Node", { }, populateNodeUIData: function(nodeUIData) { - if ("position" in nodeUIData) { - this.setPosition(nodeUIData.position); - } - if ("marker" in nodeUIData) { - this.__addMarker(nodeUIData.marker); + if (nodeUIData) { + if ("position" in nodeUIData) { + this.setPosition(nodeUIData.position); + } + if ("marker" in nodeUIData) { + this.addMarker(nodeUIData.marker); + } } }, - populateInputOutputData: function(nodeData) { + __populateInputOutputData: function(nodeData) { this.__setInputData(nodeData.inputs); this.__setInputUnits(nodeData.inputsUnits); - this.__setInputDataAccess(nodeData.inputAccess); if (this.getPropsForm()) { + const study = this.getStudy(); + if (study && study.isReadOnly()) { + this.getPropsForm().setEnabled(false); + } this.getPropsForm().makeInputsDynamic(); } this.setOutputData(nodeData.outputs); @@ -513,7 +571,7 @@ qx.Class.define("osparc.data.model.Node", { this.setInputsRequired(nodeData.inputsRequired || []); }, - populateStates: function(nodeData) { + populateProgress: function(nodeData) { if ("progress" in nodeData) { const progress = Number.parseInt(nodeData["progress"]); const oldProgress = this.getStatus().getProgress(); @@ -524,6 +582,9 @@ qx.Class.define("osparc.data.model.Node", { this.getStatus().setProgress(progress); } } + }, + + populateState: function(nodeData) { if ("state" in nodeData) { this.getStatus().setState(nodeData.state); } @@ -551,25 +612,12 @@ qx.Class.define("osparc.data.model.Node", 
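A Node is now created from its key and version only; the service metadata is resolved asynchronously in fetchMetadataAndPopulate through osparc.store.Services.getService, and only then are the node data, UI data and change listeners applied. A sketch of the intended call sequence, assuming the surrounding workbench supplies study, nodeData and nodeUiData:

// Sketch: construct first, then populate once the service metadata is available.
const node = new osparc.data.model.Node(study, nodeData.key, nodeData.version, nodeId);
node.fetchMetadataAndPopulate(nodeData, nodeUiData)
  .then(() => {
    // metadata, inputs/outputs, position/marker and the document-change listeners are set here
    console.log(node.getLabel(), node.getMetadata()["name"]);
  });

// Code that only needs the metadata can also wait for the property event directly:
node.addListenerOnce("changeMetadata", () => {
  if (node.isDynamic()) {
    node.checkState();   // safe to query the dynamic service state once metadata is known
  }
}, this);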
{ return this.getIframeHandler() ? this.getIframeHandler().getLoadingPage() : null; }, + getLockedPage: function() { + return this.getIframeHandler() ? this.getIframeHandler().getLockedPage() : null; + }, + __applyPropsForm: function(propsForm) { osparc.utils.Utils.setIdToWidget(propsForm, "settingsForm_" + this.getNodeId()); - - const checkIsPipelineRunning = () => { - const isPipelineRunning = this.getStudy().isPipelineRunning(); - this.getPropsForm().setEnabled(!isPipelineRunning); - }; - this.getStudy().addListener("changeState", () => checkIsPipelineRunning(), this); - - // potentially disabling the inputs form might have side effects if the deserialization is not over - if (this.getWorkbench().isDeserialized()) { - checkIsPipelineRunning(); - } else { - this.getWorkbench().addListener("changeDeserialized", e => { - if (e.getData()) { - checkIsPipelineRunning(); - } - }, this); - } }, /** @@ -651,11 +699,11 @@ qx.Class.define("osparc.data.model.Node", { if (this.getMarker()) { this.__removeMarker(); } else { - this.__addMarker(); + this.addMarker(); } }, - __addMarker: function(marker) { + addMarker: function(marker) { if (marker === undefined) { marker = { color: osparc.utils.Utils.getRandomColor() @@ -669,10 +717,35 @@ qx.Class.define("osparc.data.model.Node", { this.setMarker(null); }, + __applyMarker: function(marker) { + if (marker) { + this.fireDataEvent("projectDocumentChanged", { + "op": "add", + "path": `/ui/workbench/${this.getNodeId()}/marker`, + "value": marker.getColor(), + "osparc-resource": "ui", + }); + marker.addListener("changeColor", e => { + this.fireDataEvent("projectDocumentChanged", { + "op": "replace", + "path": `/ui/workbench/${this.getNodeId()}/marker`, + "value": e.getData(), + "osparc-resource": "ui", + }); + }); + } else { + this.fireDataEvent("projectDocumentChanged", { + "op": "remove", + "path": `/ui/workbench/${this.getNodeId()}/marker`, + "osparc-resource": "ui", + }); + } + }, + __setInputData: function(inputs) { if (this.__settingsForm && inputs) { - const inputData = {}; const inputLinks = {}; + const inputData = {}; const inputsCopy = osparc.utils.Utils.deepCloneObject(inputs); for (let key in inputsCopy) { if (osparc.utils.Ports.isDataALink(inputsCopy[key])) { @@ -692,18 +765,6 @@ qx.Class.define("osparc.data.model.Node", { } }, - __setInputDataAccess: function(inputAccess) { - if (inputAccess) { - this.setInputAccess(inputAccess); - this.getPropsForm().setAccessLevel(inputAccess); - } - - const study = this.getStudy(); - if (study && study.isReadOnly() && this.getPropsForm()) { - this.getPropsForm().setEnabled(false); - } - }, - setOutputData: function(outputs) { if (outputs) { let hasOutputs = false; @@ -733,7 +794,8 @@ qx.Class.define("osparc.data.model.Node", { this.getStatus().setModified(false); } - this.fireDataEvent("changeOutputs", this.getOutputs()); + // event was fired in the outputs setter + // this.fireDataEvent("changeOutputs", this.getOutputs()); } }, @@ -779,8 +841,8 @@ qx.Class.define("osparc.data.model.Node", { // errors to port if (loc.length > 2) { const portKey = loc[2]; - if (this.hasInputs() && portKey in this.getMetaData()["inputs"]) { - errorMsgData["msg"] = this.getMetaData()["inputs"][portKey]["label"] + ": " + errorMsgData["msg"]; + if (this.hasInputs() && portKey in this.getMetadata()["inputs"]) { + errorMsgData["msg"] = this.getMetadata()["inputs"][portKey]["label"] + ": " + errorMsgData["msg"]; } else { errorMsgData["msg"] = portKey + ": " + errorMsgData["msg"]; } @@ -793,7 +855,7 @@ 
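With __applyMarker, marker changes are broadcast as JSON-Patch style operations on "projectDocumentChanged", targeting /ui/workbench/&lt;nodeId&gt;/marker. The payloads an observer would see look roughly like this (the colors and the logging listener are illustrative):

// Sketch: observe the operations produced by adding, recoloring and removing a marker.
node.addListener("projectDocumentChanged", e => console.log(e.getData()), this);

node.addMarker({ color: "#FF0000" });
// -> { "op": "add",     "path": "/ui/workbench/<nodeId>/marker", "value": "#FF0000", "osparc-resource": "ui" }

node.getMarker().setColor("#00FF00");
// -> { "op": "replace", "path": "/ui/workbench/<nodeId>/marker", "value": "#00FF00", "osparc-resource": "ui" }

node.setMarker(null);
// -> { "op": "remove",  "path": "/ui/workbench/<nodeId>/marker", "osparc-resource": "ui" }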
qx.Class.define("osparc.data.model.Node", { }); } else if (this.hasInputs()) { // reset port errors - Object.keys(this.getMetaData()["inputs"]).forEach(portKey => { + Object.keys(this.getMetadata()["inputs"]).forEach(portKey => { this.getPropsForm().setPortErrorMessage(portKey, null); }); } @@ -806,21 +868,35 @@ qx.Class.define("osparc.data.model.Node", { return; } - // create automatic port connections - let autoConnections = 0; - const outPorts = node1.getOutputs(); - const inPorts = node2.getInputs(); - for (const outPort in outPorts) { - for (const inPort in inPorts) { - if (await node2.addPortLink(inPort, node1.getNodeId(), outPort)) { - autoConnections++; - break; + const autoConnectPorts = async () => { + // create automatic port connections + let autoConnections = 0; + const outPorts = node1.getOutputs(); + const inPorts = node2.getInputs(); + for (const outPort in outPorts) { + for (const inPort in inPorts) { + if (await node2.addPortLink(inPort, node1.getNodeId(), outPort)) { + autoConnections++; + break; + } } } + if (autoConnections) { + const flashMessenger = osparc.FlashMessenger.getInstance(); + flashMessenger.logAs(autoConnections + this.tr(" ports auto connected"), "INFO"); + } } - if (autoConnections) { - const flashMessenger = osparc.FlashMessenger.getInstance(); - flashMessenger.logAs(autoConnections + this.tr(" ports auto connected"), "INFO"); + if (node1.getMetadata() && node2.getMetadata()) { + autoConnectPorts(); + } else { + // wait for both metadata to be loaded + const onMetadataChanged = () => { + if (node1.getMetadata() && node2.getMetadata()) { + autoConnectPorts(); + } + }; + node1.addListenerOnce("changeMetadata", onMetadataChanged, this); + node2.addListenerOnce("changeMetadata", onMetadataChanged, this); } }, @@ -877,13 +953,14 @@ qx.Class.define("osparc.data.model.Node", { removeInputNode: function(inputNodeId) { const index = this.__inputNodes.indexOf(inputNodeId); - if (index > -1) { - // remove node connection - this.__inputNodes.splice(index, 1); - this.fireDataEvent("changeInputNodes"); - return true; + // make sure index is valid + if (index < 0 || index >= this.__inputNodes.length) { + return false; } - return false; + // remove node connection + this.__inputNodes.splice(index, 1); + this.fireEvent("changeInputNodes"); + return true; }, isInputNode: function(inputNodeId) { @@ -1082,7 +1159,7 @@ qx.Class.define("osparc.data.model.Node", { checkState: function() { if (this.isDynamic()) { - const metadata = this.getMetaData(); + const metadata = this.getMetadata(); const msg = "Starting " + metadata.key + ":" + metadata.version + "..."; const msgData = { nodeId: this.getNodeId(), @@ -1101,7 +1178,7 @@ qx.Class.define("osparc.data.model.Node", { stopDynamicService: function() { if (this.isDynamic()) { - const metadata = this.getMetaData(); + const metadata = this.getMetadata(); const msg = "Stopping " + metadata.key + ":" + metadata.version + "..."; const msgData = { nodeId: this.getNodeId(), @@ -1153,7 +1230,7 @@ qx.Class.define("osparc.data.model.Node", { this.__deleteInBackend() .then(() => { resolve(true); - this.removeIFrame(); + this.nodeRemoved(); }) .catch(err => { console.error(err); @@ -1162,6 +1239,10 @@ qx.Class.define("osparc.data.model.Node", { }); }, + nodeRemoved: function() { + this.removeIFrame(); + }, + __deleteInBackend: function() { // remove node in the backend const params = { @@ -1186,13 +1267,30 @@ qx.Class.define("osparc.data.model.Node", { }, setPosition: function(pos) { - const { - x, - y - } = pos; + const {x, y} = pos; + 
if (x === this.__posX && y === this.__posY) { + return; // no change + } + // keep positions positive this.__posX = parseInt(x) < 0 ? 0 : parseInt(x); this.__posY = parseInt(y) < 0 ? 0 : parseInt(y); + + const nodeId = this.getNodeId(); + this.fireDataEvent("projectDocumentChanged", { + "op": "replace", + "path": `/ui/workbench/${nodeId}/position`, + "value": { + "x": this.__posX, + "y": this.__posY, + }, + "osparc-resource": "ui", + }); + + this.fireDataEvent("changePosition", { + x: this.__posX, + y: this.__posY + }); }, getPosition: function() { @@ -1211,8 +1309,11 @@ qx.Class.define("osparc.data.model.Node", { if (newMetadata) { const value = this.__getInputData()["linspace_start"]; const label = this.getLabel(); - this.setKey(newMetadata["key"]); - this.populateWithMetadata(); + this.set({ + key: newMetadata["key"], + version: newMetadata["version"], + }); + this.setMetadata(newMetadata); this.populateNodeData(); this.setLabel(label); osparc.node.ParameterEditor.setParameterOutputValue(this, value); @@ -1224,12 +1325,15 @@ qx.Class.define("osparc.data.model.Node", { if (!["int"].includes(type)) { return; } - const metadata = osparc.store.Services.getLatest("simcore/services/frontend/data-iterator/int-range") - if (metadata) { + const newMetadata = osparc.store.Services.getLatest("simcore/services/frontend/data-iterator/int-range") + if (newMetadata) { const value = this.__getOutputData("out_1"); const label = this.getLabel(); - this.setKey(metadata["key"]); - this.populateWithMetadata(); + this.set({ + key: newMetadata["key"], + version: newMetadata["version"], + }); + this.setMetadata(newMetadata); this.populateNodeData(); this.setLabel(label); this.__setInputData({ @@ -1241,24 +1345,209 @@ qx.Class.define("osparc.data.model.Node", { } }, - serialize: function(clean = true) { + listenToChanges: function() { + const nodeId = this.getNodeId(); + const nodePropertyKeys = this.self().getProperties(); + this.self().ListenChangesProps.forEach(key => { + switch (key) { + case "inputs": + if (this.hasPropsForm()) { + // listen to changes in the props form + this.getPropsForm().addListener("changeData", () => { + const data = this.__getInputData(); + this.fireDataEvent("projectDocumentChanged", { + "op": "replace", + "path": `/workbench/${nodeId}/inputs`, + "value": data, + "osparc-resource": "node", + }); + }); + // listen to changes in link and unlink of ports + this.getPropsForm().addListener("linkFieldModified", () => { + const data = this.__getInputData(); + this.fireDataEvent("projectDocumentChanged", { + "op": "replace", + "path": `/workbench/${nodeId}/inputs`, + "value": data, + "osparc-resource": "node", + }); + }); + } + break; + case "inputsUnits": + if (this.hasPropsForm()) { + this.getPropsForm().addListener("unitChanged", () => { + const data = this.__getInputUnits(); + this.fireDataEvent("projectDocumentChanged", { + "op": "replace", + "path": `/workbench/${nodeId}/inputsUnits`, + "value": data, + "osparc-resource": "node", + }); + }); + } + break; + case "inputNodes": + this.addListener("changeInputNodes", () => { + const data = this.getInputNodes(); + this.fireDataEvent("projectDocumentChanged", { + "op": "replace", + "path": `/workbench/${nodeId}/inputNodes`, + "value": data, + "osparc-resource": "node", + }); + }, this); + break; + case "inputsRequired": + this.addListener("changeInputsRequired", () => { + const data = this.getInputsRequired(); + this.fireDataEvent("projectDocumentChanged", { + "op": "replace", + "path": `/workbench/${nodeId}/inputsRequired`, + "value": 
data, + "osparc-resource": "node", + }); + }, this); + break; + case "progress": + if (this.isFilePicker()) { + this.getStatus().addListener("changeProgress", e => { + const data = e.getData(); + this.fireDataEvent("projectDocumentChanged", { + "op": "replace", + "path": `/workbench/${nodeId}/progress`, + "value": data, + "osparc-resource": "node", + }); + }); + } + break; + case "outputs": + if (this.isFilePicker() || this.isParameter()) { + this.addListener("changeOutputs", e => { + let data = e.getData(); + if (this.isFilePicker()) { + data = osparc.file.FilePicker.serializeOutput(this.getOutputs()); + } else if (this.isParameter()) { + data = this.__getOutputsData(); + } + this.fireDataEvent("projectDocumentChanged", { + "op": "replace", + "path": `/workbench/${nodeId}/outputs`, + "value": data, + "osparc-resource": "node", + }); + }, this); + } + break; + default: + if (nodePropertyKeys.includes(key)) { + this.addListener("change" + qx.lang.String.firstUp(key), e => { + const data = e.getData(); + this.fireDataEvent("projectDocumentChanged", { + "op": "replace", + "path": `/workbench/${nodeId}/` + key, + "value": data, + "osparc-resource": "node", + }); + }, this); + } else { + console.error(`Property "${key}" is not a valid property of osparc.data.model.Node`); + } + break; + } + }); + }, + + updateNodeFromPatch: function(nodePatches) { + const nodePropertyKeys = this.self().getProperties(); + nodePatches.forEach(patch => { + const op = patch.op; + const path = patch.path; + const value = patch.value; + const nodeProperty = path.split("/")[3]; + switch (nodeProperty) { + case "inputs": { + const updatedPortKey = path.split("/")[4]; + const currentInputs = this.__getInputData(); + if (osparc.utils.Ports.isDataALink(currentInputs[updatedPortKey])) { + // if the port is a link, we remove it from the props form + this.getPropsForm().removePortLink(updatedPortKey); + } + currentInputs[updatedPortKey] = value; + this.__setInputData(currentInputs); + break; + } + case "inputsUnits": { + // this is never transmitted by the frontend + const updatedPortKey = path.split("/")[4]; + const currentInputUnits = this.__getInputUnits() || {}; + currentInputUnits[updatedPortKey] = value; + this.__setInputUnits(currentInputUnits); + break; + } + case "inputNodes": + if (op === "add") { + const inputNodeId = value; + this.fireDataEvent("edgeCreated", { + nodeId1: inputNodeId, + nodeId2: this.getNodeId(), + }); + } else if (op === "remove") { + // we don't have more information about the input node, so we just remove it by index + const index = path.split("/")[4]; + // make sure index is valid + if (index >= 0 && index < this.__inputNodes.length) { + this.fireDataEvent("edgeRemoved", { + nodeId1: this.__inputNodes[index], + nodeId2: this.getNodeId(), + }); + } + } + break; + case "inputsRequired": + console.warn(`To be implemented: patching ${nodeProperty} is not supported yet`); + break; + case "outputs": { + const updatedPortKey = path.split("/")[4]; + const currentOutputs = this.isFilePicker() ? 
osparc.file.FilePicker.serializeOutput(this.getOutputs()) : this.__getOutputsData(); + currentOutputs[updatedPortKey] = value; + this.setOutputData(currentOutputs); + break; + } + case "progress": + if (this.isFilePicker()) { + this.getStatus().setProgress(value); + } else { + console.warn(`To be implemented: patching ${nodeProperty} is not supported yet`); + } + break; + default: + if (nodePropertyKeys.includes(nodeProperty)) { + const setter = "set" + qx.lang.String.firstUp(nodeProperty); + if (this[setter]) { + this[setter](value); + } else { + console.warn(`Property "${nodeProperty}" does not have a setter in osparc.data.model.Node`); + } + } + break; + } + }); + }, + + serialize: function() { // node generic let nodeEntry = { key: this.getKey(), version: this.getVersion(), label: this.getLabel(), inputs: this.__getInputData(), - inputsUnits: this.__getInputUnits(), - inputAccess: this.getInputAccess(), + inputsUnits: this.__getInputUnits(), // this is not working inputNodes: this.getInputNodes(), inputsRequired: this.getInputsRequired(), bootOptions: this.getBootOptions() }; - if (!clean) { - nodeEntry.progress = this.getStatus().getProgress(); - nodeEntry.outputs = this.__getOutputsData(); - nodeEntry.state = this.getStatus().serialize(); - } if (this.isFilePicker()) { nodeEntry.outputs = osparc.file.FilePicker.serializeOutput(this.getOutputs()); @@ -1276,6 +1565,18 @@ qx.Class.define("osparc.data.model.Node", { } return filteredNodeEntry; - } + }, + + serializeUI: function() { + const uiInfo = {} + uiInfo["position"] = this.getPosition(); + const marker = this.getMarker(); + if (marker) { + uiInfo["marker"] = { + "color": marker.getColor() + }; + } + return uiInfo; + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/data/model/NodeLockState.js b/services/static-webserver/client/source/class/osparc/data/model/NodeLockState.js new file mode 100644 index 000000000000..c14add8bc80a --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/data/model/NodeLockState.js @@ -0,0 +1,73 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.data.model.NodeLockState", { + extend: qx.core.Object, + + construct: function() { + this.base(arguments); + + this.initCurrentUserGroupIds(); + this.initLocked(); + this.initStatus(); + }, + + properties: { + currentUserGroupIds: { + check: "Array", + init: [], + nullable: false, + event: "changeCurrentUserGroupIds", + }, + + locked: { + check: "Boolean", + init: false, + nullable: false, + event: "changeLocked", + }, + + status: { + // check: ["NOT_STARTED", "STARTED", "OPENED"], + check: "String", + init: "NOT_STARTED", + nullable: false, + event: "changeStatus", + } + }, + + members: { + stateReceived: function(state) { + if (state) { + this.set({ + currentUserGroupIds: "current_user_groupids" in state ? state["current_user_groupids"] : [], + locked: "locked" in state ? state["locked"] : false, + status: "status" in state ? 
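listenToChanges and updateNodeFromPatch are the two halves of the node-level sync introduced here: the first translates local edits into "replace" operations under /workbench/&lt;nodeId&gt;/..., the second applies incoming JSON Patch entries back onto the model. A round-trip sketch (fetchMetadataAndPopulate already calls listenToChanges, so it is not called again here):

// Outgoing side: local edits surface as patch operations on "projectDocumentChanged".
const outgoing = [];
node.addListener("projectDocumentChanged", e => outgoing.push(e.getData()), this);
node.setLabel("Renamed step");
// outgoing now holds:
// { "op": "replace", "path": "/workbench/<nodeId>/label", "value": "Renamed step", "osparc-resource": "node" }

// Incoming side: a peer applies the same operation to its own copy of the node.
peerNode.updateNodeFromPatch([{
  "op": "replace",
  "path": `/workbench/${peerNode.getNodeId()}/label`,
  "value": "Renamed step",
}]);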
state["status"] : "NOT_STARTED", + }); + } + }, + + isLockedBySomeoneElse: function() { + if (this.isLocked()) { + const currentUserGroupIds = this.getCurrentUserGroupIds(); + const myGroupId = osparc.auth.Data.getInstance().getGroupId(); + return !currentUserGroupIds.includes(myGroupId); + } + return false; + } + } +}); diff --git a/services/static-webserver/client/source/class/osparc/data/model/NodeProgressSequence.js b/services/static-webserver/client/source/class/osparc/data/model/NodeProgressSequence.js index bb67236294e7..56ee50c4a2a5 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/NodeProgressSequence.js +++ b/services/static-webserver/client/source/class/osparc/data/model/NodeProgressSequence.js @@ -229,7 +229,10 @@ qx.Class.define("osparc.data.model.NodeProgressSequence", { }, __initLayout: function() { - this.__mainLoadingPage = new qx.ui.container.Composite(new qx.ui.layout.VBox(8)); + this.__mainLoadingPage = new qx.ui.container.Composite(new qx.ui.layout.VBox(8)).set({ + maxWidth: 400, + decorator: "rounded", + }); const sequenceLoadingPage = new osparc.widget.ProgressSequence(qx.locale.Manager.tr("LOADING ...")); const nTasks = 7; diff --git a/services/static-webserver/client/source/class/osparc/data/model/NodeStatus.js b/services/static-webserver/client/source/class/osparc/data/model/NodeStatus.js index 9f6259e2b376..ff58b7ddd827 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/NodeStatus.js +++ b/services/static-webserver/client/source/class/osparc/data/model/NodeStatus.js @@ -26,19 +26,18 @@ qx.Class.define("osparc.data.model.NodeStatus", { construct: function(node) { this.base(arguments); - this.setNode(node); + const lockState = new osparc.data.model.NodeLockState(); + this.setLockState(lockState); - if (node.isDynamic()) { - const progressSequence = new osparc.data.model.NodeProgressSequence(); - this.setProgressSequence(progressSequence); - } + this.setNode(node); }, properties: { node: { check: "osparc.data.model.Node", init: null, - nullable: false + nullable: false, + apply: "__applyNode", }, progress: { @@ -97,6 +96,13 @@ qx.Class.define("osparc.data.model.NodeStatus", { hasOutputs: { check: "Boolean", init: false + }, + + lockState: { + check: "osparc.data.model.NodeLockState", + nullable: true, + init: null, + event: "changeLockState" } }, @@ -142,6 +148,20 @@ qx.Class.define("osparc.data.model.NodeStatus", { }, members: { + __applyNode: function(node) { + const initNode = () => { + if (node.isDynamic()) { + const progressSequence = new osparc.data.model.NodeProgressSequence(); + this.setProgressSequence(progressSequence); + } + }; + if (node.getMetadata()) { + initNode(); + } else { + node.addListenerOnce("changeMetadata", () => initNode(), this); + } + }, + __transformProgress: function(value) { const oldP = this.getProgress(); if (this.getNode().isFilePicker() && oldP === 100 && value !== 0 && value !== 100) { @@ -173,12 +193,11 @@ qx.Class.define("osparc.data.model.NodeStatus", { const compRunning = this.getRunning(); const hasOutputs = this.getHasOutputs(); const modified = this.getModified(); - const hasDependencies = this.hasDependencies(); if (["PUBLISHED", "PENDING", "WAITING_FOR_RESOURCES", "WAITING_FOR_CLUSTER", "STARTED"].includes(compRunning)) { this.setOutput("busy"); } else if ([null, false].includes(hasOutputs)) { this.setOutput("not-available"); - } else if (hasOutputs && (modified || hasDependencies)) { + } else if (hasOutputs && modified) { this.setOutput("out-of-date"); } else if 
(hasOutputs && !modified) { this.setOutput("up-to-date"); @@ -198,27 +217,10 @@ qx.Class.define("osparc.data.model.NodeStatus", { // currentStatus is only applicable to computational services this.setRunning(state.currentStatus); } - if ("modified" in state) { - if (this.getHasOutputs()) { - // File Picker can't have a modified output - this.setModified((state.modified || this.hasDependencies()) && !this.getNode().isFilePicker()); - } else { - this.setModified(null); - } + this.setModified("modified" in state ? state.modified : null); + if ("lock_state" in state) { + this.getLockState().stateReceived(state.lock_state); } }, - - serialize: function() { - const state = {}; - state["dependencies"] = this.getDependencies() ? this.getDependencies() : []; - if (this.getNode().isComputational()) { - state["currentStatus"] = this.getRunning(); - } - state["modified"] = null; - // File Picker can't have a modified output - if (this.getHasOutputs() && !this.getNode().isFilePicker()) { - state["modified"] = this.hasDependencies(); - } - } } }); diff --git a/services/static-webserver/client/source/class/osparc/data/model/NodeUnknown.js b/services/static-webserver/client/source/class/osparc/data/model/NodeUnknown.js new file mode 100644 index 000000000000..8eae34e51fd3 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/data/model/NodeUnknown.js @@ -0,0 +1,57 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +/** + * Class that stores Node data without a known metadata. 
+ */ + +qx.Class.define("osparc.data.model.NodeUnknown", { + extend: osparc.data.model.Node, + + /** + * @param study {osparc.data.model.Study} Study or Serialized Study Object + * @param key {String} service's key + * @param version {String} service's version + * @param nodeId {String} uuid of the service represented by the node (not needed for new Nodes) + */ + construct: function(study, key, version, nodeId) { + // use the unknown metadata + const metadata = osparc.store.Services.getUnknownServiceMetadata(); + this.base(arguments, study, metadata, nodeId); + + // but keep the original key and version + if (key && version) { + this.set({ + key, + version, + }); + } + }, + + members: { + // override + serialize: function() { + /* + if (this.getKey() === osparc.store.Services.UNKNOWN_SERVICE_KEY) { + return null; + } + */ + + return null; + } + } +}); diff --git a/services/static-webserver/client/source/class/osparc/data/model/Service.js b/services/static-webserver/client/source/class/osparc/data/model/Service.js index d7f37db2e00f..b278e3e253fa 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Service.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Service.js @@ -31,18 +31,18 @@ qx.Class.define("osparc.data.model.Service", { this.set({ key: serviceData.key, version: serviceData.version, - versionDisplay: serviceData.versionDisplay, + versionDisplay: serviceData.versionDisplay || null, name: serviceData.name, - description: serviceData.description, - thumbnail: serviceData.thumbnail, - serviceType: serviceData.type, - contact: serviceData.contact, - authors: serviceData.authors, - owner: serviceData.owner || "", + description: serviceData.description || null, + thumbnail: serviceData.thumbnail || null, + serviceType: serviceData.type || null, + contact: serviceData.contact || null, + authors: serviceData.authors || null, + owner: serviceData.owner || null, accessRights: serviceData.accessRights, - bootOptions: serviceData.bootOptions, + bootOptions: serviceData.bootOptions || null, classifiers: serviceData.classifiers || [], - quality: serviceData.quality || null, + quality: serviceData.quality || {}, xType: serviceData.xType || null, hits: serviceData.hits || 0, }); @@ -147,7 +147,6 @@ qx.Class.define("osparc.data.model.Service", { nullable: false }, - // ------ ignore for serializing ------ xType: { check: "String", nullable: true, @@ -160,30 +159,15 @@ qx.Class.define("osparc.data.model.Service", { init: 0, event: "changeHits", nullable: false - } - // ------ ignore for serializing ------ + }, }, statics: { - IgnoreSerializationProps: [ - "xType", - "hits", - ] + canIWrite: function(serviceAccessRights) { + const groupsStore = osparc.store.Groups.getInstance(); + const orgIDs = groupsStore.getOrganizationIds(); + orgIDs.push(groupsStore.getMyGroupId()); + return osparc.share.CollaboratorsService.canGroupsWrite(serviceAccessRights, orgIDs); + }, }, - - members: { - __serviceData: null, - - serialize: function() { - let jsonObject = {}; - const propertyKeys = this.self().getProperties(); - propertyKeys.forEach(key => { - if (this.self().IgnoreSerializationProps.includes(key)) { - return; - } - jsonObject[key] = this.get(key); - }); - return jsonObject; - }, - } }); diff --git a/services/static-webserver/client/source/class/osparc/data/model/Study.js b/services/static-webserver/client/source/class/osparc/data/model/Study.js index 95a8eba42ab9..081924b2a58c 100644 --- 
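NodeLockState mirrors the lock information the backend embeds in the node state; NodeStatus.setState forwards it via stateReceived, so widgets such as the new NodeLockedPage only need to observe changeLocked. A sketch, assuming the backend payload uses the snake_case keys read in stateReceived above (the group id is a made-up example):

// Sketch: react to another user opening the same dynamic service.
const lockState = node.getStatus().getLockState();
lockState.addListener("changeLocked", () => {
  if (lockState.isLockedBySomeoneElse()) {
    // e.g. switch from the iframe to the locked page; IframeHandler listens to the same change
  }
}, this);

// Illustrative payload as it would arrive through NodeStatus.setState:
node.getStatus().setState({
  "lock_state": {
    "current_user_groupids": [1234],   // hypothetical peer group id
    "locked": true,
    "status": "OPENED",
  },
});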
a/services/static-webserver/client/source/class/osparc/data/model/Study.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Study.js @@ -44,8 +44,8 @@ qx.Class.define("osparc.data.model.Study", { this.set({ uuid: studyData.uuid || this.getUuid(), - workspaceId: studyData.workspaceId || null, - folderId: studyData.folderId || null, + workspaceId: studyData.workspaceId || this.getWorkspaceId(), + folderId: studyData.folderId || this.getFolderId(), name: studyData.name || this.getName(), description: studyData.description || this.getDescription(), thumbnail: studyData.thumbnail || this.getThumbnail(), @@ -60,9 +60,9 @@ qx.Class.define("osparc.data.model.Study", { permalink: studyData.permalink || this.getPermalink(), dev: studyData.dev || this.getDev(), trashedAt: studyData.trashedAt ? new Date(studyData.trashedAt) : this.getTrashedAt(), - trashedBy: studyData.trashedBy || null, - type: studyData.type, - templateType: studyData.templateType, + trashedBy: studyData.trashedBy || this.getTrashedBy(), + type: studyData.type || this.getType(), + templateType: studyData.templateType || this.getTemplateType(), }); const wbData = studyData.workbench || this.getWorkbench(); @@ -70,8 +70,8 @@ qx.Class.define("osparc.data.model.Study", { this.setWorkbench(workbench); workbench.setStudy(this); - const workbenchUi = new osparc.data.model.StudyUI(studyData.ui); - this.setUi(workbenchUi); + const studyUI = new osparc.data.model.StudyUI(studyData.ui); + this.setUi(studyUI); this.getWorkbench().buildWorkbench(); }, @@ -143,7 +143,7 @@ qx.Class.define("osparc.data.model.Study", { thumbnail: { check: "String", - nullable: false, + nullable: true, event: "changeThumbnail", init: "" }, @@ -215,7 +215,6 @@ qx.Class.define("osparc.data.model.Study", { event: "changeTemplateType" }, - // ------ ignore for serializing ------ state: { check: "Object", nullable: true, @@ -249,20 +248,58 @@ qx.Class.define("osparc.data.model.Study", { init: null, event: "changeTrashedBy", }, - // ------ ignore for serializing ------ + + savePending: { + check: "Boolean", + nullable: true, + event: "changeSavePending", + init: false + }, + }, + + events: { + "projectDocumentChanged": "qx.event.type.Data", }, statics: { + // Properties of the Study class that should not be listened to + ListenChangesProps: [ + // "uuid", // immutable + // "workspaceId", // own patch + // "folderId", // own patch + "name", + "description", + // "prjOwner", // immutable + // "accessRights", // own patch + // "creationDate", // immutable + // "lastChangeDate", // backend sets it + "thumbnail", + "workbench", // own listener + "ui", // own listener + // "tags", // own patch + // "classifiers", // own patch + // "quality", // own patch + // "permalink", // backend sets it + "dev", + // "type", // immutable + "templateType", + // "state", // backend sets it + // "pipelineRunning", // backend sets it + // "readOnly", // frontend only + // "trashedAt", // backend sets it + // "trashedBy", // backend sets it + // "savePending", // frontend only + ], + + // Properties of the Study class that should not be serialized + // when serializing the study object to send it to the backend IgnoreSerializationProps: [ "permalink", "state", "pipelineRunning", "readOnly", "trashedAt", - ], - - IgnoreModelizationProps: [ - "dev" + "savePending", ], OwnPatch: [ @@ -353,21 +390,46 @@ qx.Class.define("osparc.data.model.Study", { } return overallProgress/nCompNodes; }, - - isRunning: function(state) { - return [ - "PUBLISHED", - "PENDING", - 
"WAITING_FOR_RESOURCES", - "WAITING_FOR_CLUSTER", - "STARTED", - "RETRY" - ].includes(state); - }, }, members: { - serialize: function(clean = true) { + listenToChanges: function() { + const propertyKeys = this.self().getProperties(); + this.self().ListenChangesProps.forEach(key => { + switch (key) { + case "workbench": + this.getWorkbench().addListener("projectDocumentChanged", e => { + const data = e.getData(); + this.fireDataEvent("projectDocumentChanged", data); + }, this); + break; + case "ui": + this.getUi().listenToChanges(); + this.getUi().addListener("projectDocumentChanged", e => { + const data = e.getData(); + this.fireDataEvent("projectDocumentChanged", data); + }, this); + break; + default: + if (propertyKeys.includes(key)) { + this.addListener("change" + qx.lang.String.firstUp(key), e => { + const data = e.getData(); + this.fireDataEvent("projectDocumentChanged", { + "op": "replace", + "path": "/" + key, + "value": data, + "osparc-resource": "study", + }); + }, this); + } else { + console.error(`Property "${key}" is not a valid property of osparc.data.model.Study`); + } + break; + } + }); + }, + + serialize: function() { let jsonObject = {}; const propertyKeys = this.self().getProperties(); propertyKeys.forEach(key => { @@ -375,7 +437,7 @@ qx.Class.define("osparc.data.model.Study", { return; } if (key === "workbench") { - jsonObject[key] = this.getWorkbench().serialize(clean); + jsonObject[key] = this.getWorkbench().serialize(); return; } if (key === "ui") { @@ -511,19 +573,18 @@ qx.Class.define("osparc.data.model.Study", { // Do not listen to output related backend updates if the node is a frontend node. // The frontend controls its output values, progress and states. // If a File Picker is uploading a file, the backend could override the current state with some older state. 
- if (node && nodeData && !osparc.data.model.Node.isFrontend(node.getMetaData())) { - node.setOutputData(nodeData.outputs); - if ("progress" in nodeData) { - const progress = Number.parseInt(nodeData["progress"]); - node.getStatus().setProgress(progress); + if (node) { + if (nodeData && !osparc.data.model.Node.isFrontend(node.getMetadata())) { + node.setOutputData(nodeData.outputs); + node.populateProgress(nodeData); + node.populateState(nodeData); + } + if ("errors" in nodeUpdatedData) { + const errors = nodeUpdatedData["errors"]; + node.setErrors(errors); + } else { + node.setErrors([]); } - node.populateStates(nodeData); - } - if (node && "errors" in nodeUpdatedData) { - const errors = nodeUpdatedData["errors"]; - node.setErrors(errors); - } else { - node.setErrors([]); } }, @@ -543,8 +604,8 @@ qx.Class.define("osparc.data.model.Study", { }, isLocked: function() { - if (this.getState() && "locked" in this.getState()) { - return this.getState()["locked"]["value"]; + if (this.getState()) { + return osparc.study.Utils.state.isProjectLocked(this.getState()); } return false; }, @@ -567,33 +628,21 @@ qx.Class.define("osparc.data.model.Study", { }, __applyState: function(value) { - if (value && "state" in value) { - const isRunning = this.self().isRunning(value["state"]["value"]); - this.setPipelineRunning(isRunning); - } else { - this.setPipelineRunning(false); - } + this.setPipelineRunning(osparc.study.Utils.state.isPipelineRunning(value)); }, getDisableServiceAutoStart: function() { if ("disableServiceAutoStart" in this.getDev()) { return this.getDev()["disableServiceAutoStart"]; } - return null; + return false; }, openStudy: function() { - const params = { - url: { - "studyId": this.getUuid() - }, - data: osparc.utils.Utils.getClientSessionID() - }; - if (this.getDisableServiceAutoStart() !== null) { - params["url"]["disableServiceAutoStart"] = this.getDisableServiceAutoStart(); - return osparc.data.Resources.fetch("studies", "openDisableAutoStart", params); + if (this.getDisableServiceAutoStart()) { + return osparc.store.Study.getInstance().openStudy(this.getUuid(), false); } - return osparc.data.Resources.fetch("studies", "open", params); + return osparc.store.Study.getInstance().openStudy(this.getUuid()); }, stopStudy: function() { @@ -672,13 +721,7 @@ qx.Class.define("osparc.data.model.Study", { } return new Promise((resolve, reject) => { - const params = { - url: { - "studyId": this.getUuid() - }, - data: studyChanges - }; - osparc.data.Resources.fetch("studies", "patch", params) + osparc.store.Study.getInstance().patchStudy(this.getUuid(), studyChanges) .then(() => { Object.keys(studyChanges).forEach(fieldKey => { const upKey = qx.lang.String.firstUp(fieldKey); @@ -699,44 +742,80 @@ qx.Class.define("osparc.data.model.Study", { /** * Call patch Study, but the changes were already applied on the frontend * @param studyDiffs {Object} Diff Object coming from the JsonDiffPatch lib. Use only the keys, not the changes. + * @param studySource {Object} Study object that was used to check the diffs on the frontend. 
*/ - patchStudyDelayed: function(studyDiffs) { + patchStudyDiffs: function(studyDiffs, studySource) { const promises = []; - let workbenchDiffs = {}; if ("workbench" in studyDiffs) { - workbenchDiffs = studyDiffs["workbench"]; - promises.push(this.getWorkbench().patchWorkbenchDelayed(workbenchDiffs)); + promises.push(this.getWorkbench().patchWorkbenchDiffs(studyDiffs["workbench"], studySource["workbench"])); delete studyDiffs["workbench"]; } - const fieldKeys = Object.keys(studyDiffs); - if (fieldKeys.length) { - const patchData = {}; - const params = { - url: { - "studyId": this.getUuid() - }, - data: patchData - }; - fieldKeys.forEach(fieldKey => { - if (fieldKey === "ui") { - patchData[fieldKey] = this.getUi().serialize(); + const changedFields = Object.keys(studyDiffs); + if (changedFields.length) { + changedFields.forEach(changedField => { + // OM: can this be called all together? + const patchData = {}; + if (changedField === "ui") { + patchData[changedField] = this.getUi().serialize(); } else { - const upKey = qx.lang.String.firstUp(fieldKey); + const upKey = qx.lang.String.firstUp(changedField); const getter = "get" + upKey; - patchData[fieldKey] = this[getter](studyDiffs[fieldKey]); + patchData[changedField] = this[getter](studyDiffs[changedField]); } - promises.push(osparc.data.Resources.fetch("studies", "patch", params)) + promises.push(osparc.store.Study.getInstance().patchStudy(this.getUuid(), patchData)) }); } return Promise.all(promises) .then(() => { - // A bit hacky, but it's not sent back to the backend - this.set({ - lastChangeDate: new Date() - }); - const studyData = this.serialize(); - return studyData; + return studySource; }); - } + }, + + // unused in favor of updateStudyFromPatches + updateStudyFromDiff: function(studyDiffs) { + const studyPropertyKeys = this.self().getProperties(); + studyPropertyKeys.forEach(studyPropertyKey => { + if (studyPropertyKey in studyDiffs) { + const newValue = studyDiffs[studyPropertyKey][1]; + if ("lastChangeDate" === studyPropertyKey) { + this.setLastChangeDate(new Date(newValue)); + } else { + const upKey = qx.lang.String.firstUp(studyPropertyKey); + const setter = "set" + upKey; + this[setter](newValue); + } + delete studyDiffs[studyPropertyKey]; + } + }); + }, + + // json PATCHES + updateStudyFromPatches: function(studyPatches) { + const studyPropertyKeys = this.self().getProperties(); + studyPatches.forEach(patch => { + const op = patch.op; + const path = patch.path; + const value = patch.value; + switch (op) { + case "replace": + const studyProperty = path.substring(1); // remove the leading "/" + if (studyPropertyKeys.includes(studyProperty)) { + if (path === "/lastChangeDate") { + this.setLastChangeDate(new Date(value)); + } else { + const setter = "set" + qx.lang.String.firstUp(studyProperty); + if (this[setter]) { + this[setter](value); + } else { + console.warn(`Property "${studyProperty}" does not have a setter in osparc.data.model.Study`); + } + } + } + break; + default: + console.warn(`Unhandled patch operation "${op}" for path "${path}" with value "${value}"`); + } + }); + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/data/model/StudyUI.js b/services/static-webserver/client/source/class/osparc/data/model/StudyUI.js index 7bf1d260e76f..c3a13bdd0fb4 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/StudyUI.js +++ b/services/static-webserver/client/source/class/osparc/data/model/StudyUI.js @@ -34,12 +34,22 @@ qx.Class.define("osparc.data.model.StudyUI", { 
currentNodeId: studyDataUI && studyDataUI.currentNodeId ? studyDataUI.currentNodeId : this.initCurrentNodeId(), mode: studyDataUI && studyDataUI.mode ? studyDataUI.mode : this.initMode(), annotations: {}, - templateType: studyDataUI && studyDataUI.templateType ? studyDataUI.templateType : null, }); - if ("annotations" in studyDataUI) { - this.__annotationsInitData = studyDataUI["annotations"]; + if (studyDataUI["annotations"]) { + Object.entries(studyDataUI["annotations"]).forEach(([annotationId, annotationData]) => { + this.addAnnotation(annotationData, annotationId); + }); } + + this.getSlideshow().addListener("changeSlideshow", () => { + this.fireDataEvent("projectDocumentChanged", { + "op": "replace", + "path": "/ui/slideshow", + "value": this.getSlideshow().serialize(), + "osparc-resource": "study-ui", + }); + }, this); }, properties: { @@ -81,13 +91,12 @@ qx.Class.define("osparc.data.model.StudyUI", { init: {}, nullable: true }, + }, - templateType: { - check: [null, "hypertool", "tutorial", "template"], - init: null, - nullable: true, - event: "changeTemplateType", - }, + events: { + "projectDocumentChanged": "qx.event.type.Data", + "annotationAdded": "qx.event.type.Data", + "annotationRemoved": "qx.event.type.Data", }, statics: { @@ -96,31 +105,51 @@ qx.Class.define("osparc.data.model.StudyUI", { HYPERTOOL_TYPE: "HYPERTOOL", HYPERTOOL_ICON: "https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/refs/heads/main/app/icons/hypertool.png", PIPELINE_ICON: "https://raw.githubusercontent.com/ZurichMedTech/s4l-assets/refs/heads/main/app/icons/diagram.png", + + ListenChangesProps: [ + // "workbench", it's handled by osparc.data.model.Workbench + "slideshow", + "currentNodeId", // eventually don't patch it, it is personal, only the last closing sets it + "mode", // eventually don't patch it, it is personal, only the last closing sets it + "annotations", // TODO + ], }, members: { - __annotationsInitData: null, - __applyMode: function(mode) { if (mode === "guided") { this.setMode("app"); } }, - getAnnotationsInitData: function() { - return this.__annotationsInitData; - }, - - nullAnnotationsInitData: function() { - this.__annotationsInitData = null; - }, - - addAnnotation: function(annotation) { + addAnnotation: function(annotationData, annotationId) { + const annotation = new osparc.workbench.Annotation(annotationData, annotationId); this.getAnnotations()[annotation.getId()] = annotation; + this.fireDataEvent("projectDocumentChanged", { + "op": "add", + "path": `/ui/annotations/${annotation.getId()}`, + "value": annotation.serialize(), + "osparc-resource": "study-ui", + }); + annotation.addListener("annotationChanged", () => { + this.fireDataEvent("projectDocumentChanged", { + "op": "replace", + "path": `/ui/annotations/${annotation.getId()}`, + "value": annotation.serialize(), + "osparc-resource": "study-ui", + }); + }, this); + return annotation; }, removeAnnotation: function(annotationId) { if (annotationId in this.getAnnotations()) { + const annotation = this.getAnnotations()[annotationId] + this.fireDataEvent("projectDocumentChanged", { + "op": "remove", + "path": `/ui/annotations/${annotation.getId()}`, + "osparc-resource": "study-ui", + }); delete this.getAnnotations()[annotationId]; } }, @@ -130,6 +159,288 @@ qx.Class.define("osparc.data.model.StudyUI", { this.getSlideshow().removeNode(nodeId); }, + // unused in favor of updateUiFromPatches + updateUiFromDiff: function(uiDiff) { + if (uiDiff["workbench"]) { + const currentStudy = 
osparc.store.Store.getInstance().getCurrentStudy(); + if (currentStudy) { + Object.keys(uiDiff["workbench"]).forEach(nodeId => { + const node = currentStudy.getWorkbench().getNode(nodeId); + if ("position" in uiDiff["workbench"][nodeId]) { + const positionDiff = uiDiff["workbench"][nodeId]["position"]; + this.__updateNodePositionFromDiff(node, positionDiff); + } + if ("marker" in uiDiff["workbench"][nodeId]) { + const markerDiff = uiDiff["workbench"][nodeId]["marker"]; + this.__updateNodeMarkerFromDiff(node, markerDiff); + } + }); + } + } + if (uiDiff["annotations"]) { + const annotationsDiff = uiDiff["annotations"]; + this.__updateAnnotationsFromDiff(annotationsDiff); + } + }, + + __updateNodePositionFromDiff: function(node, positionDiff) { + if (node) { + const newPos = node.getPosition(); + if ("x" in positionDiff) { + newPos.x = positionDiff["x"][1]; + } + if ("y" in positionDiff) { + newPos.y = positionDiff["y"][1]; + } + node.setPosition(newPos); + } + }, + + __updateNodeMarkerFromDiff: function(node, markerDiff) { + if (node) { + if (markerDiff instanceof Array) { + if (markerDiff.length === 2 && markerDiff[1] === null) { + // it was removed + node.setMarker(null); + } else if (markerDiff.length === 1) { + // it was added + node.addMarker(markerDiff[0]); + } + } else if ("color" in markerDiff && markerDiff["color"] instanceof Array) { + // it was updated + const newColor = markerDiff["color"][1]; + node.getMarker().setColor(newColor); + } + } + }, + + __updateAnnotationAttributesFromDiff: function(annotation, attributesDiff) { + if (annotation) { + const newPos = annotation.getPosition(); + if ("x" in attributesDiff) { + newPos.x = attributesDiff["x"][1]; + } + if ("y" in attributesDiff) { + newPos.y = attributesDiff["y"][1]; + } + annotation.setPosition(newPos.x, newPos.y); + + if ("fontSize" in attributesDiff) { + annotation.setFontSize(attributesDiff["fontSize"][1]); + } + if ("text" in attributesDiff) { + annotation.setText(attributesDiff["text"][1]); + } + } + }, + + __updateAnnotationsFromDiff: function(annotationsDiff) { + // check if annotation data is an object or an array + const annotations = this.getAnnotations(); + if (annotationsDiff instanceof Array) { + // from or to empty annotations + if (annotationsDiff.length === 2) { + if (annotationsDiff[0] === null) { + // first annotation(s) was added + const annotationsData = annotationsDiff[1]; + Object.entries(annotationsData).forEach(([annotationId, annotationData]) => { + const annotation = this.addAnnotation(annotationData, annotationId); + this.fireDataEvent("annotationAdded", annotation); + }); + } else if (annotationsDiff[1] === null) { + // all annotations were removed + const removedAnnotationsData = annotationsDiff[0]; + Object.keys(removedAnnotationsData).forEach(annotationId => { + this.removeAnnotation(annotationId); + this.fireDataEvent("annotationRemoved", annotationId); + }); + } + } + } else if (annotationsDiff instanceof Object) { + Object.entries(annotationsDiff).forEach(([annotationId, annotationDiff]) => { + if (annotationDiff instanceof Array) { + if (annotationDiff.length === 1) { + // it was added + const annotation = this.addAnnotation(annotationDiff[0], annotationId); + this.fireDataEvent("annotationAdded", annotation); + } else if (annotationDiff.length === 3 && annotationDiff[1] === 0) { + // it was removed + this.removeAnnotation(annotationId); + this.fireDataEvent("annotationRemoved", annotationId); + } + } else if (annotationDiff instanceof Object) { + // it was updated + if (annotationId in 
annotations) { + const annotation = annotations[annotationId]; + if ("attributes" in annotationDiff) { + this.__updateAnnotationAttributesFromDiff(annotation, annotationDiff["attributes"]); + } + if ("color" in annotationDiff) { + annotation.setColor(annotationDiff["color"][1]); + } + } else { + console.warn(`Annotation with id ${annotationId} not found`); + } + } + }); + } + }, + + updateUiFromPatches: function(uiPatches) { + uiPatches.forEach(patch => { + const path = patch.path; + if (path.startsWith("/ui/workbench/")) { + const nodeId = path.split("/")[3]; + const currentStudy = osparc.store.Store.getInstance().getCurrentStudy(); + if (currentStudy) { + const node = currentStudy.getWorkbench().getNode(nodeId); + if (path.includes("/position")) { + this.__updateNodePositionFromPatch(node, patch); + } + if (path.includes("/marker")) { + this.__updateNodeMarkerFromPatch(node, patch); + } + } + } else if (path.startsWith("/ui/annotations")) { + this.__updateAnnotationFromPatch(patch); + } + }); + }, + + __updateNodePositionFromPatch: function(node, patch) { + if (node) { + const op = patch.op; + const path = patch.path; + const value = patch.value; + if (op === "replace") { + const newPos = node.getPosition(); + if (path.includes("/position/x")) { + newPos.x = value; + } + if (path.includes("/position/y")) { + newPos.y = value; + } + node.setPosition(newPos); + } + } + }, + + __updateNodeMarkerFromPatch: function(node, patch) { + if (node) { + const op = patch.op; + const path = patch.path; + const value = patch.value; + if (op === "delete" || value === null) { + // it was removed + node.setMarker(null); + } else if (op === "add") { + // it was added + node.addMarker(value); + } else if (op === "replace" && path.includes("/color")) { + // it was updated + if (node.getMarker()) { + node.getMarker().setColor(value); + } + } + } + }, + + __updateAnnotationFromPatch: function(patch) { + const op = patch.op; + const path = patch.path; + const value = patch.value; + let annotationId = path.split("/")[3]; + switch (op) { + case "add": { + const annotation = this.addAnnotation(value, annotationId); + this.fireDataEvent("annotationAdded", annotation); + break; + } + case "remove": + this.removeAnnotation(annotationId); + this.fireDataEvent("annotationRemoved", annotationId); + break; + case "replace": + if (annotationId && annotationId in this.getAnnotations()) { + const annotation = this.getAnnotations()[annotationId]; + if (annotation) { + if (path.includes("/color")) { + annotation.setColor(value); + } else if (path.includes("/attributes")) { + this.__updateAnnotationAttributesFromPatch(annotation, path, value); + } + } + } else { + // the first (add) or last (remove) annotation will fall here + if (value && Object.keys(value).length) { + // first added + annotationId = Object.keys(value)[0]; + const annotationData = Object.values(value)[0]; + const annotation = this.addAnnotation(annotationData, annotationId); + this.fireDataEvent("annotationAdded", annotation); + } else { + // last removed + const currentIds = Object.keys(this.getAnnotations()); + if (currentIds.length === 1) { + annotationId = currentIds[0]; + this.removeAnnotation(annotationId); + this.fireDataEvent("annotationRemoved", annotationId); + } + } + } + break; + } + }, + + __updateAnnotationAttributesFromPatch: function(annotation, path, value) { + if (annotation) { + const attribute = path.split("/")[5]; + switch (attribute) { + case "x": { + const newPos = annotation.getPosition(); + newPos.x = value; + 
annotation.setPosition(newPos.x, newPos.y); + break; + } + case "y": { + const newPos = annotation.getPosition(); + newPos.y = value; + annotation.setPosition(newPos.x, newPos.y); + break; + } + case "fontSize": + annotation.setFontSize(value); + break; + case "text": + annotation.setText(value); + break; + } + } + }, + + listenToChanges: function() { + const propertyKeys = Object.keys(qx.util.PropertyUtil.getProperties(osparc.data.model.StudyUI)); + this.self().ListenChangesProps.forEach(key => { + switch (key) { + default: + if (propertyKeys.includes(key)) { + this.addListener(`change${qx.lang.String.firstUp(key)}`, () => { + const data = this.serialize(); + this.fireDataEvent("projectDocumentChanged", { + "op": "replace", + "path": `/ui/${key}`, + "value": data, + "osparc-resource": "study-ui", + }); + }, this); + } else { + console.error(`Property "${key}" is not a valid property of osparc.data.model.StudyUI`); + } + break; + } + }); + }, + serialize: function() { const currentStudy = osparc.store.Store.getInstance().getCurrentStudy(); let jsonObject = {}; @@ -137,6 +448,7 @@ qx.Class.define("osparc.data.model.StudyUI", { jsonObject["slideshow"] = this.getSlideshow().serialize(); jsonObject["currentNodeId"] = this.getCurrentNodeId() || ""; jsonObject["mode"] = this.getMode(); + jsonObject["annotations"] = null; const annotations = this.getAnnotations(); if (Object.keys(annotations).length) { jsonObject["annotations"] = {}; diff --git a/services/static-webserver/client/source/class/osparc/data/model/User.js b/services/static-webserver/client/source/class/osparc/data/model/User.js index 47d665f847d1..2e3c2451ce1f 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/User.js +++ b/services/static-webserver/client/source/class/osparc/data/model/User.js @@ -30,7 +30,7 @@ qx.Class.define("osparc.data.model.User", { const userId = ("id" in userData) ? parseInt(userData["id"]) : parseInt(userData["userId"]); const groupId = ("gid" in userData) ? parseInt(userData["gid"]) : parseInt(userData["groupId"]); - const username = userData["userName"] || "-"; + const userName = userData["userName"] || "-"; const email = ("login" in userData) ? 
userData["login"] : userData["email"]; let firstName = ""; if (userData["first_name"]) { @@ -44,25 +44,32 @@ qx.Class.define("osparc.data.model.User", { } else if (userData["lastName"]) { lastName = userData["lastName"]; } - let description = [firstName, lastName].join(" ").trim(); // the null values will be replaced by empty strings - if (email) { - if (description) { - description += " - " - } - description += email; - } - const thumbnail = osparc.utils.Avatar.emailToThumbnail(email, username); + this.set({ userId, groupId, - username, + userName, firstName, lastName, email, - thumbnail, + phone: userData["phone"] || null, + }); + + const description = osparc.data.model.User.userDataToDescription(firstName, lastName, email); + this.set({ label: userData["userName"] || description, description, }); + + if (userData["contact"]) { + const contactData = userData["contact"]; + this.setContactData(contactData); + } + + // create the thumbnail after setting email and userName + this.set({ + thumbnail: this.createThumbnail(), + }); }, properties: { @@ -94,11 +101,11 @@ qx.Class.define("osparc.data.model.User", { event: "changeDescription", }, - username: { + userName: { check: "String", nullable: false, init: null, - event: "changeUsername", + event: "changeUserName", }, firstName: { @@ -122,11 +129,98 @@ qx.Class.define("osparc.data.model.User", { event: "changeEmail", }, + phone: { + check: "String", + nullable: true, + init: null, + event: "changePhone", + }, + thumbnail: { check: "String", nullable: true, init: "", event: "changeThumbnail", }, + + institution: { + check: "String", + nullable: true, + init: null, + event: "changeInstitution", + }, + + address: { + check: "String", + nullable: true, + init: null, + event: "changeAddress", + }, + + city: { + check: "String", + nullable: true, + init: null, + event: "changeCity", + }, + + state: { + check: "String", + nullable: true, + init: null, + event: "changeState", + }, + + country: { + check: "String", + nullable: true, + init: null, + event: "changeCountry", + }, + + postalCode: { + check: "String", + nullable: true, + init: null, + event: "changePostalCode", + }, + }, + + statics: { + concatFullName: function(firstName, lastName) { + return [firstName, lastName].filter(Boolean).join(" "); + }, + + userDataToDescription: function(firstName, lastName, email) { + let description = this.concatFullName(firstName, lastName); + if (email) { + if (description) { + description += " - " + } + description += email; + } + return description; + }, + }, + + members: { + createThumbnail: function(size) { + return osparc.utils.Avatar.emailToThumbnail(this.getEmail(), this.getUserName(), size); + }, + + getFullName: function() { + return this.self().concatFullName(this.getFirstName(), this.getLastName()); + }, + + setContactData: function(contactData) { + this.set({ + institution: contactData["institution"] || null, + address: contactData["address"] || null, + city: contactData["city"] || null, + state: contactData["state"] || null, + country: contactData["country"] || null, + postalCode: contactData["postalCode"] || null, + }); + }, }, }); diff --git a/services/static-webserver/client/source/class/osparc/data/model/Workbench.js b/services/static-webserver/client/source/class/osparc/data/model/Workbench.js index 02917b2730e7..736632534619 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Workbench.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Workbench.js @@ -50,9 +50,10 @@ 
qx.Class.define("osparc.data.model.Workbench", { }, events: { - "updateStudyDocument": "qx.event.type.Event", - "restartAutoSaveTimer": "qx.event.type.Event", + "projectDocumentChanged": "qx.event.type.Data", "pipelineChanged": "qx.event.type.Event", + "nodeAdded": "qx.event.type.Data", + "nodeRemoved": "qx.event.type.Data", "reloadModel": "qx.event.type.Event", "retrieveInputs": "qx.event.type.Data", "fileRequested": "qx.event.type.Data", @@ -109,6 +110,63 @@ qx.Class.define("osparc.data.model.Workbench", { this.__workbenchUIInitData = null; }, + __deserialize: function(workbenchInitData, uiData = {}) { + const nodeDatas = {}; + const nodeUiDatas = {}; + for (const nodeId in workbenchInitData) { + const nodeData = workbenchInitData[nodeId]; + nodeDatas[nodeId] = nodeData; + if (uiData["workbench"] && nodeId in uiData["workbench"]) { + nodeUiDatas[nodeId] = uiData["workbench"][nodeId]; + } + } + this.__deserializeNodes(nodeDatas, nodeUiDatas) + .then(() => { + this.__deserializeEdges(workbenchInitData); + this.setDeserialized(true); + }); + }, + + __deserializeNodes: function(nodeDatas, nodeUiDatas) { + const nodesPromises = []; + for (const nodeId in nodeDatas) { + const nodeData = nodeDatas[nodeId]; + const nodeUiData = nodeUiDatas[nodeId]; + const node = this.__createNode(nodeData["key"], nodeData["version"], nodeId); + nodesPromises.push(node.fetchMetadataAndPopulate(nodeData, nodeUiData)); + } + return Promise.allSettled(nodesPromises); + }, + + __createNode: function(key, version, nodeId) { + const node = new osparc.data.model.Node(this.getStudy(), key, version, nodeId); + this.__addNode(node); + this.__initNodeSignals(node); + osparc.utils.Utils.localCache.serviceToFavs(key); + return node; + }, + + + __deserializeEdges: function(workbenchData) { + for (const nodeId in workbenchData) { + const node = this.getNode(nodeId); + if (node === null) { + continue; + } + const nodeData = workbenchData[nodeId]; + const inputNodeIds = nodeData.inputNodes || []; + inputNodeIds.forEach(inputNodeId => { + const inputNode = this.getNode(inputNodeId); + if (inputNode === null) { + return; + } + const edge = new osparc.data.model.Edge(null, inputNode, node); + this.addEdge(edge); + node.addInputNode(inputNodeId); + }); + } + }, + // starts the dynamic services initWorkbench: function() { const allModels = this.getNodes(); @@ -271,13 +329,13 @@ qx.Class.define("osparc.data.model.Workbench", { nodeRight.setInputConnected(true); }, - __createNode: function(study, metadata, uuid) { - const node = new osparc.data.model.Node(study, metadata, uuid); - node.addListener("keyChanged", () => this.fireEvent("reloadModel"), this); - node.addListener("changeInputNodes", () => this.fireDataEvent("pipelineChanged"), this); - node.addListener("reloadModel", () => this.fireEvent("reloadModel"), this); - node.addListener("updateStudyDocument", () => this.fireEvent("updateStudyDocument"), this); - osparc.utils.Utils.localCache.serviceToFavs(metadata.key); + createUnknownNode: function(nodeId) { + if (nodeId === undefined) { + nodeId = osparc.utils.Utils.uuidV4(); + } + const node = new osparc.data.model.NodeUnknown(this.getStudy(), null, null, nodeId); + this.__addNode(node); + node.populateNodeData(); return node; }, @@ -292,7 +350,6 @@ qx.Class.define("osparc.data.model.Workbench", { return null; } - this.fireEvent("restartAutoSaveTimer"); // create the node in the backend first const params = { url: { @@ -305,19 +362,15 @@ qx.Class.define("osparc.data.model.Workbench", { }; try { - const metadata = await 
osparc.store.Services.getService(key, version); const resp = await osparc.data.Resources.fetch("studies", "addNode", params); const nodeId = resp["node_id"]; - this.fireEvent("restartAutoSaveTimer"); - const node = this.__createNode(this.getStudy(), metadata, nodeId); - this.__initNodeSignals(node); - this.__addNode(node); - - node.populateNodeData(); - this.giveUniqueNameToNode(node, node.getLabel()); - node.checkState(); - + const node = this.__createNode(key, version, nodeId); + node.fetchMetadataAndPopulate() + .then(() => { + this.__giveUniqueNameToNode(node, node.getLabel()); + node.checkState(); + }); return node; } catch (err) { let errorMsg = ""; @@ -337,50 +390,52 @@ qx.Class.define("osparc.data.model.Workbench", { }, __initNodeSignals: function(node) { - if (node) { - node.addListener("showInLogger", e => this.fireDataEvent("showInLogger", e.getData()), this); - node.addListener("retrieveInputs", e => this.fireDataEvent("retrieveInputs", e.getData()), this); - node.addListener("fileRequested", e => this.fireDataEvent("fileRequested", e.getData()), this); - node.addListener("filePickerRequested", e => { - const { - portId, - nodeId, - file - } = e.getData(); - this.__filePickerNodeRequested(nodeId, portId, file); - }, this); - node.addListener("parameterRequested", e => { - const { - portId, - nodeId - } = e.getData(); - this.__parameterNodeRequested(nodeId, portId); - }, this); - node.addListener("probeRequested", e => { - const { - portId, - nodeId - } = e.getData(); - this.__probeNodeRequested(nodeId, portId); - }, this); - node.addListener("fileUploaded", () => { - // downstream nodes might have started downloading file picker's output. - // show feedback to the user - const downstreamNodes = this.__getDownstreamNodes(node); - downstreamNodes.forEach(downstreamNode => { - downstreamNode.getPortIds().forEach(portId => { - const link = downstreamNode.getLink(portId); - if (link && link["nodeUuid"] === node.getNodeId() && link["output"] === "outFile") { - // connected to file picker's output - setTimeout(() => { - // start retrieving state after 2" - downstreamNode.retrieveInputs(portId); - }, 2000); - } - }); + node.addListener("projectDocumentChanged", e => this.fireDataEvent("projectDocumentChanged", e.getData()), this); + node.addListener("keyChanged", () => this.fireEvent("reloadModel"), this); + node.addListener("changeInputNodes", () => this.fireDataEvent("pipelineChanged"), this); + node.addListener("reloadModel", () => this.fireEvent("reloadModel"), this); + node.addListener("showInLogger", e => this.fireDataEvent("showInLogger", e.getData()), this); + node.addListener("retrieveInputs", e => this.fireDataEvent("retrieveInputs", e.getData()), this); + node.addListener("fileRequested", e => this.fireDataEvent("fileRequested", e.getData()), this); + node.addListener("filePickerRequested", e => { + const { + portId, + nodeId, + file + } = e.getData(); + this.__filePickerNodeRequested(nodeId, portId, file); + }, this); + node.addListener("parameterRequested", e => { + const { + portId, + nodeId + } = e.getData(); + this.__parameterNodeRequested(nodeId, portId); + }, this); + node.addListener("probeRequested", e => { + const { + portId, + nodeId + } = e.getData(); + this.__probeNodeRequested(nodeId, portId); + }, this); + node.addListener("fileUploaded", () => { + // downstream nodes might have started downloading file picker's output. 
+ // show feedback to the user + const downstreamNodes = this.__getDownstreamNodes(node); + downstreamNodes.forEach(downstreamNode => { + downstreamNode.getPortIds().forEach(portId => { + const link = downstreamNode.getLink(portId); + if (link && link["nodeUuid"] === node.getNodeId() && link["output"] === "outFile") { + // connected to file picker's output + setTimeout(() => { + // start retrieving state after 2" + downstreamNode.retrieveInputs(portId); + }, 2000); + } }); - }, this); - } + }); + }, this); }, getFreePosition: function(node, toTheLeft = true) { @@ -433,43 +488,50 @@ qx.Class.define("osparc.data.model.Workbench", { return; } - const requesterNode = this.getNode(nodeId); - const freePos = this.getFreePosition(requesterNode); - filePicker.setPosition(freePos); + const populateNewNode = () => { + const requesterNode = this.getNode(nodeId); + const freePos = this.getFreePosition(requesterNode); + filePicker.setPosition(freePos); - // create connection - const filePickerId = filePicker.getNodeId(); - requesterNode.addInputNode(filePickerId); - // reload also before port connection happens - this.fireEvent("reloadModel"); - requesterNode.addPortLink(portId, filePickerId, "outFile") - .then(success => { - if (success) { - if (file) { - const fileObj = file.data; - osparc.file.FilePicker.setOutputValueFromStore( - filePicker, - fileObj.getLocation(), - fileObj.getDatasetId(), - fileObj.getFileId(), - fileObj.getLabel() - ); + // create connection + const filePickerId = filePicker.getNodeId(); + requesterNode.addInputNode(filePickerId); + // reload also before port connection happens + this.fireEvent("reloadModel"); + requesterNode.addPortLink(portId, filePickerId, "outFile") + .then(success => { + if (success) { + if (file) { + const fileObj = file.data; + osparc.file.FilePicker.setOutputValueFromStore( + filePicker, + fileObj.getLocation(), + fileObj.getDatasetId(), + fileObj.getFileId(), + fileObj.getLabel() + ); + } + this.fireDataEvent("openNode", filePicker.getNodeId()); + this.fireEvent("reloadModel"); + } else { + this.removeNode(filePickerId); + const msg = qx.locale.Manager.tr("File couldn't be assigned"); + osparc.FlashMessenger.logError(msg); } - this.fireDataEvent("openNode", filePicker.getNodeId()); - this.fireEvent("reloadModel"); - } else { - this.removeNode(filePickerId); - const msg = qx.locale.Manager.tr("File couldn't be assigned"); - osparc.FlashMessenger.logError(msg); - } - }); + }); + }; + if (filePicker.getMetadata()) { + populateNewNode(); + } else { + filePicker.addListenerOnce("changeMetadata", () => populateNewNode(), this); + } }, __parameterNodeRequested: async function(nodeId, portId) { const requesterNode = this.getNode(nodeId); // create a new ParameterNode - const type = osparc.utils.Ports.getPortType(requesterNode.getMetaData()["inputs"], portId); + const type = osparc.utils.Ports.getPortType(requesterNode.getMetadata()["inputs"], portId); const parameterMetadata = osparc.store.Services.getParameterMetadata(type); if (parameterMetadata) { const parameterNode = await this.createNode(parameterMetadata["key"], parameterMetadata["version"]); @@ -477,20 +539,27 @@ qx.Class.define("osparc.data.model.Workbench", { return; } - // do not overlap the new Parameter Node with other nodes - const freePos = this.getFreePosition(requesterNode); - parameterNode.setPosition(freePos); - - // create connection - const pmId = parameterNode.getNodeId(); - requesterNode.addInputNode(pmId); - // bypass the compatibility check - if 
(requesterNode.getPropsForm().addPortLink(portId, pmId, "out_1") !== true) { - this.removeNode(pmId); - const msg = qx.locale.Manager.tr("Parameter couldn't be assigned"); - osparc.FlashMessenger.logError(msg); + const populateNewNode = () => { + // do not overlap the new Parameter Node with other nodes + const freePos = this.getFreePosition(requesterNode); + parameterNode.setPosition(freePos); + + // create connection + const pmId = parameterNode.getNodeId(); + requesterNode.addInputNode(pmId); + // bypass the compatibility check + if (requesterNode.getPropsForm().addPortLink(portId, pmId, "out_1") !== true) { + this.removeNode(pmId); + const msg = qx.locale.Manager.tr("Parameter couldn't be assigned"); + osparc.FlashMessenger.logError(msg); + } + this.fireEvent("reloadModel"); + }; + if (parameterNode.getMetadata()) { + populateNewNode(); + } else { + parameterNode.addListenerOnce("changeMetadata", () => populateNewNode(), this); } - this.fireEvent("reloadModel"); } }, @@ -498,8 +567,8 @@ qx.Class.define("osparc.data.model.Workbench", { const requesterNode = this.getNode(nodeId); // create a new ProbeNode - const requesterPortMD = requesterNode.getMetaData()["outputs"][portId]; - const type = osparc.utils.Ports.getPortType(requesterNode.getMetaData()["outputs"], portId); + const requesterPortMD = requesterNode.getMetadata()["outputs"][portId]; + const type = osparc.utils.Ports.getPortType(requesterNode.getMetadata()["outputs"], portId); const probeMetadata = osparc.store.Services.getProbeMetadata(type); if (probeMetadata) { const probeNode = await this.createNode(probeMetadata["key"], probeMetadata["version"]); @@ -507,66 +576,84 @@ qx.Class.define("osparc.data.model.Workbench", { return; } - probeNode.setLabel(requesterPortMD.label); + const populateNewNode = () => { + probeNode.setLabel(requesterPortMD.label); - // do not overlap the new Parameter Node with other nodes - const freePos = this.getFreePosition(requesterNode, false); - probeNode.setPosition(freePos); + // do not overlap the new Parameter Node with other nodes + const freePos = this.getFreePosition(requesterNode, false); + probeNode.setPosition(freePos); - // create connection - const probeId = probeNode.getNodeId(); - probeNode.addInputNode(nodeId); - // bypass the compatibility check - if (probeNode.getPropsForm().addPortLink("in_1", nodeId, portId) !== true) { - this.removeNode(probeId); - const msg = qx.locale.Manager.tr("Probe couldn't be assigned"); - osparc.FlashMessenger.logError(msg); + // create connection + const probeId = probeNode.getNodeId(); + probeNode.addInputNode(nodeId); + // bypass the compatibility check + if (probeNode.getPropsForm().addPortLink("in_1", nodeId, portId) !== true) { + this.removeNode(probeId); + const msg = qx.locale.Manager.tr("Probe couldn't be assigned"); + osparc.FlashMessenger.logError(msg); + } + this.fireEvent("reloadModel"); + }; + if (probeNode.getMetadata()) { + populateNewNode(); + } else { + probeNode.addListenerOnce("changeMetadata", () => populateNewNode(), this); } - this.fireEvent("reloadModel"); } }, __addNode: function(node) { const nodeId = node.getNodeId(); this.__nodes[nodeId] = node; - this.fireEvent("pipelineChanged"); + const nodeAdded = () => { + this.fireEvent("pipelineChanged"); + }; + if (node.getMetadata()) { + nodeAdded(); + } else { + node.addListenerOnce("changeMetadata", () => nodeAdded(), this); + } }, removeNode: async function(nodeId) { if (!osparc.data.Permissions.getInstance().canDo("study.node.delete", true)) { - return false; + return; } if 
(this.getStudy().isPipelineRunning()) { osparc.FlashMessenger.logAs(this.self().CANT_DELETE_NODE, "ERROR"); - return false; + return; } let node = this.getNode(nodeId); if (node) { - this.fireEvent("restartAutoSaveTimer"); // remove the node in the backend first const removed = await node.removeNode(); if (removed) { - this.fireEvent("restartAutoSaveTimer"); - - delete this.__nodes[nodeId]; + this.__nodeRemoved(nodeId); + } + } + }, - // remove first the connected edges - const connectedEdges = this.getConnectedEdges(nodeId); - connectedEdges.forEach(connectedEdgeId => { - this.removeEdge(connectedEdgeId); - }); + __nodeRemoved: function(nodeId) { + delete this.__nodes[nodeId]; - // remove it from ui model - if (this.getStudy()) { - this.getStudy().getUi().removeNode(nodeId); - } + // remove first the connected edges + const connectedEdgeIds = this.getConnectedEdges(nodeId); + connectedEdgeIds.forEach(connectedEdgeId => { + this.removeEdge(connectedEdgeId); + }); - this.fireEvent("pipelineChanged"); - return true; - } + // remove it from ui model + if (this.getStudy()) { + this.getStudy().getUi().removeNode(nodeId); } - return false; + + this.fireEvent("pipelineChanged"); + + this.fireDataEvent("nodeRemoved", { + nodeId, + connectedEdgeIds, + }); }, addServiceBetween: async function(service, leftNodeId, rightNodeId) { @@ -621,8 +708,10 @@ qx.Class.define("osparc.data.model.Workbench", { const rightNode = this.getNode(rightNodeId); if (rightNode) { // no need to make any changes to a just removed node (it would trigger a patch call) - rightNode.removeInputNode(leftNodeId); + // first remove the port connections rightNode.removeNodePortConnections(leftNodeId); + // then the node connection + rightNode.removeInputNode(leftNodeId); } delete this.__edges[edgeId]; @@ -646,7 +735,7 @@ qx.Class.define("osparc.data.model.Workbench", { return false; }, - giveUniqueNameToNode: function(node, label, suffix = 2) { + __giveUniqueNameToNode: function(node, label, suffix = 2) { const newLabel = label + "_" + suffix; const allModels = this.getNodes(); const nodes = Object.values(allModels); @@ -654,105 +743,20 @@ qx.Class.define("osparc.data.model.Workbench", { if (node2.getNodeId() !== node.getNodeId() && node2.getLabel().localeCompare(node.getLabel()) === 0) { node.setLabel(newLabel); - this.giveUniqueNameToNode(node, label, suffix+1); - } - } - }, - - __populateNodesData: function(workbenchData, workbenchUIData) { - Object.entries(workbenchData).forEach(([nodeId, nodeData]) => { - this.getNode(nodeId).populateNodeData(nodeData); - - if ("position" in nodeData) { - // old way for storing the position - this.getNode(nodeId).populateNodeUIData(nodeData); - } - if (workbenchUIData && "workbench" in workbenchUIData && nodeId in workbenchUIData.workbench) { - this.getNode(nodeId).populateNodeUIData(workbenchUIData.workbench[nodeId]); - } - }); - }, - - __deserialize: function(workbenchInitData, workbenchUIInitData) { - this.__deserializeNodes(workbenchInitData, workbenchUIInitData) - .then(() => { - this.__deserializeEdges(workbenchInitData); - workbenchInitData = null; - workbenchUIInitData = null; - this.setDeserialized(true); - }); - }, - - __deserializeNodes: function(workbenchData, workbenchUIData = {}) { - const nodeIds = Object.keys(workbenchData); - const serviceMetadataPromises = []; - nodeIds.forEach(nodeId => { - const nodeData = workbenchData[nodeId]; - serviceMetadataPromises.push(osparc.store.Services.getService(nodeData.key, nodeData.version)); - }); - return 
Promise.allSettled(serviceMetadataPromises) - .then(results => { - const missing = results.filter(result => result.status === "rejected" || result.value === null) - if (missing.length) { - const errorMsg = qx.locale.Manager.tr("Service metadata missing"); - osparc.FlashMessenger.logError(errorMsg); - return; - } - const values = results.map(result => result.value); - // Create first all the nodes - for (let i=0; i { - const node = this.getNode(nodeId); - this.giveUniqueNameToNode(node, node.getLabel()); - }); - }); - }, - - __deserializeEdges: function(workbenchData) { - for (const nodeId in workbenchData) { - const nodeData = workbenchData[nodeId]; - const node = this.getNode(nodeId); - if (node === null) { - continue; + this.__giveUniqueNameToNode(node, label, suffix+1); } - this.__addInputOutputNodesAndEdges(node, nodeData.inputNodes); } }, - __addInputOutputNodesAndEdges: function(node, inputOutputNodeIds) { - if (inputOutputNodeIds) { - inputOutputNodeIds.forEach(inputOutputNodeId => { - const node1 = this.getNode(inputOutputNodeId); - if (node1 === null) { - return; - } - const edge = new osparc.data.model.Edge(null, node1, node); - this.addEdge(edge); - node.addInputNode(inputOutputNodeId); - }); - } - }, - - serialize: function(clean = true) { + serialize: function() { if (this.__workbenchInitData !== null) { // workbench is not initialized return this.__workbenchInitData; } - let workbench = {}; - const allModels = this.getNodes(); - const nodes = Object.values(allModels); + const workbench = {}; + const nodes = Object.values(this.getNodes()); for (const node of nodes) { - const data = node.serialize(clean); + const data = node.serialize(); if (data) { workbench[node.getNodeId()] = data; } @@ -765,17 +769,12 @@ qx.Class.define("osparc.data.model.Workbench", { // workbenchUI is not initialized return this.__workbenchUIInitData; } - let workbenchUI = {}; - const nodes = this.getNodes(); - for (const nodeId in nodes) { - const node = nodes[nodeId]; - workbenchUI[nodeId] = {}; - workbenchUI[nodeId]["position"] = node.getPosition(); - const marker = node.getMarker(); - if (marker) { - workbenchUI[nodeId]["marker"] = { - color: marker.getColor() - }; + const workbenchUI = {}; + const nodes = Object.values(this.getNodes()); + for (const node of nodes) { + const data = node.serializeUI(); + if (data) { + workbenchUI[node.getNodeId()] = data; } } return workbenchUI; @@ -784,8 +783,9 @@ qx.Class.define("osparc.data.model.Workbench", { /** * Call patch Node, but the changes were already applied on the frontend * @param workbenchDiffs {Object} Diff Object coming from the JsonDiffPatch lib. Use only the keys, not the changes. + * @param workbenchSource {Object} Workbench object that was used to check the diffs on the frontend. 
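+     * @returns {Promise} Resolves once all the per-node patch requests have settled.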
*/ - patchWorkbenchDelayed: function(workbenchDiffs) { + patchWorkbenchDiffs: function(workbenchDiffs, workbenchSource) { const promises = []; Object.keys(workbenchDiffs).forEach(nodeId => { const node = this.getNode(nodeId); @@ -793,12 +793,16 @@ qx.Class.define("osparc.data.model.Workbench", { // the node was removed return; } + // use the node data that was used to check the diffs + const nodeData = workbenchSource[nodeId]; + if (!nodeData) { + // skip if nodeData is undefined or null + return; + } - const nodeData = node.serialize(); let patchData = {}; if (workbenchDiffs[nodeId] instanceof Array) { - // if workbenchDiffs is an array means that the node was either added or removed - // the node was added + // if workbenchDiffs is an array means that the node was added patchData = nodeData; } else { // patch only what was changed @@ -821,6 +825,123 @@ qx.Class.define("osparc.data.model.Workbench", { } }) return Promise.all(promises); - } + }, + + /** + * Update the workbench from the given patches. + * @param workbenchPatches {Array} Array of workbench patches. + * @param uiPatches {Array} Array of UI patches. They might contain info (position) about new nodes. + */ + updateWorkbenchFromPatches: function(workbenchPatches, uiPatches) { + // group the patches by nodeId + const nodesAdded = []; + const nodesRemoved = []; + const workbenchPatchesByNode = {}; + const workbenchUiPatchesByNode = {}; + workbenchPatches.forEach(workbenchPatch => { + const nodeId = workbenchPatch.path.split("/")[2]; + + const pathParts = workbenchPatch.path.split("/"); + if (pathParts.length === 3) { + if (workbenchPatch.op === "add") { + // node was added + nodesAdded.push(nodeId); + } else if (workbenchPatch.op === "remove") { + // node was removed + nodesRemoved.push(nodeId); + } + } + + if (!(nodeId in workbenchPatchesByNode)) { + workbenchPatchesByNode[nodeId] = []; + } + workbenchPatchesByNode[nodeId].push(workbenchPatch); + }); + + // first, remove nodes + if (nodesRemoved.length) { + this.__removeNodesFromPatches(nodesRemoved, workbenchPatchesByNode); + } + + // second, add nodes if any + if (nodesAdded.length) { + // this will call update nodes once finished + nodesAdded.forEach(nodeId => { + const uiPatchFound = uiPatches.find(uiPatch => { + const pathParts = uiPatch.path.split("/"); + return uiPatch.op === "add" && pathParts.length === 4 && pathParts[3] === nodeId; + }); + if (uiPatchFound) { + workbenchUiPatchesByNode[nodeId] = uiPatchFound; + } + }); + this.__addNodesFromPatches(nodesAdded, workbenchPatchesByNode, workbenchUiPatchesByNode); + } else { + // third, update nodes + this.__updateNodesFromPatches(workbenchPatchesByNode); + } + }, + + __removeNodesFromPatches: function(nodesRemoved, workbenchPatchesByNode) { + nodesRemoved.forEach(nodeId => { + const node = this.getNode(nodeId); + + // if the user is in that node, restore the node to the workbench + if (this.getStudy().getUi().getCurrentNodeId() === nodeId) { + this.getStudy().getUi().setMode("pipeline"); + this.getStudy().getUi().setCurrentNodeId(null); + } + if (node) { + node.nodeRemoved(nodeId); + } + this.__nodeRemoved(nodeId); + delete workbenchPatchesByNode[nodeId]; + }); + }, + + __addNodesFromPatches: function(nodesAdded, workbenchPatchesByNode, workbenchUiPatchesByNode = {}) { + nodesAdded.forEach(nodeId => { + const addNodePatch = workbenchPatchesByNode[nodeId].find(workbenchPatch => { + const pathParts = workbenchPatch.path.split("/"); + return pathParts.length === 3 && workbenchPatch.op === "add"; + }); + const nodeData = 
addNodePatch.value; + // delete the node "add" from the workbenchPatchesByNode + const index = workbenchPatchesByNode[nodeId].indexOf(addNodePatch); + if (index > -1) { + workbenchPatchesByNode[nodeId].splice(index, 1); + } + + const nodeUiData = workbenchUiPatchesByNode[nodeId] && workbenchUiPatchesByNode[nodeId]["value"] ? workbenchUiPatchesByNode[nodeId]["value"] : {}; + + const node = this.__createNode(nodeData["key"], nodeData["version"], nodeId); + node.fetchMetadataAndPopulate(nodeData, nodeUiData) + .then(() => { + this.fireDataEvent("nodeAdded", node); + node.checkState(); + // check it was already linked + if (nodeData.inputNodes && nodeData.inputNodes.length > 0) { + nodeData.inputNodes.forEach(inputNodeId => { + node.fireDataEvent("edgeCreated", { + nodeId1: inputNodeId, + nodeId2: nodeId, + }); + }); + } + }); + }); + }, + + __updateNodesFromPatches: function(workbenchPatchesByNode) { + Object.keys(workbenchPatchesByNode).forEach(nodeId => { + const node = this.getNode(nodeId); + if (node === null) { + console.warn(`Node with id ${nodeId} not found, skipping patch application.`); + return; + } + const nodePatches = workbenchPatchesByNode[nodeId]; + node.updateNodeFromPatch(nodePatches); + }); + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/desktop/ControlsBar.js b/services/static-webserver/client/source/class/osparc/desktop/ControlsBar.js deleted file mode 100644 index 9c4c9fe9ffe9..000000000000 --- a/services/static-webserver/client/source/class/osparc/desktop/ControlsBar.js +++ /dev/null @@ -1,104 +0,0 @@ -/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2018 IT'IS Foundation, https://itis.swiss - - License: - MIT: https://opensource.org/licenses/MIT - - Authors: - * Odei Maiz (odeimaiz) - -************************************************************************ */ - -/** - * Widget that shows the play/stop study button. - * - * *Example* - * - * Here is a little example of how to use the widget. - * - *
- *   let controlsBar = new osparc.desktop.ControlsBar();
- *   this.getRoot().add(controlsBar);
- * </pre>
- */ - -qx.Class.define("osparc.desktop.ControlsBar", { - extend: qx.ui.toolbar.ToolBar, - - construct: function() { - this.base(arguments); - - this.setSpacing(10); - - this.__initDefault(); - this.__attachEventHandlers(); - }, - - events: { - "showWorkbench": "qx.event.type.Event", - "showSettings": "qx.event.type.Event", - "groupSelection": "qx.event.type.Event", - "ungroupSelection": "qx.event.type.Event" - }, - - members: { - __serviceFilters: null, - __viewCtrls: null, - __workbenchViewButton: null, - __settingsViewButton: null, - __iterationCtrls: null, - __parametersButton: null, - - setWorkbenchVisibility: function(isWorkbenchContext) { - this.__serviceFilters.setVisibility(isWorkbenchContext ? "visible" : "excluded"); - }, - - setExtraViewVisibility: function(hasExtraView) { - this.__viewCtrls.setVisibility(hasExtraView ? "visible" : "excluded"); - }, - - __initDefault: function() { - const filterCtrls = new qx.ui.toolbar.Part(); - const serviceFilters = this.__serviceFilters = new osparc.filter.group.ServiceFilterGroup("workbench"); - osparc.filter.UIFilterController.getInstance().registerContainer("workbench", serviceFilters); - filterCtrls.add(serviceFilters); - this.add(filterCtrls); - - this.addSpacer(); - - const viewCtrls = this.__viewCtrls = new qx.ui.toolbar.Part(); - const workbenchViewButton = this.__workbenchViewButton = this.__createWorkbenchButton(); - const settingsViewButton = this.__settingsViewButton = this.__createSettingsButton(); - viewCtrls.add(workbenchViewButton); - viewCtrls.add(settingsViewButton); - this.add(viewCtrls); - const viewRadioGroup = new qx.ui.form.RadioGroup(); - viewRadioGroup.add(workbenchViewButton, settingsViewButton); - }, - - __createWorkbenchButton: function() { - const workbenchButton = this.__createRadioButton(this.tr("Workbench view"), "vector-square", "workbenchViewBtn", "showWorkbench"); - return workbenchButton; - }, - - __createSettingsButton: function() { - const settingsButton = this.__createRadioButton(this.tr("Node view"), "list", "settingsViewBtn", "showSettings"); - return settingsButton; - }, - - __createRadioButton: function(label, icon, widgetId, singalName) { - const button = new qx.ui.toolbar.RadioButton(label); - osparc.utils.Utils.setIdToWidget(button, widgetId); - button.addListener("execute", () => { - this.fireEvent(singalName); - }, this); - return button; - } - } -}); diff --git a/services/static-webserver/client/source/class/osparc/desktop/MainPage.js b/services/static-webserver/client/source/class/osparc/desktop/MainPage.js index a0742f2c1853..706d99032d2f 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/MainPage.js +++ b/services/static-webserver/client/source/class/osparc/desktop/MainPage.js @@ -56,7 +56,7 @@ qx.Class.define("osparc.desktop.MainPage", { // Some resources request before building the main stack osparc.MaintenanceTracker.getInstance().startTracker(); osparc.CookieExpirationTracker.getInstance().startTracker(); - // osparc.NewUITracker.getInstance().startTracker(); + osparc.NewUITracker.getInstance().startTracker(); const store = osparc.store.Store.getInstance(); const preloadPromises = []; @@ -70,6 +70,8 @@ qx.Class.define("osparc.desktop.MainPage", { preloadPromises.push(osparc.store.Products.getInstance().fetchUiConfig()); preloadPromises.push(osparc.store.PollTasks.getInstance().fetchTasks()); preloadPromises.push(osparc.store.Jobs.getInstance().fetchJobsLatest()); + preloadPromises.push(osparc.data.Permissions.getInstance().fetchPermissions()); + 
preloadPromises.push(osparc.data.Permissions.getInstance().fetchFunctionPermissions()); Promise.all(preloadPromises) .then(() => { const mainStack = this.__createMainStack(); @@ -115,7 +117,9 @@ qx.Class.define("osparc.desktop.MainPage", { const preferencesSettings = osparc.Preferences.getInstance(); if (!isReadOnly && preferencesSettings.getConfirmBackToDashboard()) { const studyName = this.__studyEditor.getStudy().getName(); - const win = new osparc.ui.window.Confirmation(); + const win = new osparc.ui.window.Confirmation().set({ + confirmAction: "warning", + }); if (osparc.product.Utils.getProductName().includes("s4l")) { let msg = this.tr("Do you want to close ") + "" + studyName + "?"; msg += "
<br><br>
"; @@ -232,21 +236,13 @@ qx.Class.define("osparc.desktop.MainPage", { const studyId = data["studyData"].uuid; const studyName = data["studyData"].name; const copyData = data["copyData"]; + const hidden = false; const templateAccessRights = data["accessRights"]; const templateType = data["templateType"]; - const params = { - url: { - "study_id": studyId, - "copy_data": copyData - }, - }; - const options = { - pollTask: true - }; - const fetchPromise = osparc.data.Resources.fetch("studies", "postToTemplate", params, options); + const pollPromise = osparc.store.Templates.createTemplate(studyId, copyData, hidden); const pollTasks = osparc.store.PollTasks.getInstance(); - pollTasks.createPollingTask(fetchPromise) + pollTasks.createPollingTask(pollPromise) .then(task => { const tutorialBrowser = this.__dashboard.getTutorialBrowser(); if (tutorialBrowser && templateType === osparc.data.model.StudyUI.TUTORIAL_TYPE) { @@ -259,9 +255,9 @@ qx.Class.define("osparc.desktop.MainPage", { task.addListener("resultReceived", e => { const templateData = e.getData(); // these operations need to be done after template creation - osparc.store.Study.addCollaborators(templateData, templateAccessRights); + osparc.store.Study.getInstance().addCollaborators(templateData, templateAccessRights); if (templateType) { - osparc.store.Study.patchTemplateType(templateData["uuid"], templateType) + osparc.store.Study.getInstance().patchTemplateType(templateData, templateType) .then(() => { if (tutorialBrowser && templateType === osparc.data.model.StudyUI.TUTORIAL_TYPE) { tutorialBrowser.reloadResources(false); @@ -269,7 +265,8 @@ qx.Class.define("osparc.desktop.MainPage", { if (appBrowser && templateType === osparc.data.model.StudyUI.HYPERTOOL_TYPE) { appBrowser.reloadResources(false); } - }); + }) + .catch(err => osparc.FlashMessenger.logError(err)); } }); }) @@ -312,7 +309,7 @@ qx.Class.define("osparc.desktop.MainPage", { const currentStudy = store.getCurrentStudy(); while (currentStudy.isLocked()) { await osparc.utils.Utils.sleep(1000); - store.getStudyState(studyId); + osparc.store.Study.getInstance().fetchStudyState(studyId); } this.__loadingPage.setMessages([]); this.__openSnapshot(studyId, snapshotId); @@ -331,15 +328,10 @@ qx.Class.define("osparc.desktop.MainPage", { const msg = this.tr("No snapshot found"); throw new Error(msg); } - const params2 = { - url: { - "studyId": studyId - } - }; - osparc.data.Resources.fetch("studies", "getOne", params2) + osparc.store.Study.getInstance().getOne(studyId) .then(studyData => { if (!studyData) { - const msg = this.tr("Study not found"); + const msg = this.tr("Project not found"); throw new Error(msg); } osparc.desktop.MainPageHandler.getInstance().loadStudy(studyData); @@ -363,20 +355,14 @@ qx.Class.define("osparc.desktop.MainPage", { const currentStudy = store.getCurrentStudy(); while (currentStudy.isLocked()) { await osparc.utils.Utils.sleep(1000); - store.getStudyState(studyId); + osparc.store.Study.getInstance().fetchStudyState(studyId); } this.__loadingPage.setMessages([]); this.__openIteration(iterationUuid); }, __openIteration: function(iterationUuid) { - const params = { - url: { - "studyId": iterationUuid - } - }; - // OM TODO. 
DO NOT ADD ITERATIONS TO STUDIES CACHE - osparc.data.Resources.fetch("studies", "getOne", params) + osparc.store.Study.getInstance().getOne(iterationUuid) .then(studyData => { if (!studyData) { const msg = this.tr("Iteration not found"); diff --git a/services/static-webserver/client/source/class/osparc/desktop/MainPageDesktop.js b/services/static-webserver/client/source/class/osparc/desktop/MainPageDesktop.js deleted file mode 100644 index 577ddece319e..000000000000 --- a/services/static-webserver/client/source/class/osparc/desktop/MainPageDesktop.js +++ /dev/null @@ -1,88 +0,0 @@ -/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2023 IT'IS Foundation, https://itis.swiss - - License: - MIT: https://opensource.org/licenses/MIT - - Authors: - * Odei Maiz (odeimaiz) - -************************************************************************ */ - -qx.Class.define("osparc.desktop.MainPageDesktop", { - extend: qx.ui.core.Widget, - - construct: function() { - this.base(arguments); - - this._setLayout(new qx.ui.layout.VBox(null, null, "separator-vertical")); - - this._add(osparc.notification.RibbonNotifications.getInstance()); - - const navBar = new osparc.navigation.NavigationBar(); - navBar.populateLayout(); - // exclude some items from the navigation bar - navBar.getChildControl("dashboard-label").exclude(); - navBar.getChildControl("dashboard-button").exclude(); - navBar.getChildControl("notifications-button").exclude(); - navBar.getChildControl("help").exclude(); - - // exclude all the menu entries except "log-out" from user menu - const userMenuButton = navBar.getChildControl("user-menu"); - const userMenu = userMenuButton.getMenu(); - // eslint-disable-next-line no-underscore-dangle - const userMenuEntries = userMenu._getCreatedChildControls(); - Object.entries(userMenuEntries).forEach(([id, userMenuEntry]) => { - if (!["mini-profile-view", "po-center", "log-out"].includes(id)) { - userMenuEntry.exclude(); - } - }); - // exclude also the separators - userMenu.getChildren().forEach(child => { - if (child.classname === "qx.ui.menu.Separator") { - child.exclude(); - } - }); - this._add(navBar); - - osparc.MaintenanceTracker.getInstance().startTracker(); - - const store = osparc.store.Store.getInstance(); - const preloadPromises = []; - const walletsEnabled = osparc.desktop.credits.Utils.areWalletsEnabled(); - if (walletsEnabled) { - preloadPromises.push(store.reloadCreditPrice()); - preloadPromises.push(store.reloadWallets()); - } - preloadPromises.push(store.getAllClassifiers(true)); - preloadPromises.push(osparc.store.Tags.getInstance().fetchTags()); - preloadPromises.push(osparc.store.Products.getInstance().fetchUiConfig()); - preloadPromises.push(osparc.store.PollTasks.getInstance().fetchTasks()); - Promise.all(preloadPromises) - .then(() => { - const desktopCenter = new osparc.desktop.credits.DesktopCenter(); - this._add(desktopCenter, { - flex: 1 - }); - - this.__listenToWalletSocket(); - }); - }, - - members: { - __listenToWalletSocket: function() { - const socket = osparc.wrapper.WebSocket.getInstance(); - if (!socket.slotExists("walletOsparcCreditsUpdated")) { - socket.on("walletOsparcCreditsUpdated", data => { - osparc.desktop.credits.Utils.creditsUpdated(data["wallet_id"], data["osparc_credits"]); - }, this); - } - } - } -}); diff --git a/services/static-webserver/client/source/class/osparc/desktop/MainPageHandler.js 
b/services/static-webserver/client/source/class/osparc/desktop/MainPageHandler.js index cce33017aa98..0fce20f0e48e 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/MainPageHandler.js +++ b/services/static-webserver/client/source/class/osparc/desktop/MainPageHandler.js @@ -64,18 +64,13 @@ qx.Class.define("osparc.desktop.MainPageHandler", { this.setLoadingPageHeader(qx.locale.Manager.tr("Loading ") + osparc.product.Utils.getStudyAlias()); this.showLoadingPage(); - const params = { - url: { - "studyId": studyId - } - }; - osparc.data.Resources.fetch("studies", "getOne", params) + osparc.store.Study.getInstance().getOne(studyId) .then(studyData => { if (!studyData) { - const msg = qx.locale.Manager.tr("Study not found"); + const msg = qx.locale.Manager.tr("Project not found"); throw new Error(msg); } - this.loadStudy(studyData); + return this.loadStudy(studyData); // return so errors propagate }) .catch(err => { osparc.FlashMessenger.logError(err); @@ -88,30 +83,28 @@ qx.Class.define("osparc.desktop.MainPageHandler", { const studyAlias = osparc.product.Utils.getStudyAlias({firstUpperCase: true}); // check if it's locked let locked = false; - let lockedBy = false; - if ("state" in studyData && "locked" in studyData["state"]) { - locked = studyData["state"]["locked"]["value"]; - lockedBy = studyData["state"]["locked"]["owner"]; + let lockedBy = []; + if ("state" in studyData) { + const state = studyData["state"]; + locked = osparc.study.Utils.state.isProjectLocked(state); + const currentUserGroupIds = osparc.study.Utils.state.getCurrentGroupIds(state); + lockedBy = currentUserGroupIds.filter(gid => gid !== osparc.store.Groups.getInstance().getMyGroupId()); } - if (locked && lockedBy["user_id"] !== osparc.auth.Data.getInstance().getUserId()) { - const msg = `${studyAlias} ${qx.locale.Manager.tr("is already open by")} ${ // it will be replaced "userName" - "first_name" in lockedBy && lockedBy["first_name"] != null ? - lockedBy["first_name"] : - qx.locale.Manager.tr("another user.") - }`; + if (locked && lockedBy.length) { + const msg = `${studyAlias} ${qx.locale.Manager.tr("is already open by another user.")}`; throw new Error(msg); } // check if there is any linked node missing if (osparc.study.Utils.isAnyLinkedNodeMissing(studyData)) { - const msg = `${qx.locale.Manager.tr("We encountered an issue with the")} ${studyAlias}
<br>${qx.locale.Manager.tr("Please contact support.")}`; + const msg = `${qx.locale.Manager.tr("We found an issue with some links.")}<br>
${qx.locale.Manager.tr("They will be removed.")}`; throw new Error(msg); } this.setLoadingPageHeader(qx.locale.Manager.tr("Loading ") + studyData.name); this.showLoadingPage(); - osparc.store.Services.getStudyServicesMetadata(studyData) + return osparc.store.Services.getStudyServicesMetadata(studyData) .finally(() => { const inaccessibleServices = osparc.store.Services.getInaccessibleServices(studyData["workbench"]); if (inaccessibleServices.length) { diff --git a/services/static-webserver/client/source/class/osparc/desktop/MainPanel.js b/services/static-webserver/client/source/class/osparc/desktop/MainPanel.js deleted file mode 100644 index 06161e10d7c4..000000000000 --- a/services/static-webserver/client/source/class/osparc/desktop/MainPanel.js +++ /dev/null @@ -1,85 +0,0 @@ -/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2018 IT'IS Foundation, https://itis.swiss - - License: - MIT: https://opensource.org/licenses/MIT - - Authors: - * Odei Maiz (odeimaiz) - -************************************************************************ */ - -/* eslint no-underscore-dangle: 0 */ - -/** - * Widget containing a Vertical Box with a MainView and ControlsBar. - * Used as Main View in the study editor. - * - * *Example* - * - * Here is a little example of how to use the widget. - * - *
- *   let mainPanel = this.__mainPanel = new osparc.desktop.MainPanel();
- *   mainPanel.setMainView(widget);
- *   this.getRoot().add(mainPanel);
- * </pre>
- */ - -qx.Class.define("osparc.desktop.MainPanel", { - extend: qx.ui.core.Widget, - - construct: function() { - this.base(arguments); - - this._setLayout(new qx.ui.layout.VBox()); - - const wbToolbar = this.__wbToolbar = new osparc.desktop.WorkbenchToolbar(); - this._add(wbToolbar); - - const hBox = this.__mainView = new qx.ui.container.Composite(new qx.ui.layout.HBox(5)).set({ - allowGrowY: true - }); - this._add(hBox, { - flex: 1 - }); - - const controlsBar = this.__controlsBar = new osparc.desktop.ControlsBar(); - this._add(controlsBar); - }, - - properties: { - mainView: { - nullable: false, - check : "qx.ui.core.Widget", - apply : "__applyMainView" - } - }, - - members: { - __wbToolbar: null, - __mainView: null, - __controlsBar: null, - - __applyMainView: function(newWidget) { - this.__mainView.removeAll(); - this.__mainView.add(newWidget, { - flex: 1 - }); - }, - - getToolbar: function() { - return this.__wbToolbar; - }, - - getControls: function() { - return this.__controlsBar; - } - } -}); diff --git a/services/static-webserver/client/source/class/osparc/desktop/SlideshowView.js b/services/static-webserver/client/source/class/osparc/desktop/SlideshowView.js index 007038747383..ded648e6dc08 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/SlideshowView.js +++ b/services/static-webserver/client/source/class/osparc/desktop/SlideshowView.js @@ -405,15 +405,17 @@ qx.Class.define("osparc.desktop.SlideshowView", { }, __requestServiceBetween: function(leftNodeId, rightNodeId) { - const srvCat = new osparc.workbench.ServiceCatalog(); - srvCat.setContext(leftNodeId, rightNodeId); - srvCat.addListener("addService", e => { - const data = e.getData(); - const service = data.service; - this.__addServiceBetween(service, leftNodeId, rightNodeId); - }, this); - srvCat.center(); - srvCat.open(); + if (osparc.workbench.ServiceCatalog.canItBeOpened(this.getStudy())) { + const srvCat = new osparc.workbench.ServiceCatalog(); + srvCat.setContext(leftNodeId, rightNodeId); + srvCat.addListener("addService", e => { + const data = e.getData(); + const service = data.service; + this.__addServiceBetween(service, leftNodeId, rightNodeId); + }, this); + srvCat.center(); + srvCat.open(); + } }, __addServiceBetween: async function(service, leftNodeId, rightNodeId) { diff --git a/services/static-webserver/client/source/class/osparc/desktop/StudyEditor.js b/services/static-webserver/client/source/class/osparc/desktop/StudyEditor.js index 55a98858f60d..773af71ce609 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/StudyEditor.js +++ b/services/static-webserver/client/source/class/osparc/desktop/StudyEditor.js @@ -81,6 +81,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { }); this.__updatingStudy = 0; + this.__throttledPatchPending = false; }, events: { @@ -105,7 +106,48 @@ qx.Class.define("osparc.desktop.StudyEditor", { statics: { AUTO_SAVE_INTERVAL: 3000, + DIFF_CHECK_INTERVAL: 300, + THROTTLE_PATCH_TIME: 500, READ_ONLY_TEXT: qx.locale.Manager.tr("You do not have writing permissions.
Your changes will not be saved."), + + curateBackendProjectDocument: function(projectDocument) { + // ignore the ``state`` property, it has its own channel + [ + "state", + ].forEach(prop => { + delete projectDocument[prop]; + }); + // in order to pair it with the frontend's node serialization + // remove null entries + // remove state entries + Object.keys(projectDocument["workbench"]).forEach(nodeId => { + const node = projectDocument["workbench"][nodeId]; + Object.keys(node).forEach(nodeProp => { + if (nodeProp === "state") { + delete node[nodeProp]; + } + if (node[nodeProp] === null) { + delete node[nodeProp]; + } + }); + }); + delete projectDocument["ui"]["icon"]; + delete projectDocument["ui"]["templateType"]; + }, + + curateFrontendProjectDocument: function(myStudy) { + // the updatedStudy model doesn't contain the following properties + [ + "accessRights", + "creationDate", + "folderId", + "prjOwner", + "tags", + "trashedBy", + ].forEach(prop => { + delete myStudy[prop]; + }); + } }, members: { @@ -113,12 +155,16 @@ qx.Class.define("osparc.desktop.StudyEditor", { __viewsStack: null, __workbenchView: null, __slideshowView: null, - __autoSaveTimer: null, __studyEditorIdlingTracker: null, - __studyDataInBackend: null, + __lastSyncedProjectDocument: null, + __lastSyncedProjectVersion: null, + __pendingProjectData: null, + __applyProjectDocumentTimer: null, __updatingStudy: null, __updateThrottled: null, __nodesSlidesTree: null, + __throttledPatchPending: null, + __blockUpdates: null, setStudyData: function(studyData) { if (this.__settingStudy) { @@ -129,12 +175,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { this._showLoadingPage(this.tr("Starting") + " " + studyData.name); // Before starting a study, make sure the latest version is fetched - const params = { - url: { - "studyId": studyData.uuid - } - }; - osparc.data.Resources.fetch("studies", "getOne", params) + osparc.store.Study.getInstance().getOne(studyData.uuid) .then(latestStudyData => { const study = new osparc.data.model.Study(latestStudyData); this.setStudy(study); @@ -153,7 +194,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { study.openStudy() .then(studyData => { - this.__setStudyDataInBackend(studyData); + this.__setLastSyncedProjectDocument(studyData); this.__workbenchView.setStudy(study); this.__slideshowView.setStudy(study); @@ -179,20 +220,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { if ("status" in err && err["status"]) { if (err["status"] == 402) { msg = err["message"]; - // The backend might have thrown a 402 because the wallet was negative - const match = msg.match(/last transaction of\s([-]?\d+(\.\d+)?)\sresulted/); - let debt = null; - if ("debtAmount" in err) { - // the study has some debt that needs to be paid - debt = err["debtAmount"]; - } else if (match) { - // the study has some debt that needs to be paid - debt = parseFloat(match[1]); // Convert the captured string to a number - } - if (debt) { - // if get here, it means that the 402 was thrown due to the debt - osparc.store.Store.getInstance().setStudyDebt(study.getUuid(), debt); - } + osparc.study.Utils.extractDebtFromError(study.getUuid(), err); } else if (err["status"] == 409) { // max_open_studies_per_user msg = err["message"]; } else if (err["status"] == 423) { // Locked @@ -224,7 +252,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { // Count dynamic services. 
// If it is larger than PROJECTS_MAX_NUM_RUNNING_DYNAMIC_NODES, dynamics won't start -> Flash Message - const maxNumber = osparc.store.StaticInfo.getInstance().getMaxNumberDyNodes(); + const maxNumber = osparc.store.StaticInfo.getMaxNumberDyNodes(); const dontCheck = study.getDisableServiceAutoStart(); if (maxNumber && !dontCheck) { const nodes = study.getWorkbench().getNodes(); @@ -237,9 +265,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { } } - if (osparc.data.model.Study.canIWrite(study.getAccessRights())) { - this.__startAutoSaveTimer(); - } else { + if (!osparc.data.model.Study.canIWrite(study.getAccessRights())) { const msg = this.self().READ_ONLY_TEXT; osparc.FlashMessenger.logAs(msg, "WARNING"); } @@ -261,17 +287,21 @@ qx.Class.define("osparc.desktop.StudyEditor", { this.nodeSelected(nodeId); }, this); - workbench.addListener("updateStudyDocument", () => this.updateStudyDocument()); - workbench.addListener("restartAutoSaveTimer", () => this.__restartAutoSaveTimer()); + study.listenToChanges(); // this includes the listener on the workbench and ui + study.addListener("projectDocumentChanged", e => this.projectDocumentChanged(e.getData()), this); + + if (osparc.utils.DisabledPlugins.isRTCEnabled()) { + this.__listenToProjectDocument(); + } }, - __setStudyDataInBackend: function(studyData) { - this.__studyDataInBackend = osparc.data.model.Study.deepCloneStudyObject(studyData, true); + __setLastSyncedProjectDocument: function(studyData) { + this.__lastSyncedProjectDocument = osparc.data.model.Study.deepCloneStudyObject(studyData, true); - // remove the runHash, this.__studyDataInBackend is only used for diff comparison and the frontend doesn't keep it - Object.keys(this.__studyDataInBackend["workbench"]).forEach(nodeId => { - if ("runHash" in this.__studyDataInBackend["workbench"][nodeId]) { - delete this.__studyDataInBackend["workbench"][nodeId]["runHash"]; + // remove the runHash, this.__lastSyncedProjectDocument is only used for diff comparison and the frontend doesn't keep it + Object.keys(this.__lastSyncedProjectDocument["workbench"]).forEach(nodeId => { + if ("runHash" in this.__lastSyncedProjectDocument["workbench"][nodeId]) { + delete this.__lastSyncedProjectDocument["workbench"][nodeId]["runHash"]; } }); }, @@ -283,9 +313,130 @@ qx.Class.define("osparc.desktop.StudyEditor", { this.__listenToNodeUpdated(); this.__listenToNodeProgress(); this.__listenToNoMoreCreditsEvents(); - this.__listenToEvent(); + this.__listenToServiceCustomEvents(); this.__listenToServiceStatus(); this.__listenToStatePorts(); + + const socket = osparc.wrapper.WebSocket.getInstance(); + [ + "connect", + "reconnect", + ].forEach(evtName => { + socket.addListener(evtName, () => { + // after a reconnect, re-sync the project document + console.log("WebSocket reconnected, re-syncing project document"); + const studyId = this.getStudy().getUuid(); + osparc.store.Study.getInstance().getOne(studyId) + .then(latestStudyData => { + const latestData = { + "version": this.__lastSyncedProjectVersion, // do not increase the version + "document": latestStudyData, + }; + this.__applyProjectDocument(latestData); + }) + .catch(err => { + console.error("Failed to re-sync project document after WebSocket reconnect:", err); + }); + }); + }); + }, + + __listenToProjectDocument: function() { + const socket = osparc.wrapper.WebSocket.getInstance(); + + if (!socket.slotExists("projectDocument:updated")) { + socket.on("projectDocument:updated", data => { + if (data["projectId"] === this.getStudy().getUuid()) { + if 
(data["clientSessionId"] && data["clientSessionId"] === osparc.utils.Utils.getClientSessionID()) { + // ignore my own updates + console.debug("ProjectDocument Discarded: My own", data); + return; + } + this.__projectDocumentReceived(data); + } + }, this); + } + }, + + __projectDocumentReceived: function(data) { + const documentVersion = data["version"]; + + // Ignore outdated updates + if (this.__lastSyncedProjectVersion && documentVersion <= this.__lastSyncedProjectVersion) { + // ignore old updates + console.debug("ProjectDocument Discarded: Ignoring old", data); + return; + } + + // Always keep the latest version in pending buffer + if (!this.__pendingProjectData || documentVersion > (this.__pendingProjectData.version || 0)) { + this.__pendingProjectData = data; + } + + // Reset the timer if it's already running + if (this.__applyProjectDocumentTimer) { + console.debug("ProjectDocument Discarded: Resetting applyProjectDocument timer"); + clearTimeout(this.__applyProjectDocumentTimer); + } + + // Throttle applying updates + this.__applyProjectDocumentTimer = setTimeout(() => { + if (!this.__pendingProjectData) { + return; + } + this.__applyProjectDocumentTimer = null; + + // Apply the latest buffered project document + const latestData = this.__pendingProjectData; + this.__pendingProjectData = null; + + this.__applyProjectDocument(latestData); + }, 3*this.self().THROTTLE_PATCH_TIME); + // make it 3 times longer. + // when another client adds a node: + // - there is a POST call + // - then (after the throttle) a PATCH on its position + // without waiting for it 3 times, this client might place it on the default 0,0 + }, + + __applyProjectDocument: function(data) { + console.debug("ProjectDocument applying:", data); + this.__lastSyncedProjectVersion = data["version"]; + const updatedProjectDocument = data["document"]; + + // curate projectDocument:updated document + this.self().curateBackendProjectDocument(updatedProjectDocument); + + const myStudy = this.getStudy().serialize(); + // curate myStudy + this.self().curateFrontendProjectDocument(myStudy); + + this.__blockUpdates = true; + const delta = osparc.wrapper.JsonDiffPatch.getInstance().diff(myStudy, updatedProjectDocument); + const jsonPatches = osparc.wrapper.JsonDiffPatch.getInstance().deltaToJsonPatches(delta); + const uiPatches = []; + const workbenchPatches = []; + const studyPatches = []; + for (const jsonPatch of jsonPatches) { + if (jsonPatch.path.startsWith('/ui/')) { + uiPatches.push(jsonPatch); + } else if (jsonPatch.path.startsWith('/workbench/')) { + workbenchPatches.push(jsonPatch); + } else { + studyPatches.push(jsonPatch); + } + } + if (workbenchPatches.length > 0) { + this.getStudy().getWorkbench().updateWorkbenchFromPatches(workbenchPatches, uiPatches); + } + if (uiPatches.length > 0) { + this.getStudy().getUi().updateUiFromPatches(uiPatches); + } + if (studyPatches.length > 0) { + this.getStudy().updateStudyFromPatches(studyPatches); + } + + this.__blockUpdates = false; }, __listenToLogger: function() { @@ -386,7 +537,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { } }, - __listenToEvent: function() { + __listenToServiceCustomEvents: function() { const socket = osparc.wrapper.WebSocket.getInstance(); // callback for events @@ -536,8 +687,8 @@ qx.Class.define("osparc.desktop.StudyEditor", { }, __editSlides: function() { - if (this.getStudy().getUi().getMode() !== "workbench") { - // if the user is not in "workbench" mode, return + if (["app", "guided"].includes(this.getStudy().getUi().getMode())) { + // if the 
user is in "app" mode, return return; } @@ -599,13 +750,14 @@ qx.Class.define("osparc.desktop.StudyEditor", { osparc.data.Resources.fetch("runPipeline", "startPipeline", params) .then(resp => this.__onPipelineSubmitted(resp)) .catch(err => { - let msg = err.message; const errStatus = err.status; if (errStatus == "409") { - this.getStudyLogger().error(null, "Pipeline is already running"); + osparc.FlashMessenger.logError(err); + const msg = osparc.FlashMessenger.extractMessage(err); + this.getStudyLogger().error(null, msg); } else if (errStatus == "422") { this.getStudyLogger().info(null, "The pipeline is up-to-date"); - msg = this.tr("The pipeline is up-to-date. Do you want to re-run it?"); + const msg = this.tr("The pipeline is up-to-date. Do you want to re-run it?"); const win = new osparc.ui.window.Confirmation(msg).set({ caption: this.tr("Re-run"), confirmText: this.tr("Run"), @@ -618,10 +770,8 @@ qx.Class.define("osparc.desktop.StudyEditor", { this.__requestStartPipeline(studyId, partialPipeline, true); } }, this); - } else if (err.status == "402") { - osparc.FlashMessenger.logAs(msg, "WARNING"); } else { - osparc.FlashMessenger.logAs(msg, "WARNING"); + osparc.FlashMessenger.logError(err); this.getStudyLogger().error(null, "Unsuccessful pipeline submission"); } this.getStudy().setPipelineRunning(false); @@ -642,17 +792,6 @@ qx.Class.define("osparc.desktop.StudyEditor", { this.__reloadSnapshotsAndIterations(); } this.getStudyLogger().info(null, "Pipeline started"); - /* If no projectStateUpdated comes in 60 seconds, client must - check state of pipeline and update button accordingly. */ - const timer = setTimeout(() => { - osparc.store.Store.getInstance().getStudyState(pipelineId); - }, 60000); - const socket = osparc.wrapper.WebSocket.getInstance(); - socket.getSocket().once("projectStateUpdated", ({ "project_uuid": projectUuid }) => { - if (projectUuid === pipelineId) { - clearTimeout(timer); - } - }); } }, @@ -809,6 +948,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { }, this); }, + // ------------------ IDLING TRACKER ------------------ __startIdlingTracker: function() { if (this.__studyEditorIdlingTracker) { this.__studyEditorIdlingTracker.stop(); @@ -825,62 +965,55 @@ qx.Class.define("osparc.desktop.StudyEditor", { this.__studyEditorIdlingTracker = null; } }, - - __startAutoSaveTimer: function() { - // Save every 3 seconds - const timer = this.__autoSaveTimer = new qx.event.Timer(this.self().AUTO_SAVE_INTERVAL); - timer.addListener("interval", () => { - if (!osparc.wrapper.WebSocket.getInstance().isConnected()) { - return; - } - this.__checkStudyChanges(); - }, this); - timer.start(); - }, - - __stopAutoSaveTimer: function() { - if (this.__autoSaveTimer && this.__autoSaveTimer.isEnabled()) { - this.__autoSaveTimer.stop(); - this.__autoSaveTimer.setEnabled(false); - } - }, - - __restartAutoSaveTimer: function() { - if (this.__autoSaveTimer && this.__autoSaveTimer.isEnabled()) { - this.__autoSaveTimer.restart(); - } - }, + // ------------------ IDLING TRACKER ------------------ __stopTimers: function() { this.__stopIdlingTracker(); - this.__stopAutoSaveTimer(); }, __getStudyDiffs: function() { - const newObj = this.getStudy().serialize(); - const delta = osparc.wrapper.JsonDiffPatch.getInstance().diff(this.__studyDataInBackend, newObj); + const sourceStudy = this.getStudy().serialize(); + const studyDiffs = { + sourceStudy, + delta: {}, + } + const delta = osparc.wrapper.JsonDiffPatch.getInstance().diff(this.__lastSyncedProjectDocument, sourceStudy); if (delta) { // 
lastChangeDate and creationDate should not be taken into account as a data change delete delta["creationDate"]; delete delta["lastChangeDate"]; - return delta; + studyDiffs.delta = delta; } - return {}; + return studyDiffs; }, + // didStudyChange takes around 0.5ms didStudyChange: function() { const studyDiffs = this.__getStudyDiffs(); - return Boolean(Object.keys(studyDiffs).length); + const changed = Boolean(Object.keys(studyDiffs.delta).length); + this.getStudy().setSavePending(changed); + return changed; }, - __checkStudyChanges: function() { - if (this.didStudyChange()) { - if (this.__updatingStudy > 0) { - // throttle update - this.__updateThrottled = true; - } else { + /** + * @param {JSON Patch} patchData It will soon be used to patch the project document https://datatracker.ietf.org/doc/html/rfc6902 + */ + projectDocumentChanged: function(patchData) { + patchData["userGroupId"] = osparc.auth.Data.getInstance().getGroupId(); + // avoid echo loop + if (this.__blockUpdates) { + return; + } + + this.getStudy().setSavePending(true); + // throttling: do not update study document right after a change, wait for THROTTLE_PATCH_TIME + if (!this.__throttledPatchPending) { + this.__throttledPatchPending = true; + + setTimeout(() => { this.updateStudyDocument(); - } + this.__throttledPatchPending = false; + }, this.self().THROTTLE_PATCH_TIME); } }, @@ -891,10 +1024,11 @@ qx.Class.define("osparc.desktop.StudyEditor", { }); } + this.getStudy().setSavePending(true); this.__updatingStudy++; const studyDiffs = this.__getStudyDiffs(); - return this.getStudy().patchStudyDelayed(studyDiffs) - .then(studyData => this.__setStudyDataInBackend(studyData)) + return this.getStudy().patchStudyDiffs(studyDiffs.delta, studyDiffs.sourceStudy) + .then(studyData => this.__setLastSyncedProjectDocument(studyData)) .catch(error => { if ("status" in error && error.status === 409) { console.log("Flash message blocked"); // Workaround for osparc-issues #1189 @@ -906,6 +1040,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { throw error; }) .finally(() => { + this.getStudy().setSavePending(false); this.__updatingStudy--; if (this.__updateThrottled && this.__updatingStudy === 0) { this.__updateThrottled = false; @@ -915,13 +1050,7 @@ qx.Class.define("osparc.desktop.StudyEditor", { }, __closeStudy: function() { - const params = { - url: { - "studyId": this.getStudy().getUuid() - }, - data: osparc.utils.Utils.getClientSessionID() - }; - osparc.data.Resources.fetch("studies", "close", params) + osparc.store.Study.getInstance().closeStudy(this.getStudy().getUuid()) .catch(err => console.error(err)); }, diff --git a/services/static-webserver/client/source/class/osparc/desktop/StudyEditorIdlingTracker.js b/services/static-webserver/client/source/class/osparc/desktop/StudyEditorIdlingTracker.js index 4c8575f0442c..e258f88e73ea 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/StudyEditorIdlingTracker.js +++ b/services/static-webserver/client/source/class/osparc/desktop/StudyEditorIdlingTracker.js @@ -46,7 +46,7 @@ qx.Class.define("osparc.desktop.StudyEditorIdlingTracker", { } let msg = qx.locale.Manager.tr("Are you still there?") + "
"; - msg += `If not, ${osparc.store.StaticInfo.getInstance().getDisplayName()} will try to close the ${osparc.product.Utils.getStudyAlias()} in:`; + msg += `If not, ${osparc.store.StaticInfo.getDisplayName()} will try to close the ${osparc.product.Utils.getStudyAlias()} in:`; msg += osparc.utils.Utils.formatSeconds(timeoutSec); this.__idleFlashMessage.setMessage(msg); }, diff --git a/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js b/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js index ade394646951..21f87e6174fd 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js +++ b/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js @@ -55,10 +55,46 @@ qx.Class.define("osparc.desktop.WorkbenchView", { }, openNodeDataManager: function(node) { - const win = osparc.widget.StudyDataManager.popUpInWindow(node.getStudy().getUuid(), node.getNodeId(), node.getLabel()); + const win = osparc.widget.StudyDataManager.popUpInWindow(node.getStudy().serialize(), node.getNodeId(), node.getLabel()); const closeBtn = win.getChildControl("close-button"); osparc.utils.Utils.setIdToWidget(closeBtn, "nodeDataManagerCloseBtn"); - } + }, + + __handleIframeStateChange: function(node, iframeLayout) { + if (iframeLayout.classname === "osparc.viewer.NodeViewer") { + iframeLayout._removeAll(); + } else { + iframeLayout.removeAll(); + } + if (node && node.getIFrame()) { + const iFrame = node.getIFrame(); + const src = iFrame.getSource(); + let showPage = iFrame; + if (node.getStatus().getLockState().isLockedBySomeoneElse()) { + showPage = node.getLockedPage(); + } else if (src === null || src === "about:blank") { + showPage = node.getLoadingPage(); + } + if (iframeLayout.classname === "osparc.viewer.NodeViewer") { + iframeLayout._add(showPage, { + flex: 1 + }); + } else { + iframeLayout.add(showPage, { + flex: 1 + }); + } + } + }, + + listenToIframeStateChanges: function(node, iframeLayout) { + if (node && node.getIFrame()) { + const iFrame = node.getIFrame(); + node.getIframeHandler().addListener("iframeStateChanged", () => this.__handleIframeStateChange(node, iframeLayout), this); + iFrame.addListener("load", () => this.__handleIframeStateChange(node, iframeLayout)); + this.__handleIframeStateChange(node, iframeLayout); + } + }, }, events: { @@ -246,6 +282,14 @@ qx.Class.define("osparc.desktop.WorkbenchView", { this.__connectEvents(); study.getWorkbench().addListener("pipelineChanged", () => this.__evalSlidesButtons()); + study.getWorkbench().addListener("nodeAdded", e => { + const node = e.getData(); + this.__nodeAdded(node); + }); + study.getWorkbench().addListener("nodeRemoved", e => { + const {nodeId, connectedEdgeIds} = e.getData(); + this.__nodeRemoved(nodeId, connectedEdgeIds); + }); study.getUi().getSlideshow().addListener("changeSlideshow", () => this.__evalSlidesButtons()); study.getUi().addListener("changeMode", () => this.__evalSlidesButtons()); this.__evalSlidesButtons(); @@ -349,6 +393,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", { alignX: "center", marginLeft: 10 }); + // do not allow modifying the pipeline this.getStudy().bind("pipelineRunning", addNewNodeBtn, "enabled", { converter: running => !running }); @@ -387,7 +432,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", { }); this.__addTopBarSpacer(topBar); - const studyOptionsPage = this.__studyOptionsPage = this.__createTabPage("@FontAwesome5Solid/book", this.tr("Study options")); + const studyOptionsPage = 
this.__studyOptionsPage = this.__createTabPage("@FontAwesome5Solid/book", this.tr("Project options")); studyOptionsPage.getLayout().set({ separator: "separator-vertical", spacing: 10 @@ -438,7 +483,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", { this.__addTopBarSpacer(topBar); - const commentsButton = new qx.ui.form.Button().set({ + const conversationButton = new qx.ui.form.Button().set({ appearance: "form-button-outlined", toolTipText: this.tr("Conversations"), icon: "@FontAwesome5Solid/comments/16", @@ -446,8 +491,9 @@ qx.Class.define("osparc.desktop.WorkbenchView", { marginTop: 7, ...osparc.navigation.NavigationBar.BUTTON_OPTIONS }); - commentsButton.addListener("execute", () => osparc.study.Conversations.popUpInWindow(study.serialize())); - topBar.add(commentsButton); + osparc.study.Conversations.makeButtonBlink(conversationButton); + conversationButton.addListener("execute", () => osparc.study.Conversations.popUpInWindow(study.serialize())); + topBar.add(conversationButton); const startAppButtonTB = this.__startAppButtonTB = new qx.ui.form.Button().set({ appearance: "form-button-outlined", @@ -611,7 +657,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", { }, this); workbench.addListener("fileRequested", () => { - if (this.getStudy().getUi().getMode() === "workbench") { + if (["workbench", "pipeline"].includes(this.getStudy().getUi().getMode())) { const tabViewLeftPanel = this.getChildControl("side-panel-left-tabs"); tabViewLeftPanel.setSelection([this.__storagePage]); } @@ -746,9 +792,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", { widget.addListener("restore", () => this.setMaximized(false), this); } }); - node.getIframeHandler().addListener("iframeChanged", () => this.__iFrameChanged(node), this); - iFrame.addListener("load", () => this.__iFrameChanged(node), this); - this.__iFrameChanged(node); + osparc.desktop.WorkbenchView.listenToIframeStateChanges(node, this.__iframePage); } else { // This will keep what comes after at the bottom this.__iframePage.add(new qx.ui.core.Spacer(), { @@ -757,20 +801,6 @@ qx.Class.define("osparc.desktop.WorkbenchView", { } }, - __iFrameChanged: function(node) { - this.__iframePage.removeAll(); - - if (node && node.getIFrame()) { - const loadingPage = node.getLoadingPage(); - const iFrame = node.getIFrame(); - const src = iFrame.getSource(); - const iFrameView = (src === null || src === "about:blank") ? 
loadingPage : iFrame; - this.__iframePage.add(iFrameView, { - flex: 1 - }); - } - }, - __populateSecondaryColumn: function(node) { [ this.__studyOptionsPage, @@ -793,6 +823,16 @@ qx.Class.define("osparc.desktop.WorkbenchView", { } else if (node) { this.__populateSecondaryColumnNode(node); } + + if ( + node instanceof osparc.data.model.Node && + node.isComputational() && + node.getPropsForm() + ) { + node.getStudy().bind("pipelineRunning", node.getPropsForm(), "enabled", { + converter: pipelineRunning => !pipelineRunning + }); + } }, __populateSecondaryColumnStudy: function(study) { @@ -873,25 +913,34 @@ qx.Class.define("osparc.desktop.WorkbenchView", { __getAnnotationsSection: function() { const annotationsSection = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)); - annotationsSection.add(new qx.ui.basic.Label(this.tr("Annotations")).set({ + annotationsSection.add(new qx.ui.basic.Label(this.tr("Add to Workbench")).set({ font: "text-14" })); - const annotationsButtons = new qx.ui.container.Composite(new qx.ui.layout.HBox(5)); + const annotationsButtons = new qx.ui.container.Composite(new qx.ui.layout.Flow(5, 5)); annotationsSection.add(annotationsButtons); const buttonsHeight = 28; + + const addConversationBtn = new qx.ui.form.Button().set({ + label: this.tr("Conversation"), + icon: "@FontAwesome5Solid/comment/14", + height: buttonsHeight + }); + addConversationBtn.addListener("execute", () => this.__workbenchUI.startConversation(), this); + annotationsButtons.add(addConversationBtn); + const addNoteBtn = new qx.ui.form.Button().set({ label: this.tr("Note"), - icon: "@FontAwesome5Solid/plus/14", + icon: "@FontAwesome5Solid/sticky-note/14", height: buttonsHeight }); addNoteBtn.addListener("execute", () => this.__workbenchUI.startAnnotationsNote(), this); annotationsButtons.add(addNoteBtn); const addRectBtn = new qx.ui.form.Button().set({ - label: this.tr("Rectangle"), - icon: "@FontAwesome5Solid/plus/14", + label: this.tr("Box"), + icon: "@FontAwesome5Regular/square/14", height: buttonsHeight }); addRectBtn.addListener("execute", () => this.__workbenchUI.startAnnotationsRect(), this); @@ -899,7 +948,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", { const addTextBtn = new qx.ui.form.Button().set({ label: this.tr("Text"), - icon: "@FontAwesome5Solid/plus/14", + icon: "@FontAwesome5Solid/font/14", height: buttonsHeight }); addTextBtn.addListener("execute", () => this.__workbenchUI.startAnnotationsText(), this); @@ -1025,7 +1074,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", { this.__serviceOptionsPage.bind("width", vBox, "width"); // HEADER - const nodeMetadata = node.getMetaData(); + const nodeMetadata = node.getMetadata(); const version = osparc.store.Services.getVersionDisplay(nodeMetadata["key"], nodeMetadata["version"]); const header = new qx.ui.basic.Label(`${nodeMetadata["name"]} ${version}`).set({ paddingLeft: 5 @@ -1033,7 +1082,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", { vBox.add(header); // INPUTS FORM - if (node.isPropertyInitialized("propsForm") && node.getPropsForm()) { + if (node.hasPropsForm()) { const inputsForm = node.getPropsForm(); const inputs = new osparc.desktop.PanelView(this.tr("Inputs"), inputsForm); inputs._innerContainer.set({ @@ -1090,8 +1139,6 @@ qx.Class.define("osparc.desktop.WorkbenchView", { } const nodeOptions = new osparc.widget.NodeOptions(node); - nodeOptions.buildLayout(); - return nodeOptions; }, @@ -1115,6 +1162,19 @@ qx.Class.define("osparc.desktop.WorkbenchView", { this.addListener("disappear", () => 
qx.event.message.Bus.getInstance().unsubscribe("maximizeIframe", maximizeIframeCb, this), this); }, + __nodeAdded: function(node) { + this.__workbenchUI.addNode(node, node.getPosition()); + }, + + __nodeRemoved: function(nodeId, connectedEdgeIds) { + // remove first the connected edges + connectedEdgeIds.forEach(edgeId => { + this.__workbenchUI.clearEdge(edgeId); + }); + // then remove the node + this.__workbenchUI.clearNode(nodeId); + }, + __removeNode: function(nodeId) { const workbench = this.getStudy().getWorkbench(); const node = workbench.getNode(nodeId); @@ -1162,18 +1222,9 @@ qx.Class.define("osparc.desktop.WorkbenchView", { } }, - __doRemoveNode: async function(nodeId) { + __doRemoveNode: function(nodeId) { const workbench = this.getStudy().getWorkbench(); - const connectedEdges = workbench.getConnectedEdges(nodeId); - const removed = await workbench.removeNode(nodeId); - if (removed) { - // remove first the connected edges - for (let i = 0; i < connectedEdges.length; i++) { - const edgeId = connectedEdges[i]; - this.__workbenchUI.clearEdge(edgeId); - } - this.__workbenchUI.clearNode(nodeId); - } + workbench.removeNode(nodeId); if ([this.__currentNodeId, null].includes(this.__nodesTree.getCurrentNodeId())) { this.nodeSelected(this.getStudy().getUuid()); } diff --git a/services/static-webserver/client/source/class/osparc/desktop/ZoomButtons.js b/services/static-webserver/client/source/class/osparc/desktop/ZoomButtons.js index 78a61438ea1c..2da87a0cddef 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/ZoomButtons.js +++ b/services/static-webserver/client/source/class/osparc/desktop/ZoomButtons.js @@ -28,8 +28,6 @@ * */ -const ZOOM_BUTTON_SIZE = 32; - qx.Class.define("osparc.desktop.ZoomButtons", { extend: qx.ui.toolbar.ToolBar, @@ -52,6 +50,10 @@ qx.Class.define("osparc.desktop.ZoomButtons", { "zoomReset": "qx.event.type.Event" }, + statics: { + ZOOM_BUTTON_SIZE: 32, + }, + members: { __buildLayout: function() { this.add(this.__getZoomOutButton()); @@ -64,9 +66,9 @@ qx.Class.define("osparc.desktop.ZoomButtons", { appearance: "form-button-outlined", padding: [5, 5], marginLeft: 10, - width: ZOOM_BUTTON_SIZE, - height: ZOOM_BUTTON_SIZE, - maxHeight: ZOOM_BUTTON_SIZE + width: this.self().ZOOM_BUTTON_SIZE, + height: this.self().ZOOM_BUTTON_SIZE, + maxHeight: this.self().ZOOM_BUTTON_SIZE, }); if (tooltip) { btn.setToolTipText(tooltip); diff --git a/services/static-webserver/client/source/class/osparc/desktop/account/DeleteAccount.js b/services/static-webserver/client/source/class/osparc/desktop/account/DeleteAccount.js index 8c3d301923e8..f6d1340e084a 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/account/DeleteAccount.js +++ b/services/static-webserver/client/source/class/osparc/desktop/account/DeleteAccount.js @@ -38,8 +38,8 @@ qx.Class.define("osparc.desktop.account.DeleteAccount", { let control = null; switch (id) { case "intro-text": { - const supportEmail = osparc.store.VendorInfo.getInstance().getSupportEmail(); - const retentionDays = osparc.store.StaticInfo.getInstance().getAccountDeletionRetentionDays(); + const supportEmail = osparc.store.VendorInfo.getSupportEmail(); + const retentionDays = osparc.store.StaticInfo.getAccountDeletionRetentionDays(); const text = this.tr(`\ This account will be deleted in ${retentionDays} days.
\ During this period, if you want to recover it or delete your\ @@ -111,7 +111,7 @@ qx.Class.define("osparc.desktop.account.DeleteAccount", { password: form.getItem("password").getValue() } }; - const retentionDays = osparc.store.StaticInfo.getInstance().getAccountDeletionRetentionDays(); + const retentionDays = osparc.store.StaticInfo.getAccountDeletionRetentionDays(); osparc.data.Resources.fetch("auth", "unregister", params) .then(() => { diff --git a/services/static-webserver/client/source/class/osparc/desktop/account/MyAccount.js b/services/static-webserver/client/source/class/osparc/desktop/account/MyAccount.js index a77f891f3287..7c70607500ed 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/account/MyAccount.js +++ b/services/static-webserver/client/source/class/osparc/desktop/account/MyAccount.js @@ -46,19 +46,21 @@ qx.Class.define("osparc.desktop.account.MyAccount", { }, statics: { - createMiniProfileView: function(withSpacer = true) { + createMiniProfileView: function(userData) { const layout = new qx.ui.container.Composite(new qx.ui.layout.VBox(6)).set({ alignX: "center", minWidth: 120, maxWidth: 150 }); - const authData = osparc.auth.Data.getInstance(); - const username = authData.getUsername(); - const email = authData.getEmail(); + if (!userData) { + userData = osparc.auth.Data.getInstance(); + } + const userName = userData.getUserName(); + const email = userData.getEmail(); const avatarSize = 80; const img = new qx.ui.basic.Image().set({ - source: osparc.utils.Avatar.emailToThumbnail(email, username, avatarSize), + source: osparc.utils.Avatar.emailToThumbnail(email, userName, avatarSize), maxWidth: avatarSize, maxHeight: avatarSize, scale: true, @@ -69,27 +71,27 @@ qx.Class.define("osparc.desktop.account.MyAccount", { }); layout.add(img); - const usernameLabel = new qx.ui.basic.Label().set({ + const userNameLabel = new qx.ui.basic.Label().set({ font: "text-14", alignX: "center" }); - authData.bind("username", usernameLabel, "value"); - layout.add(usernameLabel); + userData.bind("userName", userNameLabel, "value"); + layout.add(userNameLabel); const fullNameLabel = new qx.ui.basic.Label().set({ font: "text-13", alignX: "center" }); layout.add(fullNameLabel); - authData.bind("firstName", fullNameLabel, "value", { - converter: () => authData.getFullName() + userData.bind("firstName", fullNameLabel, "value", { + converter: () => userData.getFullName() }); - authData.bind("lastName", fullNameLabel, "value", { - converter: () => authData.getFullName() + userData.bind("lastName", fullNameLabel, "value", { + converter: () => userData.getFullName() }); - if (authData.getRole() !== "user") { - const role = authData.getFriendlyRole(); + if (userData.getRole() !== "user") { + const role = userData.getFriendlyRole(); const roleLabel = new qx.ui.basic.Label(role).set({ font: "text-13", alignX: "center" @@ -97,9 +99,7 @@ qx.Class.define("osparc.desktop.account.MyAccount", { layout.add(roleLabel); } - if (withSpacer) { - layout.add(new qx.ui.core.Spacer(15, 15)); - } + layout.add(new qx.ui.core.Spacer(15, 15)); return layout; } diff --git a/services/static-webserver/client/source/class/osparc/desktop/account/ProfilePage.js b/services/static-webserver/client/source/class/osparc/desktop/account/ProfilePage.js index e522d53975f8..e15647e63946 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/account/ProfilePage.js +++ b/services/static-webserver/client/source/class/osparc/desktop/account/ProfilePage.js @@ -19,7 +19,7 @@ /** * User profile in 
preferences dialog * - * - first name, last name, username, email + * - first name, last name, userName, email * */ @@ -31,35 +31,67 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { this._setLayout(new qx.ui.layout.VBox(15)); - this.__userProfileData = {}; - this.__userPrivacyData = {}; - - this.__fetchProfile(); - this._add(this.__createProfileUser()); this._add(this.__createPrivacySection()); - if (osparc.store.StaticInfo.getInstance().is2FARequired()) { + if (osparc.store.StaticInfo.is2FARequired()) { this._add(this.__create2FASection()); } this._add(this.__createPasswordSection()); + this._add(this.__createContactSection()); this._add(this.__createDeleteAccount()); + + this.__userProfileData = {}; + this.__userPrivacyData = {}; + + this.__fetchMyProfile(); + }, + + statics: { + PROFILE: { + POS: { + USERNAME: 0, + FIRST_NAME: 1, + LAST_NAME: 2, + EMAIL: 3, + PHONE: 4, + }, + }, + + createSectionBox: function(title) { + const box = new osparc.widget.SectionBox(title).set({ + alignX: "left", + maxWidth: 500 + }); + return box; + }, }, members: { __userProfileData: null, __userProfileModel: null, + __userProfileForm: null, __userProfileRenderer: null, __updateProfileBtn: null, __userPrivacyData: null, __userPrivacyModel: null, + __privacyRenderer: null, __updatePrivacyBtn: null, - __userProfileForm: null, + __sms2FAItem: null, + __personalInfoModel: null, + __personalInfoRenderer: null, + + __fetchMyProfile: function() { + this.__userProfileRenderer.setEnabled(false); + this.__privacyRenderer.setEnabled(false); + this.__personalInfoRenderer.setEnabled(false); - __fetchProfile: function() { osparc.data.Resources.getOne("profile", {}, null, false) .then(profile => { this.__setDataToProfile(profile); this.__setDataToPrivacy(profile["privacy"]); + this.__userProfileRenderer.setEnabled(true); + this.__privacyRenderer.setEnabled(true); + this.__personalInfoRenderer.setEnabled(true); }) .catch(err => console.error(err)); }, @@ -68,48 +100,82 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { if (data) { this.__userProfileData = data; this.__userProfileModel.set({ - "username": data["userName"] || "", + "userName": data["userName"] || "", "firstName": data["first_name"] || "", "lastName": data["last_name"] || "", "email": data["login"], + "phone": data["phone"] || "-", "expirationDate": data["expirationDate"] || null, }); + if (data["contact"]) { + const contact = data["contact"]; + this.__personalInfoModel.set({ + "institution": contact["institution"] || "", + "address": contact["address"] || "", + "city": contact["city"] || "", + "state": contact["state"] || "", + "country": contact["country"] || "", + "postalCode": contact["postalCode"] || "", + }); + } } this.__updateProfileBtn.setEnabled(false); + + if (this.__sms2FAItem) { + this.__sms2FAItem.setEnabled(Boolean(data["phone"])); + } }, __setDataToPrivacy: function(privacyData) { if (privacyData) { this.__userPrivacyData = privacyData; this.__userPrivacyModel.set({ - "hideUsername": "hideUsername" in privacyData ? privacyData["hideUsername"] : false, + "hideUserName": "hideUserName" in privacyData ? privacyData["hideUserName"] : false, "hideFullname": "hideFullname" in privacyData ? privacyData["hideFullname"] : true, "hideEmail": "hideEmail" in privacyData ? privacyData["hideEmail"] : true, }); const visibleIcon = "@FontAwesome5Solid/eye/12"; const hiddenIcon = "@FontAwesome5Solid/eye-slash/12"; - const icons = { - 0: this.__userPrivacyModel.getHideUsername() ? 
hiddenIcon : visibleIcon, - 1: this.__userPrivacyModel.getHideFullname() ? hiddenIcon : visibleIcon, - 2: this.__userPrivacyModel.getHideFullname() ? hiddenIcon : visibleIcon, - 3: this.__userPrivacyModel.getHideEmail() ? hiddenIcon : visibleIcon, + const createImage = source => { + return new qx.ui.basic.Image(source).set({ + alignX: "center", + alignY: "middle", + }); + } + const pos = this.self().PROFILE.POS; + const widgets = { + [pos.USERNAME]: createImage(this.__userPrivacyModel.getHideUserName() ? hiddenIcon : visibleIcon), + [pos.FIRST_NAME]: createImage(this.__userPrivacyModel.getHideFullname() ? hiddenIcon : visibleIcon), + [pos.LAST_NAME]: createImage(this.__userPrivacyModel.getHideFullname() ? hiddenIcon : visibleIcon), + [pos.EMAIL]: createImage(this.__userPrivacyModel.getHideEmail() ? hiddenIcon : visibleIcon), }; - this.__userProfileRenderer.setIcons(icons); + if (osparc.store.StaticInfo.isUpdatePhoneNumberEnabled()) { + const updatePhoneNumberButton = new qx.ui.form.Button(null, "@FontAwesome5Solid/pencil-alt/12").set({ + padding: [1, 5], + }); + updatePhoneNumberButton.addListener("execute", () => this.__openPhoneNumberUpdater(), this); + widgets[pos.PHONE] = updatePhoneNumberButton; + } + this.__userProfileRenderer.setWidgets(widgets); } this.__updatePrivacyBtn.setEnabled(false); }, + __resetUserData: function() { + this.__setDataToProfile(this.__userProfileData); + }, + + __resetPrivacyData: function() { + this.__setDataToPrivacy(this.__userPrivacyData); + }, + __createProfileUser: function() { // layout - const box = osparc.ui.window.TabbedView.createSectionBox(this.tr("User")); - box.set({ - alignX: "left", - maxWidth: 500 - }); + const box = this.self().createSectionBox(this.tr("User")); - const username = new qx.ui.form.TextField().set({ - placeholder: this.tr("username") + const userName = new qx.ui.form.TextField().set({ + placeholder: this.tr("userName") }); const firstName = new qx.ui.form.TextField().set({ @@ -124,13 +190,21 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { readOnly: true }); + const phoneNumber = new qx.ui.form.TextField().set({ + placeholder: this.tr("Phone Number"), + readOnly: true + }); + const profileForm = this.__userProfileForm = new qx.ui.form.Form(); - profileForm.add(username, "Username", null, "username"); + profileForm.add(userName, "UserName", null, "userName"); profileForm.add(firstName, "First Name", null, "firstName"); profileForm.add(lastName, "Last Name", null, "lastName"); profileForm.add(email, "Email", null, "email"); - const singleWithIcon = this.__userProfileRenderer = new osparc.ui.form.renderer.SingleWithIcon(profileForm); - box.add(singleWithIcon); + if (osparc.store.StaticInfo.is2FARequired()) { + profileForm.add(phoneNumber, "Phone Number", null, "phone"); + } + this.__userProfileRenderer = new osparc.ui.form.renderer.SingleWithWidget(profileForm); + box.add(this.__userProfileRenderer); const expirationLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(5)).set({ paddingLeft: 16, @@ -144,24 +218,25 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { expirationLayout.add(expirationDate); const infoLabel = this.tr("Please contact us via email:
"); const infoExtension = new osparc.ui.hint.InfoHint(infoLabel); - const supportEmail = osparc.store.VendorInfo.getInstance().getSupportEmail(); + const supportEmail = osparc.store.VendorInfo.getSupportEmail(); infoExtension.setHintText(infoLabel + supportEmail); expirationLayout.add(infoExtension); box.add(expirationLayout); // binding to a model const raw = { - "username": "", + "userName": "", "firstName": "", "lastName": "", "email": "", + "phone": "", "expirationDate": null, }; const model = this.__userProfileModel = qx.data.marshal.Json.createModel(raw); const controller = new qx.data.controller.Object(model); - controller.addTarget(username, "value", "username", true); + controller.addTarget(userName, "value", "userName", true); controller.addTarget(email, "value", "email", true); controller.addTarget(firstName, "value", "firstName", true, null, { converter: function(data) { @@ -169,6 +244,7 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { } }); controller.addTarget(lastName, "value", "lastName", true); + controller.addTarget(phoneNumber, "value", "phone", true); controller.addTarget(expirationDate, "value", "expirationDate", false, { converter: expirationDay => { if (expirationDay) { @@ -203,8 +279,8 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { } const patchData = {}; - if (this.__userProfileData["userName"] !== model.getUsername()) { - patchData["userName"] = model.getUsername(); + if (this.__userProfileData["userName"] !== model.getUserName()) { + patchData["userName"] = model.getUserName(); } if (this.__userProfileData["first_name"] !== model.getFirstName()) { patchData["first_name"] = model.getFirstName(); @@ -234,18 +310,17 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { }); const profileFields = [ - username, + userName, firstName, lastName, ] const valueChanged = () => { const anyChanged = - username.getValue() !== this.__userProfileData["userName"] || + userName.getValue() !== this.__userProfileData["userName"] || firstName.getValue() !== this.__userProfileData["first_name"] || lastName.getValue() !== this.__userProfileData["last_name"]; updateProfileBtn.setEnabled(anyChanged); }; - valueChanged(); profileFields.forEach(privacyField => privacyField.addListener("changeValue", () => valueChanged())); return box; @@ -254,24 +329,18 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { __createPrivacySection: function() { // binding to a model const defaultModel = { - "hideUsername": false, + "hideUserName": false, "hideFullname": true, "hideEmail": true, }; const privacyModel = this.__userPrivacyModel = qx.data.marshal.Json.createModel(defaultModel, true); - const box = osparc.ui.window.TabbedView.createSectionBox(this.tr("Privacy")); - box.set({ - alignX: "left", - maxWidth: 500 - }); + const box = this.self().createSectionBox(this.tr("Privacy")); + box.addHelper(this.tr("Choose what others see.")); - const label = osparc.ui.window.TabbedView.createHelpLabel(this.tr("For Privacy reasons, you might want to hide some personal data.")); - box.add(label); - - const hideUsername = new qx.ui.form.CheckBox().set({ - value: defaultModel.hideUsername + const hideUserName = new qx.ui.form.CheckBox().set({ + value: defaultModel.hideUserName }); const hideFullname = new qx.ui.form.CheckBox().set({ value: defaultModel.hideFullname @@ -281,13 +350,14 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { }); const privacyForm = new qx.ui.form.Form(); - privacyForm.add(hideUsername, "Hide Username", null, "hideUsername"); + 
privacyForm.add(hideUserName, "Hide UserName", null, "hideUserName"); privacyForm.add(hideFullname, "Hide Full Name", null, "hideFullname"); privacyForm.add(hideEmail, "Hide Email", null, "hideEmail"); - box.add(new qx.ui.form.renderer.Single(privacyForm)); + this.__privacyRenderer = new qx.ui.form.renderer.Single(privacyForm); + box.add(this.__privacyRenderer); const privacyModelCtrl = new qx.data.controller.Object(privacyModel); - privacyModelCtrl.addTarget(hideUsername, "value", "hideUsername", true); + privacyModelCtrl.addTarget(hideUserName, "value", "hideUserName", true); privacyModelCtrl.addTarget(hideFullname, "value", "hideFullname", true); privacyModelCtrl.addTarget(hideEmail, "value", "hideEmail", true); @@ -307,8 +377,8 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { const patchData = { "privacy": {} }; - if (this.__userPrivacyData["hideUsername"] !== privacyModel.getHideUsername()) { - patchData["privacy"]["hideUsername"] = privacyModel.getHideUsername(); + if (this.__userPrivacyData["hideUserName"] !== privacyModel.getHideUserName()) { + patchData["privacy"]["hideUserName"] = privacyModel.getHideUserName(); } if (this.__userPrivacyData["hideFullname"] !== privacyModel.getHideFullname()) { patchData["privacy"]["hideFullname"] = privacyModel.getHideFullname(); @@ -357,13 +427,13 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { box.add(optOutMessage); const privacyFields = [ - hideUsername, + hideUserName, hideFullname, hideEmail, ] const valueChanged = () => { const anyChanged = - hideUsername.getValue() !== this.__userPrivacyData["hideUsername"] || + hideUserName.getValue() !== this.__userPrivacyData["hideUserName"] || hideFullname.getValue() !== this.__userPrivacyData["hideFullname"] || hideEmail.getValue() !== this.__userPrivacyData["hideEmail"]; updatePrivacyBtn.setEnabled(anyChanged); @@ -374,17 +444,14 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { optOutMessage.exclude(); } }; - valueChanged(); privacyFields.forEach(privacyField => privacyField.addListener("changeValue", () => valueChanged())); return box; }, __create2FASection: function() { - const box = osparc.ui.window.TabbedView.createSectionBox(this.tr("Two-Factor Authentication")); - - const label = osparc.ui.window.TabbedView.createHelpLabel(this.tr("Set your preferred method to use for two-factor authentication when signing in:")); - box.add(label); + const box = this.self().createSectionBox(this.tr("Two-Factor Authentication")); + box.addHelper(this.tr("Set your preferred method to use for two-factor authentication when signing in:")); const form = new qx.ui.form.Form(); @@ -404,6 +471,9 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { label: "Disabled" }].forEach(options => { const lItem = new qx.ui.form.ListItem(options.label, null, options.id); + if (options.id === "SMS") { + this.__sms2FAItem = lItem; + } twoFAPreferenceSB.add(lItem); }); const value = preferencesSettings.getTwoFAPreference(); @@ -448,21 +518,9 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { return box; }, - __resetUserData: function() { - this.__setDataToProfile(this.__userProfileData); - }, - - __resetPrivacyData: function() { - this.__setDataToPrivacy(this.__userPrivacyData); - }, - __createPasswordSection: function() { // layout - const box = osparc.ui.window.TabbedView.createSectionBox(this.tr("Password")); - box.set({ - alignX: "left", - maxWidth: 500 - }); + const box = this.self().createSectionBox(this.tr("Password")); const currentPassword = new 
osparc.ui.form.PasswordField().set({ required: true, @@ -528,13 +586,77 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { return box; }, - __createDeleteAccount: function() { + __createContactSection: function() { // layout - const box = osparc.ui.window.TabbedView.createSectionBox(this.tr("Danger Zone")).set({ - alignX: "left", - maxWidth: 500 + const box = this.self().createSectionBox(this.tr("Contact")); + + const institution = new qx.ui.form.TextField().set({ + placeholder: osparc.product.Utils.getInstitutionAlias().label, + readOnly: true, + }); + + const address = new qx.ui.form.TextField().set({ + placeholder: this.tr("Address"), + readOnly: true, + }); + const city = new qx.ui.form.TextField().set({ + placeholder: this.tr("City"), + readOnly: true, + }); + + const state = new qx.ui.form.TextField().set({ + placeholder: this.tr("State"), + readOnly: true, }); + const country = new qx.ui.form.TextField().set({ + placeholder: this.tr("Country"), + readOnly: true, + }); + + const postalCode = new qx.ui.form.TextField().set({ + placeholder: this.tr("Postal Code"), + readOnly: true, + }); + + const personalInfoForm = new qx.ui.form.Form(); + personalInfoForm.add(institution, osparc.product.Utils.getInstitutionAlias().label, null, "institution"); + personalInfoForm.add(address, this.tr("Address"), null, "address"); + personalInfoForm.add(city, this.tr("City"), null, "city"); + personalInfoForm.add(state, this.tr("State"), null, "state"); + personalInfoForm.add(country, this.tr("Country"), null, "country"); + personalInfoForm.add(postalCode, this.tr("Postal Code"), null, "postalCode"); + this.__personalInfoRenderer = new qx.ui.form.renderer.Single(personalInfoForm); + box.add(this.__personalInfoRenderer); + + // binding to a model + const raw = { + "institution": null, + "address": null, + "city": null, + "state": null, + "country": null, + "postalCode": null, + }; + + const model = this.__personalInfoModel = qx.data.marshal.Json.createModel(raw); + const controller = new qx.data.controller.Object(model); + + controller.addTarget(institution, "value", "institution", true); + controller.addTarget(address, "value", "address", true); + controller.addTarget(city, "value", "city", true); + controller.addTarget(state, "value", "state", true); + controller.addTarget(country, "value", "country", true); + controller.addTarget(postalCode, "value", "postalCode", true); + + return box; + }, + + __createDeleteAccount: function() { + // layout + const box = this.self().createSectionBox(this.tr("Delete Account")); + box.addHelper(this.tr("Request the deletion of your account.")); + const deleteBtn = new qx.ui.form.Button(this.tr("Delete Account")).set({ appearance: "danger-button", alignX: "right", @@ -549,6 +671,24 @@ qx.Class.define("osparc.desktop.account.ProfilePage", { box.add(deleteBtn); return box; - } + }, + + __openPhoneNumberUpdater: function() { + const verifyPhoneNumberView = new osparc.auth.ui.VerifyPhoneNumberView().set({ + userEmail: osparc.auth.Data.getInstance().getEmail(), + updatingNumber: true, + }); + verifyPhoneNumberView.getChildControl("title").exclude(); + verifyPhoneNumberView.getChildControl("send-via-email-button").exclude(); + const win = osparc.ui.window.Window.popUpInWindow(verifyPhoneNumberView, this.tr("Update Phone Number"), 330, 135).set({ + clickAwayClose: false, + resizable: false, + showClose: true + }); + verifyPhoneNumberView.addListener("done", () => { + win.close(); + this.__fetchMyProfile(); + }, this); + }, } }); diff --git 
a/services/static-webserver/client/source/class/osparc/desktop/credits/BuyCreditsStepper.js b/services/static-webserver/client/source/class/osparc/desktop/credits/BuyCreditsStepper.js index 9789fd0fbc64..557e862df0b3 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/BuyCreditsStepper.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/BuyCreditsStepper.js @@ -68,7 +68,7 @@ qx.Class.define("osparc.desktop.credits.BuyCreditsStepper", { const { paymentId, paymentFormUrl } = data; this.setPaymentId(paymentId) this.__iframe = new qx.ui.embed.Iframe(paymentFormUrl).set({ - decorator: "no-border-2" + decorator: "no-border-0" }); this.add(this.__iframe); this.setSelection([this.__iframe]) diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsIndicatorButton.js b/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsIndicatorButton.js index 25ae3083b791..c5e88e283ee7 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsIndicatorButton.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsIndicatorButton.js @@ -23,11 +23,6 @@ qx.Class.define("osparc.desktop.credits.CreditsIndicatorButton", { osparc.utils.Utils.setIdToWidget(this, "creditsIndicatorButton"); - this.set({ - cursor: "pointer", - padding: [3, 8] - }); - this.getChildControl("image").set({ width: 24, height: 24 @@ -64,16 +59,7 @@ qx.Class.define("osparc.desktop.credits.CreditsIndicatorButton", { }, __positionCreditsContainer: function() { - const bounds = this.getBounds(); - const cel = this.getContentElement(); - if (cel) { - const domEle = cel.getDomElement(); - if (domEle) { - const rect = domEle.getBoundingClientRect(); - bounds.left = parseInt(rect.x); - bounds.top = parseInt(rect.y); - } - } + const bounds = osparc.utils.Utils.getBounds(this); const bottom = bounds.top + bounds.height; const right = bounds.left + bounds.width; this.__creditsContainer.setPosition(right, bottom); @@ -84,9 +70,8 @@ qx.Class.define("osparc.desktop.credits.CreditsIndicatorButton", { }, __handleOutsideEvent: function(event) { - const offset = 0; - const onContainer = osparc.utils.Utils.isMouseOnElement(this.__creditsContainer, event, offset); - const onButton = osparc.utils.Utils.isMouseOnElement(this, event, offset); + const onContainer = osparc.utils.Utils.isMouseOnElement(this.__creditsContainer, event); + const onButton = osparc.utils.Utils.isMouseOnElement(this, event); if (!onContainer && !onButton) { this.__hideCreditsContainer(); } diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsSummary.js b/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsSummary.js index b6fa37b07acd..6d59821089e9 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsSummary.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/CreditsSummary.js @@ -46,7 +46,7 @@ qx.Class.define("osparc.desktop.credits.CreditsSummary", { WIDTH: 350, TIME_RANGES: [{ key: 1, - label: "Today" + label: "Last 24h" }, { key: 7, label: "Last week" @@ -116,8 +116,8 @@ qx.Class.define("osparc.desktop.credits.CreditsSummary", { const trItem = new qx.ui.form.ListItem(tr.label, null, tr.key); control.add(trItem); }); - // default one week - const found = control.getSelectables().find(trItem => trItem.getModel() === 7); + // default last 24h + const found = control.getSelectables().find(trItem => 
trItem.getModel() === 1); if (found) { control.setSelection([found]); } diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/DateFilters.js b/services/static-webserver/client/source/class/osparc/desktop/credits/DateFilters.js index b661a16b45f6..35d5e9d8dff1 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/DateFilters.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/DateFilters.js @@ -21,11 +21,25 @@ qx.Class.define("osparc.desktop.credits.DateFilters", { members: { _buildLayout() { this._removeAll(); - const defaultFrom = new Date() - defaultFrom.setMonth(defaultFrom.getMonth() - 1) - // Range defaults to previous month + + // Range defaults: today + const defaultFrom = new Date(); + const defaultTo = new Date(); + this.__from = this.__addDateInput("From", defaultFrom); - this.__until = this.__addDateInput("Until"); + this.__until = this.__addDateInput("Until", defaultTo); + + const todayBtn = new qx.ui.form.Button("Today").set({ + allowStretchY: false, + alignY: "bottom" + }); + todayBtn.addListener("execute", () => { + const today = new Date(); + this.__from.setValue(today); + this.__until.setValue(today); + }); + this._add(todayBtn); + const lastWeekBtn = new qx.ui.form.Button("Last week").set({ allowStretchY: false, alignY: "bottom" @@ -38,6 +52,7 @@ qx.Class.define("osparc.desktop.credits.DateFilters", { this.__until.setValue(today); }); this._add(lastWeekBtn); + const lastMonthBtn = new qx.ui.form.Button("Last month").set({ allowStretchY: false, alignY: "bottom" @@ -50,6 +65,7 @@ qx.Class.define("osparc.desktop.credits.DateFilters", { this.__until.setValue(today); }); this._add(lastMonthBtn); + const lastYearBtn = new qx.ui.form.Button("Last year").set({ allowStretchY: false, alignY: "bottom" diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/PaymentGatewayWindow.js b/services/static-webserver/client/source/class/osparc/desktop/credits/PaymentGatewayWindow.js index b50a0ec75c81..4189c99410cb 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/PaymentGatewayWindow.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/PaymentGatewayWindow.js @@ -21,7 +21,7 @@ qx.Class.define("osparc.desktop.credits.PaymentGatewayWindow", { statics: { popUp: function(url, title, options) { const iframe = new qx.ui.embed.Iframe(url).set({ - decorator: "no-border-2" + decorator: "no-border-0" }) return osparc.ui.window.Window.popUpInWindow(iframe, title, options.width, options.height).set({ clickAwayClose: false diff --git a/services/static-webserver/client/source/class/osparc/desktop/credits/Utils.js b/services/static-webserver/client/source/class/osparc/desktop/credits/Utils.js index a797f06a79aa..d9a4c93ec810 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/credits/Utils.js +++ b/services/static-webserver/client/source/class/osparc/desktop/credits/Utils.js @@ -50,10 +50,8 @@ qx.Class.define("osparc.desktop.credits.Utils", { }; }, - areWalletsEnabled: function() { - const statics = osparc.store.Store.getInstance().get("statics"); - return Boolean(statics && statics["isPaymentEnabled"]); + return Boolean(osparc.store.StaticInfo.getValue("isPaymentEnabled")); }, getNoWriteAccessInformationLabel: function() { diff --git a/services/static-webserver/client/source/class/osparc/desktop/organizations/MembersList.js b/services/static-webserver/client/source/class/osparc/desktop/organizations/MembersList.js index 
a155551492cc..4fb63a41a05c 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/organizations/MembersList.js +++ b/services/static-webserver/client/source/class/osparc/desktop/organizations/MembersList.js @@ -110,7 +110,7 @@ qx.Class.define("osparc.desktop.organizations.MembersList", { .then(values => { values.forEach(user => { if (user) { - this.__addMember(user.getUsername()); + this.__addMember(user.getUserName()); } }); }) @@ -158,10 +158,7 @@ qx.Class.define("osparc.desktop.organizations.MembersList", { }, configureItem: item => { item.subscribeToFilterGroup("organizationMembersList"); - item.getChildControl("thumbnail").getContentElement() - .setStyles({ - "border-radius": "16px" - }); + item.getChildControl("thumbnail").setDecorator("circled"); item.addListener("promoteToMember", e => { const listedMember = e.getData(); this.__promoteToUser(listedMember); @@ -328,8 +325,10 @@ qx.Class.define("osparc.desktop.organizations.MembersList", { const readAccessRole = osparc.data.Roles.ORG["read"]; const newAccessRights = readAccessRole.accessRights; + const orgId = this.__currentOrg.getGroupId(); + const userId = "id" in listedMember ? listedMember["id"] : listedMember["key"]; const groupsStore = osparc.store.Groups.getInstance(); - groupsStore.patchMember(this.__currentOrg.getGroupId(), listedMember["id"], newAccessRights) + groupsStore.patchAccessRights(orgId, userId, newAccessRights) .then(() => { osparc.FlashMessenger.logAs(this.tr(`Successfully promoted to ${readAccessRole.label}`)); this.__reloadOrgMembers(); @@ -348,7 +347,7 @@ qx.Class.define("osparc.desktop.organizations.MembersList", { const noReadAccessRole = osparc.data.Roles.ORG["noRead"]; const newAccessRights = noReadAccessRole.accessRights; const orgId = this.__currentOrg.getGroupId(); - const userId = "id" in listedMember ? listedMember["id"] : listedMember["key"] + const userId = "id" in listedMember ? 
listedMember["id"] : listedMember["key"]; const groupsStore = osparc.store.Groups.getInstance(); groupsStore.patchAccessRights(orgId, userId, newAccessRights) .then(() => { diff --git a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js index dd488f9b3287..82abb9e17120 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js +++ b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js @@ -69,6 +69,11 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", { statics: { sortOrganizations: function(a, b) { + const collabTypeOrder = osparc.store.Groups.COLLAB_TYPE_ORDER; + const typeDiff = collabTypeOrder.indexOf(a.getGroupType()) - collabTypeOrder.indexOf(b.getGroupType()); + if (typeDiff !== 0) { + return typeDiff; + } const sorted = osparc.share.Collaborators.sortByAccessRights(a.getAccessRights(), b.getAccessRights()); if (sorted !== 0) { return sorted; @@ -84,7 +89,7 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", { getOrgModel: function(orgId) { let org = null; this.__orgsModel.forEach(orgModel => { - if (orgModel.getGroupId() === parseInt(orgId)) { + if ("getGroupId" in orgModel && orgModel.getGroupId() === parseInt(orgId)) { org = orgModel; } }); @@ -141,15 +146,15 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", { ctrl.bindProperty("description", "subtitle", null, item, id); ctrl.bindProperty("groupMembers", "groupMembers", null, item, id); ctrl.bindProperty("accessRights", "accessRights", null, item, id); + // handle separator + ctrl.bindProperty("isSeparator", "enabled", { + converter: val => !val // disable clicks on separator + }, item, id); }, configureItem: item => { item.subscribeToFilterGroup("organizationsList"); osparc.utils.Utils.setIdToWidget(item, "organizationListItem"); - const thumbnail = item.getChildControl("thumbnail"); - thumbnail.getContentElement() - .setStyles({ - "border-radius": "16px" - }); + item.getChildControl("thumbnail").setDecorator("circled"); item.addListener("openEditOrganization", e => { const orgKey = e.getData(); @@ -160,6 +165,15 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", { const orgKey = e.getData(); this.__deleteOrganization(orgKey); }); + item.addListener("changeEnabled", e => { + if (!e.getData()) { + item.set({ + minHeight: 1, + maxHeight: 1, + decorator: "separator-strong", + }); + } + }); } }); @@ -184,7 +198,28 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", { const groupsStore = osparc.store.Groups.getInstance(); const orgs = Object.values(groupsStore.getOrganizations()); orgs.sort(this.self().sortOrganizations); - orgs.forEach(org => orgsModel.append(org)); + + // insert a separator between product and non-product groups + const productGroup = [ + osparc.store.Groups.COLLAB_TYPE.EVERYONE, + osparc.store.Groups.COLLAB_TYPE.SUPPORT, + ]; + const hasProductGroup = orgs.some(org => productGroup.includes(org.getGroupType())); + const hasNonProductGroup = orgs.some(org => !productGroup.includes(org.getGroupType())); + let separatorInserted = false; + orgs.forEach(org => { + const isProductGroup = productGroup.includes(org.getGroupType()); + // Only insert separator if both sides exist + if (!isProductGroup && hasProductGroup && hasNonProductGroup && !separatorInserted) { + const separator = { + isSeparator: true 
+ }; + orgsModel.append(qx.data.marshal.Json.createModel(separator)); + separatorInserted = true; + } + orgsModel.append(org); + }); + this.setOrganizationsLoaded(true); if (orgId) { this.fireDataEvent("organizationSelected", orgId); diff --git a/services/static-webserver/client/source/class/osparc/desktop/organizations/ServicesList.js b/services/static-webserver/client/source/class/osparc/desktop/organizations/ServicesList.js index 2265026c55a5..492d6b6ea4c9 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/organizations/ServicesList.js +++ b/services/static-webserver/client/source/class/osparc/desktop/organizations/ServicesList.js @@ -92,10 +92,12 @@ qx.Class.define("osparc.desktop.organizations.ServicesList", { .then(serviceData => { if (serviceData) { serviceData["resourceType"] = "service"; - const resourceDetails = new osparc.dashboard.ResourceDetails(serviceData).set({ + const { + resourceDetails, + } = osparc.dashboard.ResourceDetails.popUpInWindow(serviceData); + resourceDetails.set({ showOpenButton: false }); - osparc.dashboard.ResourceDetails.popUpInWindow(resourceDetails); } }); }); @@ -120,7 +122,7 @@ qx.Class.define("osparc.desktop.organizations.ServicesList", { const orgServices = []; Object.keys(servicesLatest).forEach(key => { const serviceLatest = servicesLatest[key]; - if (groupId in serviceLatest["accessRights"]) { + if (serviceLatest["accessRights"] && groupId in serviceLatest["accessRights"]) { orgServices.push(serviceLatest); } }); diff --git a/services/static-webserver/client/source/class/osparc/desktop/organizations/TutorialsList.js b/services/static-webserver/client/source/class/osparc/desktop/organizations/TutorialsList.js index 6636b4bd918e..c735aefe30a1 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/organizations/TutorialsList.js +++ b/services/static-webserver/client/source/class/osparc/desktop/organizations/TutorialsList.js @@ -90,10 +90,12 @@ qx.Class.define("osparc.desktop.organizations.TutorialsList", { .then(templateData => { if (templateData) { templateData["resourceType"] = "tutorial"; - const resourceDetails = new osparc.dashboard.ResourceDetails(templateData).set({ + const { + resourceDetails, + } = osparc.dashboard.ResourceDetails.popUpInWindow(templateData); + resourceDetails.set({ showOpenButton: false }); - osparc.dashboard.ResourceDetails.popUpInWindow(resourceDetails); } }); }); diff --git a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/ConfirmationsPage.js b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/ConfirmationsPage.js index c883c6bd37f0..3181f39ebb03 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/ConfirmationsPage.js +++ b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/ConfirmationsPage.js @@ -39,7 +39,9 @@ qx.Class.define("osparc.desktop.preferences.pages.ConfirmationsPage", { members: { __createConfirmationsSettings: function() { // layout - const label = osparc.ui.window.TabbedView.createHelpLabel(this.tr("Ask for confirmation for the following actions:")); + const label = new qx.ui.basic.Label(this.tr("Ask for confirmation for the following actions:")).set({ + font: "text-13", + }); this._add(label); this._add(new qx.ui.core.Spacer(null, 10)); @@ -125,12 +127,8 @@ qx.Class.define("osparc.desktop.preferences.pages.ConfirmationsPage", { __createExperimentalSettings: function() { // layout - const box = 
osparc.ui.window.TabbedView.createSectionBox("Experimental preferences"); - - const label = osparc.ui.window.TabbedView.createHelpLabel(this.tr( - "This is a list of experimental preferences" - )); - box.add(label); + const box = new osparc.widget.SectionBox("Experimental preferences"); + box.addHelper(this.tr("This is a list of experimental preferences")); const preferencesSettings = osparc.Preferences.getInstance(); diff --git a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/GeneralPage.js b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/GeneralPage.js index 6f02c5a65b96..d464c861d195 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/GeneralPage.js +++ b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/GeneralPage.js @@ -46,7 +46,7 @@ qx.Class.define("osparc.desktop.preferences.pages.GeneralPage", { members: { __addCreditsIndicatorSettings: function() { - const box = osparc.ui.window.TabbedView.createSectionBox(this.tr("Credits Indicator")); + const box = new osparc.widget.SectionBox(this.tr("Credits Indicator")); const form = new qx.ui.form.Form(); @@ -93,10 +93,9 @@ qx.Class.define("osparc.desktop.preferences.pages.GeneralPage", { }, __addInactivitySetting: function() { - const box = osparc.ui.window.TabbedView.createSectionBox(this.tr("Automatic Shutdown of Idle Instances")); + const box = new osparc.widget.SectionBox(this.tr("Automatic Shutdown of Idle Instances")); - const label = osparc.ui.window.TabbedView.createHelpLabel(this.tr("Enter 0 to disable this function"), "text-13-italic"); - box.add(label); + box.addHelper(this.tr("Enter 0 to disable this function")); const form = new qx.ui.form.Form(); const inactivitySpinner = new qx.ui.form.Spinner().set({ @@ -118,7 +117,7 @@ qx.Class.define("osparc.desktop.preferences.pages.GeneralPage", { }, __addJobConcurrencySetting: function() { - const box = osparc.ui.window.TabbedView.createSectionBox(this.tr("Job Concurrency")); + const box = new osparc.widget.SectionBox(this.tr("Job Concurrency")); const form = new qx.ui.form.Form(); const jobConcurrencySpinner = new qx.ui.form.Spinner().set({ minimum: 1, @@ -136,9 +135,9 @@ qx.Class.define("osparc.desktop.preferences.pages.GeneralPage", { }, __addLowDiskSpaceSetting: function() { - const box = osparc.ui.window.TabbedView.createSectionBox(this.tr("Low Disk Space Threshold")); - const label = osparc.ui.window.TabbedView.createHelpLabel(this.tr("Set the warning Threshold for Low Disk Space availability"), "text-13-italic"); - box.add(label); + const box = new osparc.widget.SectionBox(this.tr("Low Disk Space Threshold")); + box.addHelper(this.tr("Set the warning Threshold for Low Disk Space availability")); + const form = new qx.ui.form.Form(); const diskUsageSpinner = new qx.ui.form.Spinner().set({ minimum: 1, @@ -157,10 +156,8 @@ qx.Class.define("osparc.desktop.preferences.pages.GeneralPage", { }, __addS4LUserPrivacySettings: function() { - const box = osparc.ui.window.TabbedView.createSectionBox("Privacy Settings"); - - const label = osparc.ui.window.TabbedView.createHelpLabel(this.tr("Help us improve Sim4Life user experience"), "text-13-italic"); - box.add(label); + const box = new osparc.widget.SectionBox("Privacy Settings"); + box.addHelper(this.tr("Help us improve Sim4Life user experience")); const preferencesSettings = osparc.Preferences.getInstance(); diff --git 
a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TagsPage.js b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TagsPage.js index 1ac154c10759..76e0bcf3b219 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TagsPage.js +++ b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TagsPage.js @@ -21,7 +21,9 @@ qx.Class.define("osparc.desktop.preferences.pages.TagsPage", { const msg = this.tr("\ Tags help you organize the ") + studiesLabel + this.tr(" in the Dashboard by categorizing topics, making it easier to search and filter. \ Once the tags are created, they can be assigned to the ") + studyLabel + this.tr(" via 'More options...' on the ") + studyLabel + this.tr(" cards."); - const intro = osparc.ui.window.TabbedView.createHelpLabel(msg); + const intro = new qx.ui.basic.Label(msg).set({ + font: "text-13", + }); this._add(intro); this.__renderLayout(); diff --git a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TokensPage.js b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TokensPage.js index b22542d6d4d6..98ccab426f4c 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TokensPage.js +++ b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TokensPage.js @@ -43,12 +43,8 @@ qx.Class.define("osparc.desktop.preferences.pages.TokensPage", { __createAPIKeysSection: function() { // layout - const box = osparc.ui.window.TabbedView.createSectionBox(this.tr("API Keys")); - - const label = osparc.ui.window.TabbedView.createHelpLabel(this.tr( - "List API keys associated to your account." - )); - box.add(label); + const box = new osparc.widget.SectionBox(this.tr("API Keys")); + box.addHelper(this.tr("List API keys associated to your account.")); const apiKeysList = this.__apiKeysList = new qx.ui.container.Composite(new qx.ui.layout.VBox(8)); box.add(apiKeysList); @@ -173,10 +169,8 @@ qx.Class.define("osparc.desktop.preferences.pages.TokensPage", { __createTokensSection: function() { // layout - const box = osparc.ui.window.TabbedView.createSectionBox(this.tr("API Tokens for External Services")); - - const label = osparc.ui.window.TabbedView.createHelpLabel(this.tr("Provide the API tokens needed to access external services.")); - box.add(label); + const box = new osparc.widget.SectionBox(this.tr("API Tokens for External Services")); + box.addHelper(this.tr("Provide the API tokens needed to access external services.")); const validTokensGB = this.__validTokensGB = osparc.ui.window.TabbedView.createSectionBox(this.tr("Current Tokens")); box.add(validTokensGB); diff --git a/services/static-webserver/client/source/class/osparc/desktop/preferences/window/ShowAPIKey.js b/services/static-webserver/client/source/class/osparc/desktop/preferences/window/ShowAPIKey.js index 8154c1f62962..d75fde8a1f96 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/preferences/window/ShowAPIKey.js +++ b/services/static-webserver/client/source/class/osparc/desktop/preferences/window/ShowAPIKey.js @@ -76,7 +76,7 @@ qx.Class.define("osparc.desktop.preferences.window.ShowAPIKey", { const hBox = this.__createEntry(title); if (label) { // partially hide the key and secret - hBox.getChildren()[1].setValue(label.substring(1, 8) + "****") + hBox.getChildren()[1].setValue(label.substring(0, 8) + "****") } return hBox; }, diff --git 
a/services/static-webserver/client/source/class/osparc/desktop/wallets/MembersList.js b/services/static-webserver/client/source/class/osparc/desktop/wallets/MembersList.js index a38b322b5a48..3cf57a6ce6aa 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/wallets/MembersList.js +++ b/services/static-webserver/client/source/class/osparc/desktop/wallets/MembersList.js @@ -146,10 +146,7 @@ qx.Class.define("osparc.desktop.wallets.MembersList", { }, configureItem: item => { item.subscribeToFilterGroup("walletMembersList"); - item.getChildControl("thumbnail").getContentElement() - .setStyles({ - "border-radius": "16px" - }); + item.getChildControl("thumbnail").setDecorator("circled"); item.addListener("promoteToAccountant", e => { const listedMember = e.getData(); this.__promoteToAccountant(listedMember); diff --git a/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletListItem.js b/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletListItem.js index 5fdf8fe6c2bf..fc6750140763 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletListItem.js +++ b/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletListItem.js @@ -153,7 +153,7 @@ qx.Class.define("osparc.desktop.wallets.WalletListItem", { rowSpan: 2 }); break; - case "favourite-button": + case "preferred-button": control = new qx.ui.form.Button().set({ iconPosition: "right", width: 110, // make Primary and Secondary buttons same width @@ -326,8 +326,11 @@ qx.Class.define("osparc.desktop.wallets.WalletListItem", { }, __applyPreferredWallet: function(isPreferredWallet) { - const favouriteButton = this.getChildControl("favourite-button"); - favouriteButton.setBackgroundColor("transparent"); + const favouriteButton = this.getChildControl("preferred-button"); + favouriteButton.set({ + backgroundColor: "transparent", + width: 60, + }); const favouriteButtonIcon = favouriteButton.getChildControl("icon"); if (isPreferredWallet) { favouriteButton.set({ diff --git a/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletsList.js b/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletsList.js index 5ed1afa1372f..57c069388d79 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletsList.js +++ b/services/static-webserver/client/source/class/osparc/desktop/wallets/WalletsList.js @@ -229,8 +229,8 @@ qx.Class.define("osparc.desktop.wallets.WalletsList", { flex: 1 }); if (showCurrently) { - const selectColumn = new qx.ui.basic.Label(this.tr("Currently in use")).set({ - marginRight: 18 + const selectColumn = new qx.ui.basic.Label(this.tr("Preferred")).set({ + marginRight: 8 // align it with the "preferred-button" }); header.add(selectColumn) } diff --git a/services/static-webserver/client/source/class/osparc/editor/AnnotationEditor.js b/services/static-webserver/client/source/class/osparc/editor/AnnotationEditor.js index f62e7b8ef1e7..c50fba4456e4 100644 --- a/services/static-webserver/client/source/class/osparc/editor/AnnotationEditor.js +++ b/services/static-webserver/client/source/class/osparc/editor/AnnotationEditor.js @@ -129,26 +129,28 @@ qx.Class.define("osparc.editor.AnnotationEditor", { return; } + const annotationTypes = osparc.workbench.Annotation.TYPES; + const attrs = annotation.getAttributes(); - if (annotation.getType() === "text") { + if (annotation.getType() === annotationTypes.TEXT) { const textField = this.getChildControl("text-field").set({ 
value: attrs.text }); textField.addListener("changeValue", e => annotation.setText(e.getData())); - } else if (annotation.getType() === "note") { + } else if (annotation.getType() === annotationTypes.NOTE) { const textArea = this.getChildControl("text-area").set({ value: attrs.text }); textArea.addListener("changeValue", e => annotation.setText(e.getData())); } - if (["text", "rect"].includes(annotation.getType())) { + if ([annotationTypes.TEXT, annotationTypes.RECT].includes(annotation.getType())) { const colorPicker = this.getChildControl("color-picker"); annotation.bind("color", colorPicker, "value"); colorPicker.bind("value", annotation, "color"); } - if (annotation.getType() === "text") { + if (annotation.getType() === annotationTypes.TEXT) { const fontSizeField = this.getChildControl("font-size").set({ value: attrs.fontSize }) @@ -162,8 +164,10 @@ qx.Class.define("osparc.editor.AnnotationEditor", { } const colorPicker = this.getChildControl("color-picker"); - marker.bind("color", colorPicker, "color"); - colorPicker.bind("color", marker, "color"); + marker.bind("color", colorPicker, "value"); + colorPicker.bind("value", marker, "color"); + + this.getChildControl("delete-btn").exclude(); }, addDeleteButton: function() { diff --git a/services/static-webserver/client/source/class/osparc/editor/AnnotationNoteCreator.js b/services/static-webserver/client/source/class/osparc/editor/AnnotationNoteCreator.js index 4d3f31c15ee2..da44144e674a 100644 --- a/services/static-webserver/client/source/class/osparc/editor/AnnotationNoteCreator.js +++ b/services/static-webserver/client/source/class/osparc/editor/AnnotationNoteCreator.js @@ -85,54 +85,7 @@ qx.Class.define("osparc.editor.AnnotationNoteCreator", { control = new qx.ui.form.Button(this.tr("Select recipient")).set({ allowGrowX: false }); - control.addListener("execute", () => { - const currentStudy = osparc.store.Store.getInstance().getCurrentStudy().serialize(); - currentStudy["resourceType"] = "study"; - const recipientsManager = new osparc.share.NewCollaboratorsManager(currentStudy, false, false); - recipientsManager.setCaption("Recipient"); - recipientsManager.getActionButton().setLabel(this.tr("Add")); - recipientsManager.addListener("addCollaborators", e => { - const data = e.getData(); - const recipientGids = data["selectedGids"]; - - if (recipientGids && recipientGids.length) { - const recipientGid = parseInt(recipientGids[0]); - this.__setRecipientGid(recipientGid); - recipientsManager.close(); - - const currentAccessRights = this.__study.getAccessRights(); - const proposeSharing = []; - if (!(parseInt(recipientGid) in currentAccessRights)) { - proposeSharing.push(recipientGid); - } - if (proposeSharing.length) { - const collaboratorsManager = new osparc.share.NewCollaboratorsManager(currentStudy, false, true, proposeSharing); - collaboratorsManager.addListener("addCollaborators", ev => { - const { - selectedGids, - newAccessRights, - } = ev.getData(); - const newCollaborators = {}; - selectedGids.forEach(gid => { - newCollaborators[gid] = newAccessRights; - }); - const studyData = this.__study.serialize(); - osparc.store.Study.addCollaborators(studyData, newCollaborators) - .then(() => { - const potentialCollaborators = osparc.store.Groups.getInstance().getPotentialCollaborators() - selectedGids.forEach(gid => { - if (gid in potentialCollaborators && "getUserId" in potentialCollaborators[gid]) { - const uid = potentialCollaborators[gid].getUserId(); - osparc.notification.Notifications.postNewStudy(uid, studyData["uuid"]); - } - }); 
- }) - .finally(() => collaboratorsManager.close()); - }); - } - } - }, this); - }, this); + control.addListener("execute", () => this.__selectRecipientTapped(), this); this.getChildControl("recipient-layout").add(control); break; case "selected-recipient": @@ -178,6 +131,59 @@ return control || this.base(arguments, id); }, + __selectRecipientTapped: function() { + const currentStudyData = osparc.store.Store.getInstance().getCurrentStudy().serialize(); + currentStudyData["resourceType"] = "study"; + const usersManager = new osparc.share.NewCollaboratorsManager(currentStudyData, false, false).set({ + acceptOnlyOne: true, + }); + usersManager.setCaption("Recipient"); + usersManager.getActionButton().setLabel(this.tr("Add")); + usersManager.addListener("addCollaborators", e => { + usersManager.close(); + const data = e.getData(); + const userGids = data["selectedGids"]; + if (userGids && userGids.length) { + const userGid = parseInt(userGids[0]); + this.__recipientSelected(userGid); + } + }, this); + }, + + __recipientSelected: function(userGid) { + const currentAccessRights = this.__study.getAccessRights(); + if (userGid in currentAccessRights) { + this.__setRecipientGid(userGid); + } else { + const msg = this.tr("This user has no access to the project. Do you want to share it?"); + const win = new osparc.ui.window.Confirmation(msg).set({ + caption: this.tr("Share"), + confirmText: this.tr("Share"), + confirmAction: "create" + }); + win.center(); + win.open(); + win.addListener("close", () => { + if (win.getConfirmed()) { + const newCollaborators = { + [userGid]: osparc.data.Roles.STUDY["write"].accessRights + }; + const currentStudyData = osparc.store.Store.getInstance().getCurrentStudy().serialize(); + osparc.store.Study.getInstance().addCollaborators(currentStudyData, newCollaborators) + .then(() => { + this.__setRecipientGid(userGid); + const potentialCollaborators = osparc.store.Groups.getInstance().getPotentialCollaborators() + if (userGid in potentialCollaborators && "getUserId" in potentialCollaborators[userGid]) { + const uid = potentialCollaborators[userGid].getUserId(); + osparc.notification.Notifications.pushStudyShared(uid, currentStudyData["uuid"]); + } + }); + } + }); + } + }, + __setRecipientGid: function(gid) { this.setRecipientGid(gid); // only users were proposed diff --git a/services/static-webserver/client/source/class/osparc/editor/HtmlEditor.js b/services/static-webserver/client/source/class/osparc/editor/HtmlEditor.js deleted file mode 100644 index 21b09135f4dc..000000000000 --- a/services/static-webserver/client/source/class/osparc/editor/HtmlEditor.js +++ /dev/null @@ -1,61 +0,0 @@ -/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2023 IT'IS Foundation, https://itis.swiss - - License: - MIT: https://opensource.org/licenses/MIT - - Authors: - * Odei Maiz (odeimaiz) - -************************************************************************ */ - -qx.Class.define("osparc.editor.HtmlEditor", { - extend: osparc.editor.TextEditor, - - /** - * @param initText {String} Initialization text - */ - construct: function(initText = "") { - this.base(arguments, initText); - - this.getChildControl("preview-html"); - this.getChildControl("subtitle").set({ - value: this.tr("Supports HTML"), - url: "https://en.wikipedia.org/wiki/HTML", - }); - }, - - members: { - 
_createChildControlImpl: function(id) { - let control; - switch (id) { - case "preview-html": { - control = new qx.ui.embed.Html(); - const textArea = this.getChildControl("text-area"); - textArea.bind("value", control, "html"); - const tabs = this.getChildControl("tabs"); - const previewPage = new qx.ui.tabview.Page(this.tr("Preview")).set({ - layout: new qx.ui.layout.VBox(5) - }); - previewPage.getChildControl("button").getChildControl("label").set({ - font: "text-13" - }); - const scrollContainer = new qx.ui.container.Scroll(); - scrollContainer.add(control); - previewPage.add(scrollContainer, { - flex: 1 - }); - tabs.add(previewPage); - break; - } - } - return control || this.base(arguments, id); - } - } -}); diff --git a/services/static-webserver/client/source/class/osparc/editor/MarkdownEditor.js b/services/static-webserver/client/source/class/osparc/editor/MarkdownEditor.js index 0aa96f7781c4..8b12fa66a815 100644 --- a/services/static-webserver/client/source/class/osparc/editor/MarkdownEditor.js +++ b/services/static-webserver/client/source/class/osparc/editor/MarkdownEditor.js @@ -34,6 +34,20 @@ qx.Class.define("osparc.editor.MarkdownEditor", { value: this.tr("Markdown supported"), url: "https://en.wikipedia.org/wiki/Markdown", }); + + this.getChildControl("text-area").set({ + minimalLineHeight: 2, // defaults to 4 lines + maxHeight: 100, // 5 lines + autoSize: true, + }); + }, + + properties: { + compact: { + check: "Boolean", + init: null, + apply: "__applyCompact", + } }, members: { @@ -63,6 +77,12 @@ qx.Class.define("osparc.editor.MarkdownEditor", { } } return control || this.base(arguments, id); - } + }, + + __applyCompact: function(value) { + this.getChildControl("buttons").setVisibility(value ? "excluded" : "visible"); + this.getChildControl("tabs").getChildControl("bar").setVisibility(value ? "excluded" : "visible"); + this.getChildControl("subtitle").setVisibility(value ? 
"excluded" : "visible"); + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/editor/TextEditor.js b/services/static-webserver/client/source/class/osparc/editor/TextEditor.js index 1acf44f50b52..277ea3786b42 100644 --- a/services/static-webserver/client/source/class/osparc/editor/TextEditor.js +++ b/services/static-webserver/client/source/class/osparc/editor/TextEditor.js @@ -32,6 +32,15 @@ qx.Class.define("osparc.editor.TextEditor", { } this.__addButtons(); + + this.addListener("keydown", e => { + if (e.isCtrlPressed() && e.getKeyIdentifier() === "Enter") { + const text = this.getChildControl("text-area").getValue(); + this.fireDataEvent("textChanged", text); + e.stopPropagation(); + e.preventDefault(); + } + }, this); }, events: { diff --git a/services/static-webserver/client/source/class/osparc/file/FileDrop.js b/services/static-webserver/client/source/class/osparc/file/FileDrop.js index 92b4f15206c9..6411e33276dd 100644 --- a/services/static-webserver/client/source/class/osparc/file/FileDrop.js +++ b/services/static-webserver/client/source/class/osparc/file/FileDrop.js @@ -256,9 +256,9 @@ qx.Class.define("osparc.file.FileDrop", { this._add(this.__dropMe); const svgLayer = this.__svgLayer; if (svgLayer.getReady()) { - this.__dropMe.rect = svgLayer.drawDashedRect(boxWidth, boxHeight); + this.__dropMe["rect"] = svgLayer.drawDashedRect(boxWidth, boxHeight); } else { - svgLayer.addListenerOnce("SvgWidgetReady", () => this.__dropMe.rect = svgLayer.drawDashedRect(boxWidth, boxHeight), this); + svgLayer.addListenerOnce("SvgWidgetReady", () => this.__dropMe["rect"] = svgLayer.drawDashedRect(boxWidth, boxHeight), this); } } const dropMe = this.__dropMe; @@ -269,10 +269,10 @@ qx.Class.define("osparc.file.FileDrop", { top: posY - parseInt(dropMeBounds.height/2)- parseInt(boxHeight/2) }); if ("rect" in dropMe) { - dropMe.rect.stroke({ + dropMe["rect"].stroke({ width: 1 }); - osparc.wrapper.Svg.updateItemPos(dropMe.rect, posX - boxWidth, posY - boxHeight); + osparc.wrapper.Svg.updateItemPos(dropMe["rect"], posX - boxWidth, posY - boxHeight); } }, @@ -280,7 +280,7 @@ qx.Class.define("osparc.file.FileDrop", { const dropMe = this.__dropMe; if (dropMe) { if ("rect" in dropMe) { - dropMe.rect.stroke({ + dropMe["rect"].stroke({ width: 0 }); } diff --git a/services/static-webserver/client/source/class/osparc/file/FileLabelWithActions.js b/services/static-webserver/client/source/class/osparc/file/FileLabelWithActions.js index 0b1bedb0aeb4..8eb59c65393f 100644 --- a/services/static-webserver/client/source/class/osparc/file/FileLabelWithActions.js +++ b/services/static-webserver/client/source/class/osparc/file/FileLabelWithActions.js @@ -65,6 +65,13 @@ qx.Class.define("osparc.file.FileLabelWithActions", { event: "changeMultiSelect", apply: "__changeMultiSelection", }, + + deleteEnabled: { + check: "Boolean", + init: true, + nullable: true, + apply: "__applyDeleteEnabled", + }, }, members: { @@ -106,11 +113,17 @@ qx.Class.define("osparc.file.FileLabelWithActions", { } }, + __applyDeleteEnabled: function(value) { + if (value === false) { + this.getChildControl("delete-button").setEnabled(false); + } + }, + setItemSelected: function(selectedItem) { if (selectedItem) { this.__selection = [selectedItem]; - this.getChildControl("download-button").setEnabled(true); // folders can also be downloaded - this.getChildControl("delete-button").setEnabled(true); // folders can also be deleted + this.getChildControl("download-button").setEnabled(true); + 
this.getChildControl("delete-button").setEnabled(this.isDeleteEnabled()); this.getChildControl("selected-label").setValue(selectedItem.getLabel()); } else { this.resetSelection(); @@ -136,7 +149,7 @@ resetSelection: function() { this.__selection = []; this.getChildControl("download-button").setEnabled(false); - this.getChildControl("delete-button").setEnabled(false); + this.getChildControl("delete-button").setEnabled(this.isDeleteEnabled()); this.getChildControl("selected-label").resetValue(); }, diff --git a/services/static-webserver/client/source/class/osparc/file/FilePicker.js b/services/static-webserver/client/source/class/osparc/file/FilePicker.js index 5371045780a3..559595adddfc 100644 --- a/services/static-webserver/client/source/class/osparc/file/FilePicker.js +++ b/services/static-webserver/client/source/class/osparc/file/FilePicker.js @@ -283,7 +283,7 @@ qx.Class.define("osparc.file.FilePicker", { this.__buildInfoLayout(); } else { this.__addProgressBar(); - if (viewContext === "workbench") { + if (["workbench", "pipeline"].includes(viewContext)) { this.__buildWorkbenchLayout(); } else { this.setMargin(10); diff --git a/services/static-webserver/client/source/class/osparc/filter/CollaboratorToggleButton.js b/services/static-webserver/client/source/class/osparc/filter/CollaboratorToggleButton.js index 6a9274cea418..8ef76e88b807 100644 --- a/services/static-webserver/client/source/class/osparc/filter/CollaboratorToggleButton.js +++ b/services/static-webserver/client/source/class/osparc/filter/CollaboratorToggleButton.js @@ -24,34 +24,47 @@ qx.Class.define("osparc.filter.CollaboratorToggleButton", { appearance: "tagbutton" }); - if (collaborator["collabType"] === 0) { - this.setLabel(this.tr("Public")); - } else { - let label = collaborator.getLabel(); - if ("getEmail" in collaborator && collaborator.getEmail()) { - label += ` (${collaborator.getEmail()})`; - } - this.setLabel(label); - } - - if (collaborator.getDescription()) { - const ttt = collaborator.getLabel() + "<br>
" + collaborator.getDescription(); - this.setToolTipText(ttt); - } - let iconPath = null; + let label = null; + let toolTipText = ""; switch (collaborator["collabType"]) { - case 0: - iconPath = "@FontAwesome5Solid/globe/14"; + case osparc.store.Groups.COLLAB_TYPE.EVERYONE: + iconPath = osparc.dashboard.CardBase.SHARED_ALL; + label = this.tr("Public"); + toolTipText = this.tr("Public to all users"); break; - case 1: - iconPath = "@FontAwesome5Solid/users/14"; + case osparc.store.Groups.COLLAB_TYPE.SUPPORT: + iconPath = osparc.dashboard.CardBase.SHARED_SUPPORT; + label = collaborator.getLabel(); + toolTipText = collaborator.getDescription(); break; - case 2: - iconPath = "@FontAwesome5Solid/user/14"; + case osparc.store.Groups.COLLAB_TYPE.ORGANIZATION: + iconPath = osparc.dashboard.CardBase.SHARED_ORGS; + label = collaborator.getLabel(); + toolTipText = collaborator.getDescription(); break; + case osparc.store.Groups.COLLAB_TYPE.USER: { + iconPath = osparc.dashboard.CardBase.SHARED_USER; + label = collaborator.getLabel(); + if (collaborator.getEmail()) { + toolTipText += collaborator.getEmail() + "
"; + } + if (collaborator.getFirstName()) { + toolTipText += [collaborator.getFirstName(), collaborator.getLastName()].join(" ").trim(); + } + break; + } } this.setIcon(iconPath); + this.getChildControl("icon").set({ + width: 17, // align with widest icon: "users" + scale: true, + }); + this.setLabel(label); + if (toolTipText) { + const infoButton = new osparc.ui.hint.InfoHint(toolTipText); + this._add(infoButton); + } this.getChildControl("check"); }, diff --git a/services/static-webserver/client/source/class/osparc/filter/OrganizationsAndMembers.js b/services/static-webserver/client/source/class/osparc/filter/OrganizationsAndMembers.js index 0ca91537931f..33cedf21cb02 100644 --- a/services/static-webserver/client/source/class/osparc/filter/OrganizationsAndMembers.js +++ b/services/static-webserver/client/source/class/osparc/filter/OrganizationsAndMembers.js @@ -64,18 +64,15 @@ qx.Class.define("osparc.filter.OrganizationsAndMembers", { const visibleCollaborators = Object.values(this.__visibleCollaborators); + const collabTypeOrder = osparc.store.Groups.COLLAB_TYPE_ORDER; // sort them first visibleCollaborators.sort((a, b) => { - if (a["collabType"] > b["collabType"]) { - return 1; - } - if (a["collabType"] < b["collabType"]) { - return -1; - } - if (a.getLabel() > b.getLabel()) { - return 1; - } - return -1; + const typeDiff = collabTypeOrder.indexOf(a["collabType"]) - collabTypeOrder.indexOf(b["collabType"]); + if (typeDiff !== 0) { + return typeDiff; + } + // fallback: sort alphabetically by label + return a.getLabel().localeCompare(b.getLabel()); }); visibleCollaborators.forEach(visibleCollaborator => { @@ -85,14 +82,14 @@ qx.Class.define("osparc.filter.OrganizationsAndMembers", { const btn = this.addOption(visibleCollaborator); let iconPath = null; switch (visibleCollaborator["collabType"]) { - case 0: - iconPath = "@FontAwesome5Solid/globe/14"; + case osparc.store.Groups.COLLAB_TYPE.EVERYONE: + iconPath = osparc.dashboard.CardBase.SHARED_ALL; break; - case 1: - iconPath = "@FontAwesome5Solid/users/14"; + case osparc.store.Groups.COLLAB_TYPE.ORGANIZATION: + iconPath = osparc.dashboard.CardBase.SHARED_ORGS; break; - case 2: - iconPath = "@FontAwesome5Solid/user/14"; + case osparc.store.Groups.COLLAB_TYPE.USER: + iconPath = osparc.dashboard.CardBase.SHARED_USER; break; } btn.setIcon(iconPath); diff --git a/services/static-webserver/client/source/class/osparc/form/renderer/PropForm.js b/services/static-webserver/client/source/class/osparc/form/renderer/PropForm.js index 6bd279ce20cc..452f9efa4689 100644 --- a/services/static-webserver/client/source/class/osparc/form/renderer/PropForm.js +++ b/services/static-webserver/client/source/class/osparc/form/renderer/PropForm.js @@ -41,7 +41,6 @@ qx.Class.define("osparc.form.renderer.PropForm", { "fileRequested": "qx.event.type.Data", "filePickerRequested": "qx.event.type.Data", "parameterRequested": "qx.event.type.Data", - "changeChildVisibility": "qx.event.type.Event" }, properties: { @@ -135,58 +134,69 @@ qx.Class.define("osparc.form.renderer.PropForm", { /* * <-- Dynamic inputs --> */ - __getEmptyDataLastPorts: function() { - let emptyDataPorts = []; + // + __getHideablePorts: function() { + const hideablePorts = []; const minVisibleInputs = this.getNode().getMinVisibleInputs(); if (minVisibleInputs === null) { - return emptyDataPorts; + return hideablePorts; } + // start from the last port and check the port types + // if all last ports are the same type, then mark them as hideable + let hideablePortType = null; const portIds = 
this.getPortIds(); - // it will always show 1 more, so: -1 - for (let i=minVisibleInputs-1; i=minVisibleInputs; i--) { const portId = portIds[i]; const ctrl = this._form.getControl(portId); - if (ctrl && ctrl.type.includes("data:") && !("link" in ctrl)) { - emptyDataPorts.push(portId); + if (ctrl && hideablePortType === null) { + hideablePortType = ctrl.type; + } + if (ctrl && + ctrl.type === hideablePortType && + // make sure it's not linked + !("link" in ctrl) + ) { + hideablePorts.unshift(portId); } else { - emptyDataPorts = []; + break; } } - return emptyDataPorts; + return hideablePorts; }, - __getVisibleEmptyDataLastPort: function() { - let emptyDataPorts = null; - this.getPortIds().forEach(portId => { - const ctrl = this._form.getControl(portId); + __getVisibleEmptyLastPort: function() { + let emptyPorts = null; + const hideablePorts = this.__getHideablePorts(); + for (let i=hideablePorts.length-1; i>=0; i--) { + const portId = hideablePorts[i] const label = this._getLabelFieldChild(portId).child; if ( - ctrl && ctrl.type.includes("data:") && !("link" in ctrl) && - label && label.isVisible() + label && label.isVisible() && + emptyPorts === null ) { - emptyDataPorts = portId; + emptyPorts = portId; } - }); - return emptyDataPorts; + } + return emptyPorts; }, __addInputPortButtonClicked: function() { - const emptyDataPorts = this.__getEmptyDataLastPorts(); - const lastEmptyDataPort = this.__getVisibleEmptyDataLastPort(); - if (emptyDataPorts.length>1 && lastEmptyDataPort) { - const idx = emptyDataPorts.indexOf(lastEmptyDataPort); - if (idx+1 < emptyDataPorts.length) { - this.__showPort(emptyDataPorts[idx+1]); + const emptyPorts = this.__getHideablePorts(); + const lastEmptyPort = this.__getVisibleEmptyLastPort(); + if (emptyPorts.length>1 && lastEmptyPort) { + const idx = emptyPorts.indexOf(lastEmptyPort); + if (idx+1 < emptyPorts.length) { + this.__showPort(emptyPorts[idx+1]); } this.__addInputPortButton.setVisibility(this.__checkAddInputPortButtonVisibility()); } }, __checkAddInputPortButtonVisibility: function() { - const emptyDataPorts = this.__getEmptyDataLastPorts(); - const lastEmptyDataPort = this.__getVisibleEmptyDataLastPort(); - const idx = emptyDataPorts.indexOf(lastEmptyDataPort); - if (idx < emptyDataPorts.length-1) { + const emptyPorts = this.__getHideablePorts(); + const lastEmptyPort = this.__getVisibleEmptyLastPort(); + const idx = emptyPorts.indexOf(lastEmptyPort); + if (idx < emptyPorts.length-1) { return "visible"; } return "excluded"; @@ -225,9 +235,9 @@ qx.Class.define("osparc.form.renderer.PropForm", { makeInputsDynamic: function() { this.getPortIds().forEach(portId => this.__showPort(portId)); - const emptyDataPorts = this.__getEmptyDataLastPorts(); - for (let i=1; i { @@ -302,7 +313,7 @@ qx.Class.define("osparc.form.renderer.PropForm", { if (["FileButton"].includes(field.widgetType)) { const menuButton = this.__getSelectFileButton(field.key); studyUI.bind("mode", menuButton, "visibility", { - converter: mode => mode === "workbench" ? "visible" : "excluded" + converter: mode => ["workbench", "pipeline"].includes(mode) ? 
"visible" : "excluded" }); optionsMenu.add(menuButton); } @@ -313,13 +324,12 @@ qx.Class.define("osparc.form.renderer.PropForm", { const paramsMenuBtn = this.__getParamsMenuButton(field.key); paramsMenuBtn.exclude(); optionsMenu.add(paramsMenuBtn); - const areParamsEnabled = osparc.utils.Utils.isDevelopmentPlatform(); [ newParamBtn, paramsMenuBtn ].forEach(btn => { studyUI.bind("mode", btn, "visibility", { - converter: mode => mode === "workbench" && areParamsEnabled ? "visible" : "excluded" + converter: mode => ["workbench", "pipeline"].includes(mode) ? "visible" : "excluded" }); }); } @@ -348,7 +358,7 @@ qx.Class.define("osparc.form.renderer.PropForm", { const inputNodeIDs = thisNode.getInputNodes(); inputNodeIDs.forEach(inputNodeId => { const inputNode = this.getStudy().getWorkbench().getNode(inputNodeId); - if (inputNode) { + if (inputNode && inputNode.getMetadata()) { for (const outputKey in inputNode.getOutputs()) { const paramButton = new qx.ui.menu.Button(); inputNode.bind("label", paramButton, "label", { @@ -390,7 +400,7 @@ qx.Class.define("osparc.form.renderer.PropForm", { menu.removeAll(); const inputNode = this.getStudy().getWorkbench().getNode(inputNodeId); - if (inputNode) { + if (inputNode && inputNode.getMetadata()) { for (const outputKey in inputNode.getOutputs()) { osparc.utils.Ports.arePortsCompatible(inputNode, outputKey, this.getNode(), targetPortId) .then(compatible => { @@ -416,6 +426,7 @@ qx.Class.define("osparc.form.renderer.PropForm", { __getNewParamButton: function(portId) { const newParamBtn = new qx.ui.menu.Button(this.tr("Set new parameter")); + osparc.utils.Utils.setIdToWidget(newParamBtn, "connect_new_parameter_btn_" + portId); newParamBtn.addListener("execute", () => this.fireDataEvent("parameterRequested", portId), this); return newParamBtn; }, @@ -534,29 +545,6 @@ qx.Class.define("osparc.form.renderer.PropForm", { }); }, - // overridden - setAccessLevel: function(data) { - const entry = this.self().GRID_POS; - const disableables = osparc.form.renderer.PropFormBase.getDisableables(); - Object.entries(data).forEach(([portId, visibility]) => { - Object.values(entry).forEach(entryPos => { - const layoutElement = this._getLayoutChild(portId, entryPos); - if (layoutElement && layoutElement.child) { - const control = layoutElement.child; - if (control) { - const vis = visibility === this._visibility.hidden ? "excluded" : "visible"; - const enabled = visibility === this._visibility.readWrite; - control.setVisibility(vis); - if (disableables.includes(entryPos)) { - control.setEnabled(enabled); - } - } - } - }); - }); - this.fireEvent("changeChildVisibility"); - }, - setPortErrorMessage: function(portId, msg) { const infoButton = this._getInfoFieldChild(portId); if (infoButton && "child" in infoButton) { @@ -921,6 +909,12 @@ qx.Class.define("osparc.form.renderer.PropForm", { if (!this.__isPortAvailable(toPortId)) { return false; } + const fromNode = study.getWorkbench().getNode(fromNodeId); + if (!fromNode) { + console.error("Node not found while creating link", fromNodeId); + return false; + } + const ctrlLink = this.getControlLink(toPortId); ctrlLink.setEnabled(false); this._form.getControl(toPortId)["link"] = { @@ -938,22 +932,25 @@ qx.Class.define("osparc.form.renderer.PropForm", { }; ctrlLink.addListener("mouseover", () => highlightEdgeUI(true)); ctrlLink.addListener("mouseout", () => highlightEdgeUI(false)); + const prettifyLinkString = () => { + const port = fromNode.getOutput(fromPortId); + const fromPortLabel = port ? 
port.label : null; + fromNode.bind("label", ctrlLink, "value", { + converter: label => label + ": " + fromPortLabel + }); - const workbench = study.getWorkbench(); - const fromNode = workbench.getNode(fromNodeId); - const port = fromNode.getOutput(fromPortId); - const fromPortLabel = port ? port.label : null; - fromNode.bind("label", ctrlLink, "value", { - converter: label => label + ": " + fromPortLabel - }); - // Hack: Show tooltip if element is disabled - const addToolTip = () => { - ctrlLink.getContentElement().removeAttribute("title"); - const toolTipText = fromNode.getLabel() + ":\n" + fromPortLabel; - ctrlLink.getContentElement().setAttribute("title", toolTipText); - }; - fromNode.addListener("changeLabel", () => addToolTip()); - addToolTip(); + const addToolTip = () => { + const toolTipText = fromNode.getLabel() + ":\n" + fromPortLabel; + osparc.utils.Utils.toolTipTextOnDisabledWidget(ctrlLink, toolTipText); + }; + fromNode.addListener("changeLabel", () => addToolTip()); + addToolTip(); + } + if (fromNode.getMetadata()) { + prettifyLinkString(); + } else { + fromNode.addListenerOnce("changeMetadata", () => prettifyLinkString(), this); + } this.__portLinkAdded(toPortId, fromNodeId, fromPortId); diff --git a/services/static-webserver/client/source/class/osparc/form/renderer/PropFormBase.js b/services/static-webserver/client/source/class/osparc/form/renderer/PropFormBase.js index fbc0a9a40777..ac7a1a3bd4de 100644 --- a/services/static-webserver/client/source/class/osparc/form/renderer/PropFormBase.js +++ b/services/static-webserver/client/source/class/osparc/form/renderer/PropFormBase.js @@ -48,6 +48,8 @@ qx.Class.define("osparc.form.renderer.PropFormBase", { grid.setColumnFlex(this.self().GRID_POS.FIELD_LINK_UNLINK, 0); grid.setColumnMinWidth(this.self().GRID_POS.CTRL_FIELD, 50); Object.keys(this.self().GRID_POS).forEach((_, idx) => grid.setColumnAlign(idx, "left", "middle")); + + form.addListener("changeData", e => this.fireDataEvent("changeData", e.getData()), this); }, properties: { @@ -57,6 +59,11 @@ qx.Class.define("osparc.form.renderer.PropFormBase", { } }, + events: { + "changeData": "qx.event.type.Data", + "unitChanged": "qx.event.type.Data", + }, + statics: { GRID_POS: { LABEL: 0, @@ -68,13 +75,6 @@ qx.Class.define("osparc.form.renderer.PropFormBase", { ROW_HEIGHT: 28, - getDisableables: function() { - return [ - this.GRID_POS.LABEL, - this.GRID_POS.CTRL_FIELD - ]; - }, - updateUnitLabelPrefix: function(item) { const { unitShort, @@ -225,7 +225,7 @@ qx.Class.define("osparc.form.renderer.PropFormBase", { const changedXUnits = this.getChangedXUnits(); Object.keys(changedXUnits).forEach(portId => { const ctrl = this._form.getControl(portId); - const nodeMD = this.getNode().getMetaData(); + const nodeMD = this.getNode().getMetadata(); const { unitPrefix } = osparc.utils.Units.decomposeXUnit(nodeMD.inputs[portId]["x_unit"]); @@ -273,7 +273,7 @@ qx.Class.define("osparc.form.renderer.PropFormBase", { const ctrl = this._form.getControl(portId); xUnits[portId] = osparc.utils.Units.composeXUnit(ctrl.unit, ctrl.unitPrefix); } - const nodeMD = this.getNode().getMetaData(); + const nodeMD = this.getNode().getMetadata(); const changedXUnits = {}; for (const portId in xUnits) { if (xUnits[portId] === null) { @@ -319,13 +319,6 @@ qx.Class.define("osparc.form.renderer.PropFormBase", { return false; }, - /** - * @abstract - */ - setAccessLevel: function() { - throw new Error("Abstract method called!"); - }, - __createInfoWHint: function(hint) { const infoWHint = new 
osparc.form.PortInfoHint(hint); return infoWHint; @@ -357,7 +350,7 @@ qx.Class.define("osparc.form.renderer.PropFormBase", { if (unit && unitRegistered) { unitLabel.addListener("pointerover", () => unitLabel.setCursor("pointer"), this); unitLabel.addListener("pointerout", () => unitLabel.resetCursor(), this); - const nodeMD = this.getNode().getMetaData(); + const nodeMD = this.getNode().getMetadata(); const originalUnit = "x_unit" in nodeMD.inputs[item.key] ? osparc.utils.Units.decomposeXUnit(nodeMD.inputs[item.key]["x_unit"]) : null; unitLabel.addListener("tap", () => { const nextPrefix = osparc.utils.Units.getNextPrefix(item.unitPrefix, originalUnit.unitPrefix); @@ -376,6 +369,10 @@ qx.Class.define("osparc.form.renderer.PropFormBase", { } item.setValue(newValue); this.self().updateUnitLabelPrefix(item); + this.fireDataEvent("unitChanged", { + portId: item.key, + prefix: newPrefix, + }); }, _getLayoutChild: function(portId, column) { diff --git a/services/static-webserver/client/source/class/osparc/form/renderer/PropFormEditor.js b/services/static-webserver/client/source/class/osparc/form/renderer/PropFormEditor.js deleted file mode 100644 index 83cef4b59967..000000000000 --- a/services/static-webserver/client/source/class/osparc/form/renderer/PropFormEditor.js +++ /dev/null @@ -1,201 +0,0 @@ -/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2020 IT'IS Foundation, https://itis.swiss - - License: - MIT: https://opensource.org/licenses/MIT - - Authors: - * Odei Maiz (odeimaiz) - -************************************************************************ */ - -/** - * An extension of the PropFormBase that is able to handle the Access Level of each entry. 
- */ - -qx.Class.define("osparc.form.renderer.PropFormEditor", { - extend: osparc.form.renderer.PropFormBase, - - /** - * @param form {osparc.form.Auto} form widget to embed - * @param node {osparc.data.model.Node} Node owning the widget - */ - construct: function(form, node) { - this.base(arguments, form, node); - - this.__ctrlRadioButtonsMap = {}; - this.__addAccessLevelRBs(); - }, - - statics: { - GRID_POS: { - ...osparc.form.renderer.PropFormBase.GRID_POS, - ACCESS_LEVEL: Object.keys(osparc.form.renderer.PropFormBase.GRID_POS).length - } - }, - - // eslint-disable-next-line qx-rules/no-refs-in-members - members: { - _accessLevel: { - hidden: 0, - readOnly: 1, - readAndWrite: 2 - }, - - __ctrlRadioButtonsMap: null, - - // overridden - setAccessLevel: function(data) { - for (const key in data) { - const control = this.__getRadioButtonsFieldChild(key); - if (control) { - const group = this.__ctrlRadioButtonsMap[key]; - switch (data[key]) { - case this._visibility.hidden: { - group.setSelection([group.getSelectables()[0]]); - break; - } - case this._visibility.readOnly: { - group.setSelection([group.getSelectables()[1]]); - break; - } - case this._visibility.readWrite: { - group.setSelection([group.getSelectables()[2]]); - break; - } - } - } - } - }, - - linkAdded: function(portId, controlLink) { - let data = this._getCtrlFieldChild(portId); - if (data) { - let child = data.child; - let idx = data.idx; - const layoutProps = child.getLayoutProperties(); - controlLink.oldCtrl = child; - this._removeAt(idx); - this._addAt(controlLink, idx, { - row: layoutProps.row, - column: this.self().GRID_POS.CTRL_FIELD - }); - } - }, - - linkRemoved: function(portId) { - let data = this._getCtrlFieldChild(portId); - if (data) { - let child = data.child; - let idx = data.idx; - const layoutProps = child.getLayoutProperties(); - this._removeAt(idx); - this._addAt(child.oldCtrl, idx, { - row: layoutProps.row, - column: this.self().GRID_POS.CTRL_FIELD - }); - } - }, - - __addAccessLevelRBs: function() { - Object.keys(this._form.getControls()).forEach(portId => { - this.__addAccessLevelRB(portId); - }); - }, - - __addAccessLevelRB: function(portId) { - const rbHidden = new qx.ui.form.RadioButton(this.tr("Not Visible")); - rbHidden.accessLevel = this._visibility.hidden; - rbHidden.portId = portId; - const rbReadOnly = new qx.ui.form.RadioButton(this.tr("Read Only")); - rbReadOnly.accessLevel = this._visibility.readOnly; - rbReadOnly.portId = portId; - const rbEditable = new qx.ui.form.RadioButton(this.tr("Editable")); - rbEditable.accessLevel = this._visibility.readWrite; - rbEditable.portId = portId; - - const groupBox = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); - groupBox.add(rbHidden); - groupBox.add(rbReadOnly); - groupBox.add(rbEditable); - - const group = new qx.ui.form.RadioGroup(rbHidden, rbReadOnly, rbEditable); - group.setSelection([rbEditable]); - this.__ctrlRadioButtonsMap[portId] = group; - group.addListener("changeSelection", this.__onAccessLevelChanged, this); - - const ctrlField = this._getCtrlFieldChild(portId); - if (ctrlField) { - const idx = ctrlField.idx; - const child = ctrlField.child; - const layoutProps = child.getLayoutProperties(); - this._addAt(groupBox, idx, { - row: layoutProps.row, - column: this.self().GRID_POS.ACCESS_LEVEL - }); - } - }, - - __onAccessLevelChanged: function(e) { - const selectedButton = e.getData()[0]; - const { - accessLevel, - portId - } = selectedButton; - - const data = {}; - data[portId] = accessLevel; - - this.__setAccessLevel(data); - - 
let inputAccess = this.getNode().getInputAccess(); - if (inputAccess === null) { - inputAccess = {}; - } - inputAccess[portId] = accessLevel; - this.getNode().setInputAccess(inputAccess); - - const propsForm = this.getNode().getPropsForm(); - propsForm.setAccessLevel(data); - }, - - __addDelTag: function(label) { - const newLabel = "" + label + ""; - return newLabel; - }, - - __removeDelTag: function(label) { - let newLabel = label.replace("", ""); - newLabel = newLabel.replace("", ""); - return newLabel; - }, - - __setAccessLevel: function(data) { - for (const key in data) { - const label = this._getLabelFieldChild(key).child; - const newLabel = data[key] === this._visibility.hidden ? this.__addDelTag(label.getValue()) : this.__removeDelTag(label.getValue()); - label.setValue(newLabel); - - const enabled = data[key] === this._visibility.readWrite; - const disableables = osparc.form.renderer.PropFormBase.getDisableables(); - const ctrls = []; - disableables.forEach(disableable => ctrls.push(this._getLayoutChild(key, disableable))); - ctrls.forEach(ctrl => { - if (ctrl) { - ctrl.child.setEnabled(enabled); - } - }); - } - }, - - __getRadioButtonsFieldChild: function(portId) { - return this._getLayoutChild(portId, this.self().GRID_POS.ACCESS_LEVEL); - } - } -}); diff --git a/services/static-webserver/client/source/class/osparc/form/tag/TagManager.js b/services/static-webserver/client/source/class/osparc/form/tag/TagManager.js index 10db2579bee3..b0f9bb5e272d 100644 --- a/services/static-webserver/client/source/class/osparc/form/tag/TagManager.js +++ b/services/static-webserver/client/source/class/osparc/form/tag/TagManager.js @@ -155,43 +155,17 @@ qx.Class.define("osparc.form.tag.TagManager", { return tagButton; }, - __getAddTagPromise: function(tagId) { - const params = { - url: { - tagId, - studyId: this.__resourceId - } - }; - return osparc.data.Resources.fetch("studies", "addTag", params); - }, - - __getRemoveTagPromise: function(tagId) { - const params = { - url: { - tagId, - studyId: this.__resourceId - } - }; - return osparc.data.Resources.fetch("studies", "removeTag", params); - }, - __saveAddTag: function(tagId, tagButton) { - this.__getAddTagPromise(tagId) + osparc.store.Study.getInstance().addTag(this.__resourceId, tagId) .then(() => this.__selectedTags.push(tagId)) - .catch(err => { - console.error(err); - tagButton.setValue(false); - }) + .catch(() => tagButton.setValue(false)) .finally(() => tagButton.setFetching(false)); }, __saveRemoveTag: function(tagId, tagButton) { - this.__getRemoveTagPromise(tagId) + osparc.store.Study.getInstance().removeTag(this.__resourceId, tagId) .then(() => this.__selectedTags.remove(tagId)) - .catch(err => { - console.error(err); - tagButton.setValue(true); - }) + .catch(() => tagButton.setValue(true)) .finally(() => tagButton.setFetching(false)); }, @@ -203,14 +177,14 @@ qx.Class.define("osparc.form.tag.TagManager", { for (let i=0; i updatedData); } } for (let i=0; i updatedData); } } diff --git a/services/static-webserver/client/source/class/osparc/info/CommentAdd.js b/services/static-webserver/client/source/class/osparc/info/CommentAdd.js deleted file mode 100644 index 8bfb2a2c330e..000000000000 --- a/services/static-webserver/client/source/class/osparc/info/CommentAdd.js +++ /dev/null @@ -1,149 +0,0 @@ -/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2023 IT'IS Foundation, https://itis.swiss - - License: - MIT: 
https://opensource.org/licenses/MIT - - Authors: - * Odei Maiz (odeimaiz) - -************************************************************************ */ - - -qx.Class.define("osparc.info.CommentAdd", { - extend: qx.ui.core.Widget, - - /** - * @param studyId {String} Study Id - * @param conversationId {String} Conversation Id - */ - construct: function(studyId, conversationId = null) { - this.base(arguments); - - this.__studyId = studyId; - this.__conversationId = conversationId; - - this._setLayout(new qx.ui.layout.VBox(5)); - - this.__buildLayout(); - }, - - events: { - "commentAdded": "qx.event.type.Event" - }, - - members: { - __studyId: null, - __conversationId: null, - - _createChildControlImpl: function(id) { - let control; - switch (id) { - case "add-comment-label": - control = new qx.ui.basic.Label().set({ - value: this.tr("Add comment") - }); - this._add(control); - break; - case "add-comment-layout": { - const grid = new qx.ui.layout.Grid(8, 5); - grid.setColumnWidth(0, 32); - grid.setColumnFlex(1, 1); - control = new qx.ui.container.Composite(grid); - this._add(control, { - flex: 1 - }); - break; - } - case "thumbnail": { - control = new qx.ui.basic.Image().set({ - alignY: "middle", - scale: true, - allowGrowX: true, - allowGrowY: true, - allowShrinkX: true, - allowShrinkY: true, - maxWidth: 32, - maxHeight: 32, - decorator: "rounded", - }); - const authData = osparc.auth.Data.getInstance(); - const myUsername = authData.getUsername(); - const myEmail = authData.getEmail(); - control.set({ - source: osparc.utils.Avatar.emailToThumbnail(myEmail, myUsername, 32) - }); - const layout = this.getChildControl("add-comment-layout"); - layout.add(control, { - row: 0, - column: 0 - }); - break; - } - case "comment-field": { - control = new osparc.editor.MarkdownEditor(); - control.getChildControl("buttons").exclude(); - const layout = this.getChildControl("add-comment-layout"); - layout.add(control, { - row: 0, - column: 1 - }); - break; - } - case "buttons-layout": { - control = new qx.ui.container.Composite(new qx.ui.layout.HBox(5).set({ - alignX: "right" - })); - this._add(control); - break; - } - case "add-comment-button": { - control = new qx.ui.form.Button(this.tr("Add message")).set({ - appearance: "form-button", - allowGrowX: false, - }); - this.getChildControl("buttons-layout").add(control); - break; - } - } - - return control || this.base(arguments, id); - }, - - __buildLayout: function() { - this.getChildControl("thumbnail"); - this.getChildControl("comment-field"); - const addButton = this.getChildControl("add-comment-button"); - addButton.addListener("execute", () => { - if (this.__conversationId) { - this.__addComment(); - } else { - // create new conversation first - osparc.study.Conversations.addConversation(this.__studyId) - .then(data => { - this.__conversationId = data["conversationId"]; - this.__addComment(); - }) - } - }); - }, - - __addComment: function() { - const commentField = this.getChildControl("comment-field"); - const comment = commentField.getChildControl("text-area").getValue(); - if (comment) { - osparc.study.Conversations.addMessage(this.__studyId, this.__conversationId, comment) - .then(data => { - this.fireDataEvent("commentAdded", data); - commentField.getChildControl("text-area").setValue(""); - }); - } - }, - } -}); diff --git a/services/static-webserver/client/source/class/osparc/info/CommentUI.js b/services/static-webserver/client/source/class/osparc/info/CommentUI.js deleted file mode 100644 index 222631985640..000000000000 --- 
a/services/static-webserver/client/source/class/osparc/info/CommentUI.js +++ /dev/null @@ -1,145 +0,0 @@ -/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2023 IT'IS Foundation, https://itis.swiss - - License: - MIT: https://opensource.org/licenses/MIT - - Authors: - * Odei Maiz (odeimaiz) - -************************************************************************ */ - - -qx.Class.define("osparc.info.CommentUI", { - extend: qx.ui.core.Widget, - - /** - * @param comment {Object} comment - */ - construct: function(comment) { - this.base(arguments); - - this.__comment = comment; - - const isMyComment = this.__isMyComment(); - const layout = new qx.ui.layout.Grid(12, 4); - layout.setColumnFlex(1, 1); // comment - layout.setColumnFlex(isMyComment ? 0 : 2, 3); // spacer - this._setLayout(layout); - this.setPadding(5); - - this.__buildLayout(); - }, - - members: { - __comment: null, - - __isMyComment: function() { - return this.__comment && osparc.auth.Data.getInstance().getGroupId() === this.__comment["userGroupId"]; - }, - - _createChildControlImpl: function(id) { - let control; - switch (id) { - case "thumbnail": - control = new qx.ui.basic.Image().set({ - scale: true, - maxWidth: 32, - maxHeight: 32, - decorator: "rounded", - marginTop: 2, - }); - this._add(control, { - row: 0, - column: this.__isMyComment() ? 2 : 0, - rowSpan: 2, - }); - break; - case "header-layout": - control = new qx.ui.container.Composite(new qx.ui.layout.HBox(5).set({ - alignX: this.__isMyComment() ? "right" : "left" - })); - control.addAt(new qx.ui.basic.Label("-"), 1); - this._add(control, { - row: 0, - column: 1 - }); - break; - case "user-name": - control = new qx.ui.basic.Label().set({ - font: "text-12" - }); - this.getChildControl("header-layout").addAt(control, this.__isMyComment() ? 2 : 0); - break; - case "last-updated": - control = new qx.ui.basic.Label().set({ - font: "text-12" - }); - this.getChildControl("header-layout").addAt(control, this.__isMyComment() ? 0 : 2); - break; - case "comment-content": - control = new osparc.ui.markdown.Markdown().set({ - decorator: "rounded", - noMargin: true, - paddingLeft: 8, - paddingRight: 8, - allowGrowX: true, - }); - control.getContentElement().setStyles({ - "text-align": this.__isMyComment() ? "right" : "left", - }); - this._add(control, { - row: 1, - column: 1, - }); - break; - case "spacer": - control = new qx.ui.core.Spacer(); - this._add(control, { - row: 1, - column: this.__isMyComment() ? 
0 : 2, - }); - break; - } - - return control || this.base(arguments, id); - }, - - __buildLayout: function() { - const thumbnail = this.getChildControl("thumbnail"); - - const userName = this.getChildControl("user-name"); - - const date = new Date(this.__comment["modified"]); - const date2 = osparc.utils.Utils.formatDateAndTime(date); - const lastUpdate = this.getChildControl("last-updated"); - lastUpdate.setValue(date2); - - const commentContent = this.getChildControl("comment-content"); - commentContent.setValue(this.__comment["content"]); - - osparc.store.Users.getInstance().getUser(this.__comment["userGroupId"]) - .then(user => { - if (user) { - thumbnail.setSource(user.getThumbnail()); - userName.setValue(user.getLabel()); - } else { - thumbnail.setSource(osparc.utils.Avatar.emailToThumbnail()); - userName.setValue("Unknown user"); - } - }) - .catch(() => { - thumbnail.setSource(osparc.utils.Avatar.emailToThumbnail()); - userName.setValue("Unknown user"); - }); - - this.getChildControl("spacer"); - } - } -}); diff --git a/services/static-webserver/client/source/class/osparc/info/Conversation.js b/services/static-webserver/client/source/class/osparc/info/Conversation.js deleted file mode 100644 index d63d9b7732d9..000000000000 --- a/services/static-webserver/client/source/class/osparc/info/Conversation.js +++ /dev/null @@ -1,238 +0,0 @@ -/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2023 IT'IS Foundation, https://itis.swiss - - License: - MIT: https://opensource.org/licenses/MIT - - Authors: - * Odei Maiz (odeimaiz) - -************************************************************************ */ - - -qx.Class.define("osparc.info.Conversation", { - extend: qx.ui.tabview.Page, - - /** - * @param studyData {String} Study Data - * @param conversationId {String} Conversation Id - */ - construct: function(studyData, conversationId) { - this.base(arguments); - - this.__studyData = studyData; - - if (conversationId) { - this.setConversationId(conversationId); - } - - this._setLayout(new qx.ui.layout.VBox(10)); - - this.set({ - padding: 10, - showCloseButton: false, - }); - - this.getChildControl("button").set({ - font: "text-13", - }); - this.__addConversationButtons(); - - this.__buildLayout(); - - this.fetchMessages(); - }, - - properties: { - conversationId: { - check: "String", - init: null, - nullable: false, - event: "changeConversationId" - }, - }, - - events: { - "conversationDeleted": "qx.event.type.Event", - }, - - members: { - __studyData: null, - __nextRequestParams: null, - __messagesTitle: null, - __messagesList: null, - __loadMoreMessages: null, - - __addConversationButtons: function() { - const tabButton = this.getChildControl("button"); - - const buttonsAesthetics = { - focusable: false, - keepActive: true, - padding: 0, - backgroundColor: "transparent", - }; - const renameButton = new qx.ui.form.Button(null, "@FontAwesome5Solid/pencil-alt/10").set({ - ...buttonsAesthetics, - }); - renameButton.addListener("execute", () => { - const titleEditor = new osparc.widget.Renamer(tabButton.getLabel()); - titleEditor.addListener("labelChanged", e => { - titleEditor.close(); - const newLabel = e.getData()["newLabel"]; - if (this.getConversationId()) { - osparc.study.Conversations.renameConversation(this.__studyData["uuid"], this.getConversationId(), newLabel) - .then(() => { - this.getChildControl("button").setLabel(newLabel); - }); - } else { - // create new conversation first - 
osparc.study.Conversations.addConversation(this.__studyData["uuid"], newLabel) - .then(data => { - this.setConversationId(data["conversationId"]); - this.getChildControl("button").setLabel(newLabel); - }); - } - }, this); - titleEditor.center(); - titleEditor.open(); - }); - // eslint-disable-next-line no-underscore-dangle - tabButton._add(renameButton, { - row: 0, - column: 3 - }); - - const trashButton = new qx.ui.form.Button(null, "@FontAwesome5Solid/times/12").set({ - ...buttonsAesthetics, - paddingLeft: 4, // adds spacing between buttons - }); - trashButton.addListener("execute", () => { - const deleteConversation = () => { - osparc.study.Conversations.deleteConversation(this.__studyData["uuid"], this.getConversationId()) - .then(() => this.fireEvent("conversationDeleted")); - } - if (this.__messagesList.getChildren().length === 0) { - deleteConversation(); - } else { - const msg = this.tr("Are you sure you want to delete the conversation?"); - const confirmationWin = new osparc.ui.window.Confirmation(msg).set({ - caption: this.tr("Delete Conversation"), - confirmText: this.tr("Delete"), - confirmAction: "delete" - }); - confirmationWin.open(); - confirmationWin.addListener("close", () => { - if (confirmationWin.getConfirmed()) { - deleteConversation(); - } - }, this); - } - }); - // eslint-disable-next-line no-underscore-dangle - tabButton._add(trashButton, { - row: 0, - column: 4 - }); - this.bind("conversationId", trashButton, "visibility", { - converter: value => value ? "visible" : "excluded" - }); - }, - - __buildLayout: function() { - this.__messagesTitle = new qx.ui.basic.Label(); - this._add(this.__messagesTitle); - - this.__messagesList = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)).set({ - alignY: "middle" - }); - this._add(this.__messagesList, { - flex: 1 - }); - - this.__loadMoreMessages = new osparc.ui.form.FetchButton(this.tr("Load more messages...")); - this.__loadMoreMessages.addListener("execute", () => this.fetchMessages(false)); - this._add(this.__loadMoreMessages); - - if (osparc.data.model.Study.canIWrite(this.__studyData["accessRights"])) { - const addMessages = new osparc.info.CommentAdd(this.__studyData["uuid"], this.getConversationId()); - addMessages.setPaddingLeft(10); - addMessages.addListener("commentAdded", e => { - const data = e.getData(); - if (data["conversationId"]) { - this.setConversationId(data["conversationId"]); - } - this.fetchMessages(); - }); - this._add(addMessages); - } - }, - - fetchMessages: function(removeMessages = true) { - if (this.getConversationId() === null) { - this.__messagesTitle.setValue(this.tr("No messages yet")); - this.__messagesList.hide(); - this.__loadMoreMessages.hide(); - return; - } - - this.__messagesList.show(); - this.__loadMoreMessages.show(); - this.__loadMoreMessages.setFetching(true); - - if (removeMessages) { - this.__messagesList.removeAll(); - } - - this.__getNextRequest() - .then(resp => { - const messages = resp["data"]; - this.__addMessages(messages); - this.__nextRequestParams = resp["_links"]["next"]; - if (this.__nextRequestParams === null) { - this.__loadMoreMessages.exclude(); - } - }) - .finally(() => this.__loadMoreMessages.setFetching(false)); - }, - - __getNextRequest: function() { - const params = { - url: { - studyId: this.__studyData["uuid"], - conversationId: this.getConversationId(), - offset: 0, - limit: 42 - } - }; - const nextRequestParams = this.__nextRequestParams; - if (nextRequestParams) { - params.url.offset = nextRequestParams.offset; - params.url.limit = 
nextRequestParams.limit; - } - const options = { - resolveWResponse: true - }; - return osparc.data.Resources.fetch("conversations", "getMessagesPage", params, options); - }, - - __addMessages: function(messages) { - if (messages.length === 1) { - this.__messagesTitle.setValue(this.tr("1 Message")); - } else if (messages.length > 1) { - this.__messagesTitle.setValue(messages.length + this.tr(" Messages")); - } - - messages.forEach(message => { - const messageUi = new osparc.info.CommentUI(message); - this.__messagesList.add(messageUi); - }); - }, - } -}); diff --git a/services/static-webserver/client/source/class/osparc/info/FunctionLarge.js b/services/static-webserver/client/source/class/osparc/info/FunctionLarge.js new file mode 100644 index 000000000000..941a8040f137 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/info/FunctionLarge.js @@ -0,0 +1,187 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.info.FunctionLarge", { + extend: osparc.info.CardLarge, + + /** + * @param func {osparc.data.model.Function} Function model + */ + construct: function(func) { + this.base(arguments); + + this.setFunction(func); + + this.setOpenOptions(false); + + this._attachHandlers(); + }, + + events: { + "updateFunction": "qx.event.type.Data", + }, + + properties: { + function: { + check: "osparc.data.model.Function", + init: null, + nullable: false + } + }, + + members: { + _rebuildLayout: function() { + this._removeAll(); + + const vBox = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)); + + const infoElements = this.__infoElements(); + const isStudy = true; + const infoLayout = osparc.info.Utils.infoElementsToLayout(infoElements, isStudy); + vBox.add(infoLayout); + + // inputs, default inputs and outputs + const info = { + "Inputs": this.getFunction().getInputSchema()["schema_content"], + "Default Inputs": this.getFunction().getDefaultInputs(), + "Outputs": this.getFunction().getOutputSchema()["schema_content"], + }; + const jsonViewer = new osparc.widget.JsonFormatterWidget(info); + const scroll = new qx.ui.container.Scroll(); + scroll.add(jsonViewer); + vBox.add(scroll, { + flex: 1 + }); + + // Copy Id button + const text = "Function Id"; + const copyIdButton = new qx.ui.form.Button(null, "@FontAwesome5Solid/copy/12").set({ + label: text, + toolTipText: "Copy " + text, + marginTop: 15, + allowGrowX: false + }); + copyIdButton.addListener("execute", () => osparc.utils.Utils.copyTextToClipboard(this.getFunction().getUuid())); + vBox.add(copyIdButton); + + // All in a scroll container + const scrollContainer = new qx.ui.container.Scroll(); + scrollContainer.add(vBox); + + this._add(scrollContainer, { + flex: 1 + }); + }, + + __infoElements: function() { + const canIWrite = this.getFunction().canIWrite(); + + const infoLayout = { + "TITLE": { + view: osparc.info.FunctionUtils.createTitle(this.getFunction()), + action: { + button: osparc.utils.Utils.getEditButton(canIWrite), + callback: canIWrite ? 
this.__openTitleEditor : null, + ctx: this + } + }, + "THUMBNAIL": { + view: this.__createThumbnail(), + action: null + }, + "DESCRIPTION": { + view: osparc.info.FunctionUtils.createDescription(this.getFunction()), + action: { + button: osparc.utils.Utils.getEditButton(canIWrite), + callback: canIWrite ? this.__openDescriptionEditor : null, + ctx: this + } + }, + "ACCESS_RIGHTS": { + label: this.tr("Permissions"), + view: osparc.info.FunctionUtils.createOwner(this.getFunction()), + action: null + }, + "CREATED": { + label: this.tr("Created"), + view: osparc.info.FunctionUtils.createCreationDate(this.getFunction()), + action: null + }, + "MODIFIED": { + label: this.tr("Modified"), + view: osparc.info.FunctionUtils.createLastChangeDate(this.getFunction()), + action: null + }, + }; + return infoLayout; + }, + + __createThumbnail: function() { + const maxWidth = 190; + const maxHeight = 220; + const thumb = osparc.info.FunctionUtils.createThumbnail(this.getFunction(), maxWidth, maxHeight); + thumb.set({ + maxWidth: 120, + maxHeight: 139 + }); + thumb.getChildControl("image").set({ + width: 120, + height: 139, + scale: true, + }); + + return thumb; + }, + + __openTitleEditor: function() { + const title = this.tr("Edit Title"); + const titleEditor = new osparc.widget.Renamer(this.getFunction().getTitle(), null, title); + titleEditor.addListener("labelChanged", e => { + titleEditor.close(); + const newLabel = e.getData()["newLabel"]; + this.__patchFunction("title", newLabel); + }, this); + titleEditor.center(); + titleEditor.open(); + }, + + __openDescriptionEditor: function() { + const title = this.tr("Edit Description"); + const textEditor = new osparc.editor.MarkdownEditor(this.getFunction().getDescription()); + textEditor.setMaxHeight(570); + const win = osparc.ui.window.Window.popUpInWindow(textEditor, title, 400, 300); + textEditor.addListener("textChanged", e => { + win.close(); + const newDescription = e.getData(); + this.__patchFunction("description", newDescription); + }, this); + textEditor.addListener("cancel", () => { + win.close(); + }, this); + }, + + __patchFunction: function(fieldKey, value) { + this.getFunction().patchFunction({[fieldKey]: value}) + .then(functionData => { + this.fireDataEvent("updateFunction", functionData); + qx.event.message.Bus.getInstance().dispatchByName("updateFunction", functionData); + }) + .catch(err => osparc.FlashMessenger.logError(err)); + } + } +}); diff --git a/services/static-webserver/client/source/class/osparc/info/FunctionUtils.js b/services/static-webserver/client/source/class/osparc/info/FunctionUtils.js new file mode 100644 index 000000000000..d76c848cf80e --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/info/FunctionUtils.js @@ -0,0 +1,103 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.info.FunctionUtils", { + type: "static", + + statics: { + /** + * @param func {osparc.data.model.Function} Function Model + */ + createTitle: function(func) { + const title = osparc.info.Utils.createTitle(); + func.bind("title", title, "value"); + return title; + }, + + /** + * @param func {osparc.data.model.Function} Function Model + * @param maxHeight {Number} description's maxHeight + */ 
+ createDescription: function(func, maxHeight) { + const description = new osparc.ui.markdown.Markdown(); + func.bind("description", description, "value", { + converter: desc => desc ? desc : "No description" + }); + const scrollContainer = new qx.ui.container.Scroll(); + if (maxHeight) { + scrollContainer.setMaxHeight(maxHeight); + } + scrollContainer.add(description); + return scrollContainer; + }, + + /** + * @param func {osparc.data.model.Function} Function Model + */ + createOwner: function(func) { + const owner = new qx.ui.basic.Label(); + const canIWrite = func.canIWrite(); + owner.setValue(canIWrite ? "My Function" : "Read Only"); + return owner; + }, + + /** + * @param func {osparc.data.model.Function} Function Model + */ + createCreationDate: function(func) { + const creationDate = new qx.ui.basic.Label(); + func.bind("creationDate", creationDate, "value", { + converter: date => osparc.utils.Utils.formatDateAndTime(date) + }); + return creationDate; + }, + + /** + * @param func {osparc.data.model.Function} Function Model + */ + createLastChangeDate: function(func) { + const lastChangeDate = new qx.ui.basic.Label(); + func.bind("lastChangeDate", lastChangeDate, "value", { + converter: date => osparc.utils.Utils.formatDateAndTime(date) + }); + return lastChangeDate; + }, + + /** + * @param func {osparc.data.model.Function} Function Model + * @param maxWidth {Number} thumbnail's maxWidth + * @param maxHeight {Number} thumbnail's maxHeight + */ + createThumbnail: function(func, maxWidth, maxHeight) { + const thumbnail = osparc.info.Utils.createThumbnail(maxWidth, maxHeight); + const noThumbnail = "osparc/no_photography_black_24dp.svg"; + func.bind("thumbnail", thumbnail, "source", { + converter: thumb => thumb ? thumb : noThumbnail, + onUpdate: (source, target) => { + if (source.getThumbnail() === "") { + target.getChildControl("image").set({ + minWidth: 120, + minHeight: 139 + }); + } + } + }); + return thumbnail; + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/info/MergedLarge.js b/services/static-webserver/client/source/class/osparc/info/MergedLarge.js index ff95c2ea350f..baefb1a75140 100644 --- a/services/static-webserver/client/source/class/osparc/info/MergedLarge.js +++ b/services/static-webserver/client/source/class/osparc/info/MergedLarge.js @@ -296,7 +296,6 @@ qx.Class.define("osparc.info.MergedLarge", { }, __createDescription: function() { - const maxHeight = 400; const descriptionLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(5).set({ alignY: "middle" })); @@ -306,7 +305,8 @@ qx.Class.define("osparc.info.MergedLarge", { }); descriptionLayout.add(label); - const descriptionContainer = osparc.info.StudyUtils.createDescriptionMD(this.getStudy(), maxHeight); + const maxHeight = 400; + const descriptionContainer = osparc.info.StudyUtils.createDescription(this.getStudy(), maxHeight); descriptionLayout.add(descriptionContainer); return descriptionLayout; @@ -317,13 +317,7 @@ qx.Class.define("osparc.info.MergedLarge", { resourcesLayout.exclude(); let promise = null; if (this.getNode().getNodeId()) { - const params = { - url: { - studyId: this.getStudy().getUuid(), - nodeId: this.getNode().getNodeId() - } - }; - promise = osparc.data.Resources.get("nodesInStudyResources", params); + promise = osparc.store.Study.getInstance().getNodeResources(this.getStudy().getUuid(), this.getNode().getNodeId()); } else { promise = osparc.store.Services.getResources(this.getNode().getKey(), this.getNode().getVersion()) } diff --git 
a/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js b/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js index 7a38aa727372..988b9f1b805b 100644 --- a/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js +++ b/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js @@ -96,17 +96,13 @@ qx.Class.define("osparc.info.ServiceLarge", { _rebuildLayout: function() { this._removeAll(); - const vBox = new qx.ui.container.Composite(new qx.ui.layout.VBox(15)); + const vBox = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)); const deprecated = this.__createDeprecated(); if (deprecated) { vBox.add(deprecated); } - const description = this.__createDescription(); - const editInTitle = this.__createViewWithEdit(description.getChildren()[0], this.__openDescriptionEditor); - description.addAt(editInTitle, 0); - const copyMetadataButton = new qx.ui.form.Button(this.tr("Copy Raw metadata"), "@FontAwesome5Solid/copy/12").set({ allowGrowX: false }); @@ -114,10 +110,9 @@ qx.Class.define("osparc.info.ServiceLarge", { if ( this.getService()["descriptionUi"] && - !osparc.service.Utils.canIWrite(this.getService()["accessRights"]) && - description.getChildren().length > 1 + !osparc.data.model.Service.canIWrite(this.getService()["accessRights"]) ) { - // Show also the copy Id buttons too + // In case of service instance, show also the copy Id buttons too const buttonsLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); if (this.getNodeId()) { const studyAlias = osparc.product.Utils.getStudyAlias({firstUpperCase: true}); @@ -135,14 +130,12 @@ qx.Class.define("osparc.info.ServiceLarge", { buttonsLayout.add(copyNodeIdButton); vBox.add(buttonsLayout); } - // Also copyMetadataButton if tester - if (osparc.data.Permissions.getInstance().isTester()) { - buttonsLayout.add(copyMetadataButton); - vBox.add(buttonsLayout); - } + // Show description only - vBox.add(description.getChildren()[1]); + const description = this.__createDescription(); + vBox.add(description); } else { + // Icon and title const hBox = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); const icon = this.__createIcon(); const iconLayout = this.__createViewWithEdit(icon, this.__openIconEditor); @@ -152,47 +145,28 @@ qx.Class.define("osparc.info.ServiceLarge", { hBox.add(titleLayout); vBox.add(hBox); - const extraInfo = this.__extraInfo(); - const extraInfoLayout = this.__createExtraInfo(extraInfo); - const bounds = this.getBounds(); - const offset = 30; - const maxThumbnailHeight = extraInfo.length*20; - let widgetWidth = bounds ? 
bounds.width - offset : 500 - offset; - let thumbnailWidth = widgetWidth - 2 * osparc.info.CardLarge.PADDING - osparc.info.CardLarge.EXTRA_INFO_WIDTH; - thumbnailWidth = Math.min(thumbnailWidth - 20, osparc.info.CardLarge.THUMBNAIL_MAX_WIDTH); - const thumbnail = this.__createThumbnail(thumbnailWidth, maxThumbnailHeight); - const thumbnailLayout = this.__createViewWithEdit(thumbnail, this.__openThumbnailEditor); - thumbnailLayout.getLayout().set({ - alignX: "center" - }); - const infoAndThumbnail = new qx.ui.container.Composite(new qx.ui.layout.HBox(3).set({ - alignX: "center" - })); - infoAndThumbnail.add(extraInfoLayout); - infoAndThumbnail.add(thumbnailLayout, { - flex: 1 - }); - vBox.add(infoAndThumbnail); - - if (osparc.service.Utils.canIWrite(this.getService()["accessRights"])) { - const descriptionUi = this.__createDescriptionUi(); - if (descriptionUi) { - vBox.add(descriptionUi); - } - } - vBox.add(description); + // Rest of information + const infoElements = this.__infoElements(); + const isStudy = false; + const infoLayout = osparc.info.Utils.infoElementsToLayout(infoElements, isStudy); + vBox.add(infoLayout); + // Resources info if not billable if (!osparc.desktop.credits.Utils.areWalletsEnabled()) { const resources = this.__createResources(); if (resources) { vBox.add(resources); } } - vBox.add(copyMetadataButton); } + // Copy metadata button + vBox.add(copyMetadataButton); + + // All in a scroll container const scrollContainer = new qx.ui.container.Scroll(); scrollContainer.add(vBox); + this._add(scrollContainer, { flex: 1 }); @@ -203,7 +177,7 @@ qx.Class.define("osparc.info.ServiceLarge", { alignY: "middle" })); layout.add(view); - if (osparc.service.Utils.canIWrite(this.getService()["accessRights"])) { + if (osparc.data.model.Service.canIWrite(this.getService()["accessRights"])) { const editBtn = osparc.utils.Utils.getEditButton(); editBtn.addListener("execute", () => cb.call(this), this); layout.add(editBtn); @@ -246,108 +220,99 @@ qx.Class.define("osparc.info.ServiceLarge", { return title; }, - __extraInfo: function() { - const extraInfo = []; + __infoElements: function() { + const canIWrite = osparc.data.model.Service.canIWrite(this.getService()["accessRights"]); + + const infoLayout = { + "THUMBNAIL": { + view: this.__createThumbnail(), + action: { + button: osparc.utils.Utils.getEditButton(canIWrite), + callback: canIWrite ? this.__openThumbnailEditor : null, + ctx: this, + }, + }, + "KEY": { + label: this.tr("Key"), + view: this.__createKey(), + action: { + button: osparc.utils.Utils.getCopyButton(), + callback: this.__copyKeyToClipboard, + ctx: this, + }, + }, + "VERSION": { + label: this.tr("Version"), + view: this.__createDisplayVersion(), + action: { + button: canIWrite ? osparc.utils.Utils.getEditButton() : null, + callback: this.__openVersionDisplayEditor, + ctx: this, + }, + }, + "DATE": { + label: this.tr("Released Date"), + view: this.__createReleasedDate(), + action: null, + }, + "CONTACT": { + label: this.tr("Contact"), + view: this.__createContact(), + action: null, + }, + "AUTHORS": { + label: this.tr("Authors"), + view: this.__createAuthors(), + action: null, + }, + "ACCESS_RIGHTS": { + label: this.tr("Access"), + view: this.__createAccessRights(), + action: { + button: canIWrite ? osparc.utils.Utils.getEditButton() : null, + callback: this.isOpenOptions() ? 
this.__openAccessRights : "openAccessRights", + ctx: this, + }, + }, + "DESCRIPTION": { + view: this.__createDescription(), + action: { + button: osparc.utils.Utils.getEditButton(canIWrite), + callback: canIWrite ? this.__openDescriptionEditor : null, + ctx: this, + }, + }, + }; if (this.getNodeId()) { - extraInfo.push({ - label: this.tr("SERVICE ID"), + infoLayout["SERVICE_ID"] = { + label: this.tr("Service ID"), view: this.__createNodeId(), action: { button: osparc.utils.Utils.getCopyButton(), callback: this.__copyNodeIdToClipboard, ctx: this - } - }); + }, + }; } - extraInfo.push({ - label: this.tr("KEY"), - view: this.__createKey(), - action: { - button: osparc.utils.Utils.getCopyButton(), - callback: this.__copyKeyToClipboard, - ctx: this - } - }); - - if (osparc.data.Permissions.getInstance().isTester() || osparc.service.Utils.canIWrite(this.getService()["accessRights"])) { - extraInfo.push({ - label: this.tr("INTEGRATION VERSION"), + if (osparc.data.Permissions.getInstance().isTester() || canIWrite) { + infoLayout["INTEGRATION_VERSION"] = { + label: this.tr("Integration Version"), view: this.__createIntegrationVersion(), - action: null - }); - } - - extraInfo.push({ - label: this.tr("VERSION"), - view: this.__createDisplayVersion(), - action: { - button: osparc.service.Utils.canIWrite(this.getService()["accessRights"]) ? osparc.utils.Utils.getEditButton() : null, - callback: this.__openVersionDisplayEditor, - ctx: this - } - }, { - label: this.tr("RELEASE DATE"), - view: this.__createReleasedDate(), - action: null - }, { - label: this.tr("CONTACT"), - view: this.__createContact(), - action: null - }, { - label: this.tr("AUTHORS"), - view: this.__createAuthors(), - action: null - }, { - label: this.tr("ACCESS RIGHTS"), - view: this.__createAccessRights(), - action: { - button: osparc.service.Utils.canIWrite(this.getService()["accessRights"]) ? osparc.utils.Utils.getEditButton() : null, - callback: this.isOpenOptions() ? this.__openAccessRights : "openAccessRights", - ctx: this - } - }); - - if ( - osparc.product.Utils.showClassifiers() && - this.getService()["classifiers"] - ) { - extraInfo.push({ - label: this.tr("CLASSIFIERS"), - view: this.__createClassifiers(), - action: { - button: osparc.service.Utils.canIWrite(this.getService()["accessRights"]) ? osparc.utils.Utils.getEditButton() : null, - callback: this.isOpenOptions() ? this.__openClassifiers : "openClassifiers", - ctx: this - } - }); + action: null, + }; } - if ( - osparc.product.Utils.showQuality() && - this.getService()["quality"] && - osparc.metadata.Quality.isEnabled(this.getService()["quality"]) - ) { - extraInfo.push({ - label: this.tr("QUALITY"), - view: this.__createQuality(), - action: { - button: osparc.service.Utils.canIWrite(this.getService()["accessRights"]) ? osparc.utils.Utils.getEditButton() : null, - callback: this.isOpenOptions() ? 
this.__openQuality : "openQuality", - ctx: this - } - }); + if (canIWrite) { + infoLayout["DESCRIPTION_ONLY"] = { + label: this.tr("Description only"), + view: this.__createDescriptionUi(), + action: null, + }; } - return extraInfo; - }, - - __createExtraInfo: function(extraInfo) { - const moreInfo = osparc.info.Utils.extraInfosToGrid(extraInfo).set({ - width: osparc.info.CardLarge.EXTRA_INFO_WIDTH - }); - return moreInfo; + return infoLayout; }, __createNodeId: function() { @@ -382,15 +347,9 @@ qx.Class.define("osparc.info.ServiceLarge", { return osparc.info.ServiceUtils.createAccessRights(this.getService()); }, - __createClassifiers: function() { - return osparc.info.ServiceUtils.createClassifiers(this.getService()); - }, - - __createQuality: function() { - return osparc.info.ServiceUtils.createQuality(this.getService()); - }, - - __createThumbnail: function(maxWidth, maxHeight = 160) { + __createThumbnail: function() { + let maxWidth = 190; + let maxHeight = 220; // make sure maxs are not larger than the mins const minWidth = Math.max(120, maxWidth); const minHeight = Math.max(139, maxHeight); @@ -418,9 +377,7 @@ qx.Class.define("osparc.info.ServiceLarge", { __createDescriptionUi: function() { const cbAutoPorts = new qx.ui.form.CheckBox().set({ - label: this.tr("Show Description only"), toolTipText: this.tr("From all the metadata shown in this view,\nonly the Description will be shown to Users."), - iconPosition: "right", }); cbAutoPorts.setValue(Boolean(this.getService()["descriptionUi"])); cbAutoPorts.addListener("changeValue", e => { @@ -438,13 +395,7 @@ qx.Class.define("osparc.info.ServiceLarge", { resourcesLayout.exclude(); let promise = null; if (this.getNodeId()) { - const params = { - url: { - studyId: this.getStudyId(), - nodeId: this.getNodeId() - } - }; - promise = osparc.data.Resources.get("nodesInStudyResources", params); + promise = osparc.store.Study.getInstance().getNodeResources(this.getStudyId(), this.getNodeId()); } else { promise = osparc.store.Services.getResources(this.getService()["key"], this.getService()["version"]) } @@ -513,33 +464,6 @@ qx.Class.define("osparc.info.ServiceLarge", { }, this); }, - __openClassifiers: function() { - const title = this.tr("Classifiers"); - let classifiers = null; - if (osparc.service.Utils.canIWrite(this.getService()["accessRights"])) { - classifiers = new osparc.metadata.ClassifiersEditor(this.getService()); - const win = osparc.ui.window.Window.popUpInWindow(classifiers, title, 400, 400); - classifiers.addListener("updateClassifiers", e => { - win.close(); - const updatedServiceData = e.getData(); - this.setService(updatedServiceData); - this.fireDataEvent("updateService", updatedServiceData); - }, this); - } else { - classifiers = new osparc.metadata.ClassifiersViewer(this.getService()); - osparc.ui.window.Window.popUpInWindow(classifiers, title, 400, 400); - } - }, - - __openQuality: function() { - const qualityEditor = osparc.info.ServiceUtils.openQuality(this.getService()); - qualityEditor.addListener("updateQuality", e => { - const updatedServiceData = e.getData(); - this.setService(updatedServiceData); - this.fireDataEvent("updateService", updatedServiceData); - }); - }, - __openThumbnailEditor: function() { const title = this.tr("Edit Thumbnail"); const thumbnailEditor = new osparc.editor.ThumbnailEditor(this.getService()["thumbnail"]); diff --git a/services/static-webserver/client/source/class/osparc/info/ServiceUtils.js b/services/static-webserver/client/source/class/osparc/info/ServiceUtils.js index 
790851ea0c5f..54b44fa0cc8c 100644
--- a/services/static-webserver/client/source/class/osparc/info/ServiceUtils.js
+++ b/services/static-webserver/client/source/class/osparc/info/ServiceUtils.js
@@ -20,6 +20,13 @@ qx.Class.define("osparc.info.ServiceUtils", {
   type: "static",

   statics: {
+    RESOURCES_INFO: {
+      "limit": {
+        label: qx.locale.Manager.tr("Limit"),
+        tooltip: qx.locale.Manager.tr("Runtime check:<br>The service can consume a maximum of 'limit' resources - if it attempts to use more resources than this limit, it will be stopped")
+      }
+    },
+
     /**
      * @param label {String} label
      */
@@ -95,16 +102,21 @@
      */
     createAuthors: function(serviceData) {
       const authors = new qx.ui.basic.Label().set({
-        rich: true
+        rich: true,
+        wrap: true,
+        maxWidth: 220,
       });
-      serviceData["authors"].forEach(author => {
-        const oldVal = authors.getValue();
-        const oldTTT = authors.getToolTipText();
+      if (serviceData["authors"]) {
         authors.set({
-          value: (oldVal ? oldVal : "") + `${author["name"]}<br/>`,
-          toolTipText: (oldTTT ? oldTTT : "") + `${author["email"]} - ${author["affiliation"]}<br/>`
+          value: serviceData["authors"].map(author => author["name"]).join(", "),
         });
-      });
+        serviceData["authors"].forEach(author => {
+          const oldTTT = authors.getToolTipText();
+          authors.set({
+            toolTipText: (oldTTT ? oldTTT : "") + `${author["email"]} - ${author["affiliation"]}<br/>`
+          });
+        });
+      }
       return authors;
     },

@@ -112,20 +124,27 @@
      * @param serviceData {Object} Serialized Service Object
      */
     createAccessRights: function(serviceData) {
-      let permissions = "";
-      const myGID = osparc.auth.Data.getInstance().getGroupId();
-      const ar = serviceData["accessRights"];
-      if (myGID in ar) {
-        if (ar[myGID]["write"]) {
-          permissions = qx.locale.Manager.tr("Write");
-        } else if (ar[myGID]["execute"]) {
-          permissions = qx.locale.Manager.tr("Execute");
+      const allMyGIds = osparc.store.Groups.getInstance().getAllMyGroupIds();
+      const accessRights = serviceData["accessRights"];
+      const permissions = new Set();
+      allMyGIds.forEach(gId => {
+        if (gId in accessRights) {
+          if (accessRights[gId]["write"]) {
+            permissions.add("write");
+          } else if (accessRights[gId]["execute"]) {
+            permissions.add("read");
+          }
         }
+      });
+      const accessRightsLabel = new qx.ui.basic.Label();
+      if (permissions.has("write")) {
+        accessRightsLabel.setValue(osparc.data.Roles.SERVICES["write"].label);
+      } else if (permissions.has("read")) {
+        accessRightsLabel.setValue(osparc.data.Roles.SERVICES["read"].label);
       } else {
-        permissions = qx.locale.Manager.tr("Public");
+        accessRightsLabel.setValue(qx.locale.Manager.tr("Public"));
       }
-      const accessRights = new qx.ui.basic.Label(permissions);
-      return accessRights;
+      return accessRightsLabel;
     },

     /**
@@ -165,18 +184,8 @@
     /**
      * @param serviceData {Object} Serialized Service Object
-     * @param maxHeight {Number} description's maxHeight
      */
     createDescription: function(serviceData) {
-      const descriptionLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(5).set({
-        alignY: "middle"
-      }));
-
-      const label = new qx.ui.basic.Label(qx.locale.Manager.tr("Description")).set({
-        font: "text-13"
-      });
-      descriptionLayout.add(label);
-
       const description = new osparc.ui.markdown.Markdown();
       // display markdown link content if that's the case
       if (
@@ -194,19 +203,15 @@
           console.error(err);
           description.setValue(serviceData["description"]);
         });
-      } else {
+      } else if (serviceData["description"]) {
         description.setValue(serviceData["description"]);
+      } else {
+        description.setValue(this.tr("No description"));
       }
-      descriptionLayout.add(description);
+      const scrollContainer = new qx.ui.container.Scroll();
+      scrollContainer.add(description);

-      return descriptionLayout;
-    },
-
-    RESOURCES_INFO: {
-      "limit": {
-        label: qx.locale.Manager.tr("Limit"),
-        tooltip: qx.locale.Manager.tr("Runtime check:
The service can consume a maximum of 'limit' resources - if it attempts to use more resources than this limit, it will be stopped") - } + return scrollContainer; }, createResourcesInfo: function() { diff --git a/services/static-webserver/client/source/class/osparc/info/StudyLarge.js b/services/static-webserver/client/source/class/osparc/info/StudyLarge.js index 4b2a67ce2b79..8061b5e47d1e 100644 --- a/services/static-webserver/client/source/class/osparc/info/StudyLarge.js +++ b/services/static-webserver/client/source/class/osparc/info/StudyLarge.js @@ -60,9 +60,9 @@ qx.Class.define("osparc.info.StudyLarge", { if ( this.__canIWrite() && this.getStudy().getTemplateType() && - osparc.data.Permissions.getInstance().isTester() + osparc.data.Permissions.getInstance().isProductOwner() ) { - // let testers change the template type + // let product owners change the template type const hBox = new qx.ui.container.Composite(new qx.ui.layout.HBox(5).set({ alignY: "middle", })); @@ -95,17 +95,20 @@ qx.Class.define("osparc.info.StudyLarge", { if (selected) { saveBtn.setFetching(true); const templateType = selected.getModel(); - osparc.store.Study.patchTemplateType(this.getStudy().getUuid(), templateType) + osparc.store.Study.getInstance().patchTemplateType(this.getStudy().serialize(), templateType) .then(() => osparc.FlashMessenger.logAs(this.tr("Template type updated, please reload"), "INFO")) + .catch(err => osparc.FlashMessenger.logError(err)) .finally(() => saveBtn.setFetching(false)); } }, this); } const infoElements = this.__infoElements(); - const infoLayout = osparc.info.StudyUtils.infoElementsToLayout(infoElements); + const isStudy = true; + const infoLayout = osparc.info.Utils.infoElementsToLayout(infoElements, isStudy); vBox.add(infoLayout); + // Copy Id button let text = osparc.product.Utils.getStudyAlias({firstUpperCase: true}) + " Id"; if (this.getStudy().getTemplateType()) { text = osparc.product.Utils.getTemplateAlias({firstUpperCase: true}) + " Id"; @@ -119,6 +122,7 @@ qx.Class.define("osparc.info.StudyLarge", { copyIdButton.addListener("execute", () => osparc.utils.Utils.copyTextToClipboard(this.getStudy().getUuid())); vBox.add(copyIdButton); + // All in a scroll container const scrollContainer = new qx.ui.container.Scroll(); scrollContainer.add(vBox); @@ -128,63 +132,62 @@ qx.Class.define("osparc.info.StudyLarge", { }, __infoElements: function() { + const canIWrite = this.__canIWrite(); + const infoLayout = { "TITLE": { - label: this.tr("Title:"), view: osparc.info.StudyUtils.createTitle(this.getStudy()), action: { - button: osparc.utils.Utils.getEditButton(this.__canIWrite()), - callback: this.__canIWrite() ? this.__openTitleEditor : null, + button: osparc.utils.Utils.getEditButton(canIWrite), + callback: canIWrite ? this.__openTitleEditor : null, ctx: this } }, "THUMBNAIL": { - label: this.tr("Thumbnail:"), view: this.__createThumbnail(), action: { - button: osparc.utils.Utils.getEditButton(this.__canIWrite()), - callback: this.__canIWrite() ? this.__openThumbnailEditor : null, + button: osparc.utils.Utils.getEditButton(canIWrite), + callback: canIWrite ? this.__openThumbnailEditor : null, ctx: this } }, "DESCRIPTION": { - label: this.tr("Description:"), - view: osparc.info.StudyUtils.createDescriptionMD(this.getStudy(), 150), + view: osparc.info.StudyUtils.createDescription(this.getStudy()), action: { - button: osparc.utils.Utils.getEditButton(this.__canIWrite()), - callback: this.__canIWrite() ? 
this.__openDescriptionEditor : null, + button: osparc.utils.Utils.getEditButton(canIWrite), + callback: canIWrite ? this.__openDescriptionEditor : null, ctx: this } }, "AUTHOR": { - label: this.tr("Author:"), + label: this.tr("Author"), view: osparc.info.StudyUtils.createOwner(this.getStudy()), action: null }, "ACCESS_RIGHTS": { - label: this.tr("Access:"), + label: this.tr("Access"), view: osparc.info.StudyUtils.createAccessRights(this.getStudy()), action: { - button: osparc.utils.Utils.getLinkButton(this.__canIWrite()), + button: osparc.utils.Utils.getLinkButton(canIWrite), callback: this.isOpenOptions() ? this.__openAccessRights : "openAccessRights", ctx: this } }, "CREATED": { - label: this.tr("Created:"), + label: this.tr("Created"), view: osparc.info.StudyUtils.createCreationDate(this.getStudy()), action: null }, "MODIFIED": { - label: this.tr("Modified:"), + label: this.tr("Modified"), view: osparc.info.StudyUtils.createLastChangeDate(this.getStudy()), action: null }, "TAGS": { - label: this.tr("Tags:"), + label: this.tr("Tags"), view: osparc.info.StudyUtils.createTags(this.getStudy()), action: { - button: osparc.utils.Utils.getLinkButton(this.__canIWrite()), + button: osparc.utils.Utils.getLinkButton(canIWrite), callback: this.isOpenOptions() ? this.__openTagsEditor : "openTags", ctx: this } @@ -211,7 +214,7 @@ qx.Class.define("osparc.info.StudyLarge", { infoLayout["CLASSIFIERS"] = { label: this.tr("Classifiers:"), view: osparc.info.StudyUtils.createClassifiers(this.getStudy()), - action: (this.getStudy().getClassifiers().length || this.__canIWrite()) ? { + action: (this.getStudy().getClassifiers().length || canIWrite) ? { button: osparc.utils.Utils.getLinkButton(), callback: this.isOpenOptions() ? this.__openClassifiers : "openClassifiers", ctx: this @@ -233,11 +236,9 @@ qx.Class.define("osparc.info.StudyLarge", { return infoLayout; }, - __createStudyId: function() { - return osparc.info.StudyUtils.createUuid(this.getStudy()); - }, - - __createThumbnail: function(maxWidth = 190, maxHeight = 220) { + __createThumbnail: function() { + const maxWidth = 190; + const maxHeight = 220; const thumb = osparc.info.StudyUtils.createThumbnail(this.getStudy(), maxWidth, maxHeight); thumb.set({ maxWidth: 120, diff --git a/services/static-webserver/client/source/class/osparc/info/StudyMedium.js b/services/static-webserver/client/source/class/osparc/info/StudyMedium.js index b31cddd39588..ef6aa7691005 100644 --- a/services/static-webserver/client/source/class/osparc/info/StudyMedium.js +++ b/services/static-webserver/client/source/class/osparc/info/StudyMedium.js @@ -73,15 +73,8 @@ qx.Class.define("osparc.info.StudyMedium", { const extraInfoLayout = this.__createExtraInfo(extraInfo); this._add(extraInfoLayout); - const descriptionLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(8)).set({ - marginTop: -10 // bring it closer to the extra info - }); - const title = new qx.ui.basic.Label(this.tr("DESCRIPTION")).set({ - paddingLeft: 14 // align it with the other titles - }); - descriptionLayout.add(title); - descriptionLayout.add(osparc.info.StudyUtils.createDescriptionMD(this.getStudy())); - this._add(descriptionLayout); + const description = osparc.info.StudyUtils.createDescription(this.getStudy()); + this._add(description); }, __createMenuButton: function() { @@ -162,7 +155,7 @@ qx.Class.define("osparc.info.StudyMedium", { __openStudyDetails: function() { const studyDetails = new osparc.info.StudyLarge(this.getStudy()); - const title = this.tr("Study Information"); + const title = 
this.tr("Project Information"); const width = osparc.info.CardLarge.WIDTH; const height = osparc.info.CardLarge.HEIGHT; osparc.ui.window.Window.popUpInWindow(studyDetails, title, width, height).set({ diff --git a/services/static-webserver/client/source/class/osparc/info/StudyUtils.js b/services/static-webserver/client/source/class/osparc/info/StudyUtils.js index 6a809cd5c738..16b8f39af2dc 100644 --- a/services/static-webserver/client/source/class/osparc/info/StudyUtils.js +++ b/services/static-webserver/client/source/class/osparc/info/StudyUtils.js @@ -84,21 +84,31 @@ qx.Class.define("osparc.info.StudyUtils", { * @param study {osparc.data.model.Study} Study Model */ createAccessRights: function(study) { - const accessRights = new qx.ui.basic.Label(); - let permissions = ""; - const myGID = osparc.auth.Data.getInstance().getGroupId(); - const ar = study.getAccessRights(); - if (myGID in ar) { - if (ar[myGID]["delete"]) { - permissions = qx.locale.Manager.tr("Owner"); - } else if (ar[myGID]["write"]) { - permissions = qx.locale.Manager.tr("Editor"); - } else if (ar[myGID]["read"]) { - permissions = qx.locale.Manager.tr("User"); + const allMyGIds = osparc.store.Groups.getInstance().getAllMyGroupIds(); + const accessRights = study.getAccessRights(); + const permissions = new Set(); + allMyGIds.forEach(gId => { + if (gId in accessRights) { + if (accessRights[gId]["delete"]) { + permissions.add("delete"); + } else if (accessRights[gId]["write"]) { + permissions.add("write"); + } else if (accessRights[gId]["read"]) { + permissions.add("read"); + } } + }); + const accessRightsLabel = new qx.ui.basic.Label(); + if (permissions.has("delete")) { + accessRightsLabel.setValue(osparc.data.Roles.STUDY["delete"].label); + } else if (permissions.has("write")) { + accessRightsLabel.setValue(osparc.data.Roles.STUDY["write"].label); + } else if (permissions.has("read")) { + accessRightsLabel.setValue(osparc.data.Roles.STUDY["read"].label); + } else { + accessRightsLabel.setValue(qx.locale.Manager.tr("Public")); } - accessRights.setValue(permissions); - return accessRights; + return accessRightsLabel; }, /** @@ -179,10 +189,10 @@ qx.Class.define("osparc.info.StudyUtils", { * @param study {osparc.data.model.Study} Study Model * @param maxHeight {Number} description's maxHeight */ - createDescriptionMD: function(study, maxHeight) { + createDescription: function(study, maxHeight) { const description = new osparc.ui.markdown.Markdown(); study.bind("description", description, "value", { - converter: desc => desc ? desc : "Add description" + converter: desc => desc ? desc : "No description" }); const scrollContainer = new qx.ui.container.Scroll(); if (maxHeight) { @@ -225,7 +235,7 @@ qx.Class.define("osparc.info.StudyUtils", { const addTags = model => { tagsContainer.removeAll(); - const noTagsLabel = new qx.ui.basic.Label(qx.locale.Manager.tr("Add tags")); + const noTagsLabel = new qx.ui.basic.Label(qx.locale.Manager.tr("No tags")); tagsContainer.add(noTagsLabel); osparc.store.Tags.getInstance().getTags().filter(tag => model.getTags().includes(tag.getTagId())) .forEach(selectedTag => { @@ -242,152 +252,6 @@ qx.Class.define("osparc.info.StudyUtils", { return tagsContainer; }, - __titleWithEditLayout: function(data, titleWidth = 75) { - const titleLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(5)); - const hasButton = Boolean(data.action && data.action.button); - // use the width for aligning the buttons - const title = new qx.ui.basic.Label(data.label).set({ - allowGrowX: true, - maxWidth: hasButton ? 
titleWidth : titleWidth + 35 // spacer for the button - }); - titleLayout.add(title, { - flex: 1 - }); - if (hasButton) { - const button = data.action.button; - titleLayout.add(button); - button.addListener("execute", () => { - const cb = data.action.callback; - if (typeof cb === "string") { - data.action.ctx.fireEvent(cb); - } else { - cb.call(data.action.ctx); - } - }, this); - } - return titleLayout; - }, - - infoElementsToLayout: function(extraInfos) { - const positions = { - TITLE: { - column: 0, - row: 0, - }, - THUMBNAIL: { - column: 0, - row: 1, - }, - DESCRIPTION: { - column: 0, - row: 2, - }, - AUTHOR: { - inline: true, - column: 0, - row: 0, - }, - CREATED: { - inline: true, - column: 0, - row: 1, - }, - MODIFIED: { - inline: true, - column: 0, - row: 2, - }, - ACCESS_RIGHTS: { - inline: true, - column: 0, - row: 3, - }, - TAGS: { - inline: true, - column: 0, - row: 4, - }, - QUALITY: { - inline: true, - column: 0, - row: 5, - }, - CLASSIFIERS: { - inline: true, - column: 0, - row: 6, - }, - LOCATION: { - inline: true, - column: 0, - row: 7, - }, - }; - - const mainInfoGrid = new qx.ui.layout.Grid(15, 5); - mainInfoGrid.setColumnAlign(0, "left", "top"); - mainInfoGrid.setColumnFlex(0, 1); - const mainInfoLayout = new qx.ui.container.Composite(mainInfoGrid); - - const extraInfoGrid = new qx.ui.layout.Grid(15, 5); - const extraInfoLayout = new qx.ui.container.Composite(extraInfoGrid); - extraInfoGrid.setColumnFlex(0, 1); - - let row = 0; - let row2 = 0; - Object.keys(positions).forEach(key => { - if (key in extraInfos) { - const extraInfo = extraInfos[key]; - const gridInfo = positions[key]; - - if (gridInfo.inline) { - const titleLayout = this.__titleWithEditLayout(extraInfo); - if (extraInfo.action && extraInfo.action.button) { - extraInfo.action.button.set({ - marginRight: 15 - }); - } - titleLayout.add(extraInfo.view, { - flex: 1 - }); - extraInfoLayout.add(titleLayout, { - row: row2, - column: gridInfo.column - }); - row2++; - extraInfoGrid.setRowHeight(row2, 5); // spacer - row2++; - } else { - const titleLayout = this.__titleWithEditLayout(extraInfo); - mainInfoLayout.add(titleLayout, { - row, - column: gridInfo.column - }); - row++; - mainInfoLayout.add(extraInfo.view, { - row, - column: gridInfo.column - }); - row++; - mainInfoGrid.setRowHeight(row, 5); // spacer - row++; - } - } - }); - - - const container = new qx.ui.container.Composite(new qx.ui.layout.VBox()); - const box1 = this.__createSectionBox(qx.locale.Manager.tr("Details")); - box1.add(mainInfoLayout); - container.addAt(box1, 0); - - const box2 = this.__createSectionBox(qx.locale.Manager.tr("Meta details")); - box2.add(extraInfoLayout); - container.addAt(box2, 1); - - return container; - }, - /** * @param studyData {Object} Serialized Study Object */ diff --git a/services/static-webserver/client/source/class/osparc/info/Utils.js b/services/static-webserver/client/source/class/osparc/info/Utils.js index 82696e7d9bff..44670e55e576 100644 --- a/services/static-webserver/client/source/class/osparc/info/Utils.js +++ b/services/static-webserver/client/source/class/osparc/info/Utils.js @@ -42,6 +42,147 @@ qx.Class.define("osparc.info.Utils", { return image; }, + infoElementsToLayout: function(infoElements, isStudy = true) { + const container = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)); + + const decorateAction = action => { + action.button.set({ + alignY: "middle", + }); + action.button.addListener("execute", () => { + const cb = action.callback; + if (typeof cb === "string") { + 
action.ctx.fireEvent(cb); + } else { + cb.call(action.ctx); + } + }, this); + }; + + if ("TITLE" in infoElements) { + const extraInfo = infoElements["TITLE"]; + const titleLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); + + if (extraInfo.action && extraInfo.action.button) { + decorateAction(extraInfo.action); + titleLayout.add(extraInfo.action.button); + } + + if (extraInfo.view) { + titleLayout.add(extraInfo.view, { + flex: 1, + }); + } + + container.add(titleLayout); + } + + + const centerLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); + + if ("THUMBNAIL" in infoElements) { + const extraInfo = infoElements["THUMBNAIL"]; + const thumbnailLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(8)); + + if (extraInfo.action && extraInfo.action.button) { + decorateAction(extraInfo.action); + thumbnailLayout.add(extraInfo.action.button); + } + + if (extraInfo.view) { + thumbnailLayout.add(extraInfo.view, { + flex: 1, + }); + } + + centerLayout.add(thumbnailLayout); + } + + const gridKeys = isStudy ? [ + "AUTHOR", + "ACCESS_RIGHTS", + "CREATED", + "MODIFIED", + "TAGS", + "LOCATION", + ] : [ + "SERVICE_ID", + "KEY", + "INTEGRATION_VERSION", + "VERSION", + "DATE", + "CONTACT", + "AUTHORS", + "ACCESS_RIGHTS", + "DESCRIPTION_ONLY", + ]; + + const grid = new qx.ui.layout.Grid(6, 6); + grid.setColumnAlign(0, "right", "middle"); // titles + const gridLayout = new qx.ui.container.Composite(grid); + + let row = 0; + gridKeys.forEach(key => { + if (key in infoElements) { + const infoElement = infoElements[key]; + + let col = 0; + if (infoElement.label) { + const title = new qx.ui.basic.Label(infoElement.label).set({ + alignX: "right", + }); + gridLayout.add(title, { + row, + column: col + 0, + }); + } + col++; + + if (infoElement.action && infoElement.action.button) { + decorateAction(infoElement.action); + gridLayout.add(infoElement.action.button, { + row, + column: col + 1, + }); + } + col++; + + if (infoElement.view) { + gridLayout.add(infoElement.view, { + row, + column: col + 2, + }); + } + col++; + row++; + } + }); + centerLayout.add(gridLayout, { + flex: 1, + }); + container.add(centerLayout); + + if ("DESCRIPTION" in infoElements) { + const infoElement = infoElements["DESCRIPTION"]; + const descriptionLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); + + if (infoElement.action && infoElement.action.button) { + decorateAction(infoElement.action); + descriptionLayout.add(infoElement.action.button); + } + + if (infoElement.view) { + descriptionLayout.add(infoElement.view, { + flex: 1, + }); + } + + container.add(descriptionLayout); + } + + return container; + }, + extraInfosToGrid: function(extraInfos) { const grid = new qx.ui.layout.Grid(8, 5); grid.setColumnAlign(0, "right", "middle"); diff --git a/services/static-webserver/client/source/class/osparc/io/rest/AbstractResource.js b/services/static-webserver/client/source/class/osparc/io/rest/AbstractResource.js index 2fcc25e653c2..24a402fe1462 100644 --- a/services/static-webserver/client/source/class/osparc/io/rest/AbstractResource.js +++ b/services/static-webserver/client/source/class/osparc/io/rest/AbstractResource.js @@ -26,8 +26,8 @@ qx.Class.define("osparc.io.rest.AbstractResource", { API: "/v0", AUTHENTICATION: null, - setAutheticationHeader: function(usernameOrToken, password=null) { - osparc.io.rest.AbstractResource.AUTHENTICATION = new qx.io.request.authentication.Basic(usernameOrToken, password); + setAutheticationHeader: function(userNameOrToken, password=null) { + 
osparc.io.rest.AbstractResource.AUTHENTICATION = new qx.io.request.authentication.Basic(userNameOrToken, password); } }, diff --git a/services/static-webserver/client/source/class/osparc/io/rest/Resource.js b/services/static-webserver/client/source/class/osparc/io/rest/Resource.js index 6a68dc551a49..3f6203f62beb 100644 --- a/services/static-webserver/client/source/class/osparc/io/rest/Resource.js +++ b/services/static-webserver/client/source/class/osparc/io/rest/Resource.js @@ -34,6 +34,9 @@ qx.Class.define("osparc.io.rest.Resource", { }, { key: "X-Simcore-Products-Name", value: qx.core.Environment.get("product.name") + }, { + key: "X-Client-Session-Id", + value: osparc.utils.Utils.getClientSessionID() }]; if (this.AUTHENTICATION !== undefined && this.AUTHENTICATION !== null) { @@ -51,8 +54,8 @@ qx.Class.define("osparc.io.rest.Resource", { statics: { AUTHENTICATION: null, - setAutheticationHeader: function(usernameOrToken, password=null) { - osparc.io.rest.Resource.AUTHENTICATION = new qx.io.request.authentication.Basic(usernameOrToken, password); + setAutheticationHeader: function(userNameOrToken, password=null) { + osparc.io.rest.Resource.AUTHENTICATION = new qx.io.request.authentication.Basic(userNameOrToken, password); } }, diff --git a/services/static-webserver/client/source/class/osparc/jobs/ActivityCenterWindow.js b/services/static-webserver/client/source/class/osparc/jobs/ActivityCenterWindow.js index 692934925858..ec1e3c85b71c 100644 --- a/services/static-webserver/client/source/class/osparc/jobs/ActivityCenterWindow.js +++ b/services/static-webserver/client/source/class/osparc/jobs/ActivityCenterWindow.js @@ -59,8 +59,8 @@ qx.Class.define("osparc.jobs.ActivityCenterWindow", { runsBrowser.addListener("runSelected", e => { const data = e.getData(); - const project = data["rowData"]; - subRunsBrowser.setProject(project); + const collectionRunData = data["rowData"]; + subRunsBrowser.setCollectionRun(collectionRunData); this.getChildControl("title").setValue(this.tr("Tasks")); stack.setSelection([subRunsBrowser]); }); @@ -72,7 +72,6 @@ qx.Class.define("osparc.jobs.ActivityCenterWindow", { }); this.addListener("close", () => { - runsBrowser.stopInterval(); subRunsBrowser.stopInterval(); }); }, diff --git a/services/static-webserver/client/source/class/osparc/jobs/ActivityOverview.js b/services/static-webserver/client/source/class/osparc/jobs/ActivityOverview.js index 15681a8265b9..a7253b0c66a1 100644 --- a/services/static-webserver/client/source/class/osparc/jobs/ActivityOverview.js +++ b/services/static-webserver/client/source/class/osparc/jobs/ActivityOverview.js @@ -56,12 +56,12 @@ qx.Class.define("osparc.jobs.ActivityOverview", { this.__runsTable.addListener("runSelected", e => { const data = e.getData(); - const project = data["rowData"]; - const projectUuid = project["projectUuid"]; + const collectionRunData = data["rowData"]; + const collectionRunId = collectionRunData["collectionRunId"]; // Hacky-hacky for (let i=0; i i) { + if (rowData["collectionRunId"] === collectionRunId && data["rowIdx"] > i) { const msg = this.tr("Only the latest run's tasks are available"); osparc.FlashMessenger.logAs(msg, "WARNING"); return; @@ -72,7 +72,7 @@ qx.Class.define("osparc.jobs.ActivityOverview", { tasksLayout.remove(this.__subRunsTable); this.__subRunsTable = null; } - const subRunsTable = this.__subRunsTable = new osparc.jobs.SubRunsTable(project["projectUuid"]); + const subRunsTable = this.__subRunsTable = new osparc.jobs.SubRunsTable(collectionRunData["collectionRunId"]); 
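// Part of the projectUuid -> collectionRunId migration: a run is now identified by its
// collection run id (which may group several project ids, see the PROJECT_IDS column in
// RunsTable further down), so the sub-runs table is keyed by collectionRunId rather than
// by a single project uuid.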
tasksLayout.add(subRunsTable, { flex: 1 }); @@ -106,12 +106,11 @@ qx.Class.define("osparc.jobs.ActivityOverview", { runsHistoryLayout.add(introText); const projectUuid = projectData["uuid"]; - const includeChildren = true; const runningOnly = false; - const runsTable = this.__runsTable = new osparc.jobs.RunsTable(projectUuid, includeChildren, runningOnly); + const runsTable = this.__runsTable = new osparc.jobs.RunsTable(projectUuid, runningOnly); const columnModel = runsTable.getTableColumnModel(); // Hide project name column - columnModel.setColumnVisible(osparc.jobs.RunsTable.COLS.PROJECT_NAME.column, false); + columnModel.setColumnVisible(osparc.jobs.RunsTable.COLS.NAME.column, false); // Hide cancel column columnModel.setColumnVisible(osparc.jobs.RunsTable.COLS.ACTION_CANCEL.column, false); runsHistoryLayout.add(runsTable, { diff --git a/services/static-webserver/client/source/class/osparc/jobs/Info.js b/services/static-webserver/client/source/class/osparc/jobs/Info.js index 2ae9a811eed5..2757a5cf84d0 100644 --- a/services/static-webserver/client/source/class/osparc/jobs/Info.js +++ b/services/static-webserver/client/source/class/osparc/jobs/Info.js @@ -24,14 +24,12 @@ qx.Class.define("osparc.jobs.Info", { this._setLayout(new qx.ui.layout.VBox()); - const divId = "job-info-viewer"; - const htmlEmbed = osparc.wrapper.JsonFormatter.getInstance().createContainer(divId); - this._add(htmlEmbed, { + const jsonViewer = new osparc.widget.JsonFormatterWidget(info); + const scroll = new qx.ui.container.Scroll(); + scroll.add(jsonViewer); + this._add(scroll, { flex: 1 }); - this.addListener("appear", () => { - osparc.wrapper.JsonFormatter.getInstance().setJson(info, divId); - }); }, statics: { @@ -42,21 +40,4 @@ qx.Class.define("osparc.jobs.Info", { return win; } }, - - members: { - _createChildControlImpl: function(id) { - let control; - switch (id) { - case "job-info-viewer": { - control = new osparc.ui.basic.JsonTreeWidget(); - const container = new qx.ui.container.Scroll(); - container.add(control); - this._add(container); - break; - } - } - - return control || this.base(arguments, id); - }, - } }) diff --git a/services/static-webserver/client/source/class/osparc/jobs/JobsButton.js b/services/static-webserver/client/source/class/osparc/jobs/JobsButton.js index 74e82a69450f..ce969b847001 100644 --- a/services/static-webserver/client/source/class/osparc/jobs/JobsButton.js +++ b/services/static-webserver/client/source/class/osparc/jobs/JobsButton.js @@ -23,18 +23,25 @@ qx.Class.define("osparc.jobs.JobsButton", { this._setLayout(new qx.ui.layout.Canvas()); + osparc.utils.Utils.setIdToWidget(this, "jobsButton"); + this.set({ - width: 30, - alignX: "center", - cursor: "pointer", toolTipText: this.tr("Activity Center"), }); - this.addListener("tap", () => osparc.jobs.ActivityCenterWindow.openWindow(), this); + this.addListener("tap", () => { + osparc.jobs.ActivityCenterWindow.openWindow(); + this.__fetchNJobs(); + }, this); + + this.__fetchNJobs(); - const jobsStore = osparc.store.Jobs.getInstance(); - jobsStore.addListener("changeJobsActive", e => this.__updateJobsButton(e.getData()), this); - jobsStore.fetchJobsLatest(); + const socket = osparc.wrapper.WebSocket.getInstance(); + if (socket.isConnected()) { + this.__attachSocketListener(); + } else { + socket.addListener("connect", () => this.__attachSocketListener()); + } }, members: { @@ -43,40 +50,91 @@ qx.Class.define("osparc.jobs.JobsButton", { switch (id) { case "icon": { control = new qx.ui.basic.Image("@FontAwesome5Solid/tasks/22"); - const 
logoContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox().set({ alignY: "middle" - })); + })).set({ + paddingLeft: 5, + }); logoContainer.add(control); - this._add(logoContainer, { height: "100%" }); break; } - case "number": - control = new qx.ui.basic.Label().set({ - backgroundColor: "background-main-1", - font: "text-12" + case "is-active-icon-outline": + control = new qx.ui.basic.Image("@FontAwesome5Solid/circle/12").set({ + textColor: osparc.navigation.NavigationBar.BG_COLOR, }); - control.getContentElement().setStyles({ - "border-radius": "4px" + this._add(control, { + bottom: -4, + right: -4, + }); + break; + case "is-active-icon": + control = new qx.ui.basic.Image("@FontAwesome5Solid/circle/8").set({ + textColor: "strong-main", }); this._add(control, { - bottom: 8, - right: 4 + bottom: -2, + right: -2, }); break; } return control || this.base(arguments, id); }, - __updateJobsButton: function(nActiveJobs) { + __fetchNJobs: function() { + const jobsStore = osparc.store.Jobs.getInstance(); + const runningOnly = true; + const offset = 0; + const limit = 1; + const orderBy = undefined; // use default order + const filters = undefined; // use default filters + const resolveWResponse = true; + jobsStore.fetchJobsLatest(runningOnly, offset, limit, orderBy, filters, resolveWResponse) + .then(resp => { + // here we have the real number of jobs running + this.__updateJobsButton(Boolean(resp["_meta"]["total"])); + }); + }, + + __attachSocketListener: function() { + const socket = osparc.wrapper.WebSocket.getInstance(); + + socket.on("projectStateUpdated", data => { + if (osparc.study.Utils.state.isPipelineRunning(data["data"])) { + this.__updateJobsButton(true); + } + }, this); + }, + + __updateJobsButton: function(isActive) { this.getChildControl("icon"); - const number = this.getChildControl("number"); + [ + this.getChildControl("is-active-icon-outline"), + this.getChildControl("is-active-icon"), + ].forEach(control => { + control.set({ + visibility: isActive ? "visible" : "excluded" + }); + }); + + // Start or restart timer when isActive is true + if (isActive) { + this.__startRefreshTimer(); + } + }, + + __startRefreshTimer: function() { + // Stop existing timer if running + if (this.__refreshTimer) { + this.__refreshTimer.stop(); + this.__refreshTimer.dispose(); + } - const nJobs = nActiveJobs > osparc.store.Jobs.SERVER_MAX_LIMIT ? 
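// The numeric badge below (count clamped at SERVER_MAX_LIMIT and suffixed with "+") is
// removed; the button now only shows the is-active dot and, once a run is seen active,
// re-polls __fetchNJobs every 20 seconds via the qx.event.Timer set up here.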
(osparc.store.Jobs.SERVER_MAX_LIMIT + "+") : nActiveJobs; - number.setValue(nJobs.toString()); + this.__refreshTimer = new qx.event.Timer(20000); + this.__refreshTimer.addListener("interval", () => this.__fetchNJobs(), this); + this.__refreshTimer.start(); }, } }); diff --git a/services/static-webserver/client/source/class/osparc/jobs/RunsBrowser.js b/services/static-webserver/client/source/class/osparc/jobs/RunsBrowser.js index c5ab4355df94..5e0bf883067a 100644 --- a/services/static-webserver/client/source/class/osparc/jobs/RunsBrowser.js +++ b/services/static-webserver/client/source/class/osparc/jobs/RunsBrowser.js @@ -24,6 +24,8 @@ qx.Class.define("osparc.jobs.RunsBrowser", { this._setLayout(new qx.ui.layout.VBox(10)); + const reloadButton = this.getChildControl("reload-button"); + reloadButton.addListener("execute", () => this.reloadRuns()); this.getChildControl("intro-label"); const jobsFilter = this.getChildControl("jobs-filter"); const runningCB = this.getChildControl("running-only-cb"); @@ -35,8 +37,6 @@ qx.Class.define("osparc.jobs.RunsBrowser", { }); runningCB.bind("value", runsTable, "runningOnly"); - - this.__reloadInterval = setInterval(() => this.reloadRuns(), 10*1000); }, events: { @@ -49,13 +49,19 @@ qx.Class.define("osparc.jobs.RunsBrowser", { _createChildControlImpl: function(id) { let control; switch (id) { - case "header-filter": + case "header-toolbar": control = new qx.ui.container.Composite(new qx.ui.layout.HBox(5)); this._add(control); break; + case "reload-button": + control = new qx.ui.form.Button(this.tr("Reload"), "@FontAwesome5Solid/sync-alt/14"); + this.getChildControl("header-toolbar").add(control); + break; case "intro-label": - control = new qx.ui.basic.Label(this.tr("Select a Run to check the details")); - this.getChildControl("header-filter").add(control); + control = new qx.ui.basic.Label(this.tr("Select a Run to check the details")).set({ + alignY: "middle", + }); + this.getChildControl("header-toolbar").add(control); break; case "jobs-filter": control = new osparc.filter.TextFilter("text", "jobsList").set({ @@ -66,7 +72,7 @@ qx.Class.define("osparc.jobs.RunsBrowser", { placeholder: qx.locale.Manager.tr("Filter by name or ID"), }); control.hide(); // @matusdrobuliak66: remove this when the backend is ready - this.getChildControl("header-filter").add(control, { + this.getChildControl("header-toolbar").add(control, { flex: 1 }); break; @@ -75,13 +81,12 @@ qx.Class.define("osparc.jobs.RunsBrowser", { value: true, label: qx.locale.Manager.tr("Active only"), }); - this.getChildControl("header-filter").add(control); + this.getChildControl("header-toolbar").add(control); break; case "runs-table": { const projectUuid = null; - const includeChildren = false; const runningOnly = true; - control = new osparc.jobs.RunsTable(projectUuid, includeChildren, runningOnly); + control = new osparc.jobs.RunsTable(projectUuid, runningOnly); control.addListener("runSelected", e => this.fireDataEvent("runSelected", e.getData())); this._add(control); break; @@ -95,11 +100,5 @@ qx.Class.define("osparc.jobs.RunsBrowser", { const runsTable = this.getChildControl("runs-table"); runsTable.reloadRuns(); }, - - stopInterval: function() { - if (this.__reloadInterval) { - clearInterval(this.__reloadInterval); - } - }, } }) diff --git a/services/static-webserver/client/source/class/osparc/jobs/RunsTable.js b/services/static-webserver/client/source/class/osparc/jobs/RunsTable.js index 78a5743bf948..16ab7282baa6 100644 --- 
a/services/static-webserver/client/source/class/osparc/jobs/RunsTable.js +++ b/services/static-webserver/client/source/class/osparc/jobs/RunsTable.js @@ -19,16 +19,16 @@ qx.Class.define("osparc.jobs.RunsTable", { extend: qx.ui.table.Table, - construct: function(projectUuid = null, includeChildren = false, runningOnly = true) { + construct: function(projectId = null, runningOnly = true) { this.base(arguments); this.set({ - projectUuid, + projectId, runningOnly, }); - const model = new osparc.jobs.RunsTableModel(projectUuid, includeChildren); - this.bind("projectUuid", model, "projectUuid"); + const model = new osparc.jobs.RunsTableModel(projectId); + this.bind("projectId", model, "projectId"); this.bind("runningOnly", model, "runningOnly"); this.setTableModel(model); @@ -39,27 +39,47 @@ qx.Class.define("osparc.jobs.RunsTable", { }); const columnModel = this.getTableColumnModel(); - columnModel.setColumnVisible(this.self().COLS.PROJECT_UUID.column, false); + columnModel.setColumnVisible(this.self().COLS.COLLECTION_RUN_ID.column, false); + columnModel.setColumnVisible(this.self().COLS.PROJECT_IDS.column, false); Object.values(this.self().COLS).forEach(col => columnModel.setColumnWidth(col.column, col.width)); - const iconPathStop = "osparc/icons/circle-xmark-text.svg"; - const fontButtonRendererStop = new osparc.ui.table.cellrenderer.ImageButtonRenderer("cancel", iconPathStop); + const shouldShowCancel = cellInfo => { + if (cellInfo && cellInfo.rowData && cellInfo.rowData["state"]) { + return [ + "Running", + ].includes(cellInfo.rowData["state"]); + } + return false; + } + const iconPathStop = osparc.ui.switch.ThemeSwitcher.isLight() ? "osparc/icons/circle-xmark-black.svg" : "osparc/icons/circle-xmark-white.svg"; + const fontButtonRendererStop = new osparc.ui.table.cellrenderer.ImageButtonRenderer("cancel", iconPathStop, shouldShowCancel); columnModel.setDataCellRenderer(this.self().COLS.ACTION_CANCEL.column, fontButtonRendererStop); - const iconPathInfo = "osparc/icons/circle-info-text.svg"; - const fontButtonRendererInfo = new osparc.ui.table.cellrenderer.ImageButtonRenderer("info", iconPathInfo); + const jobsStore =osparc.store.Jobs.getInstance(); + const shouldShowInfo = cellInfo => { + if (cellInfo && cellInfo.rowData && cellInfo.rowData["collectionRunId"]) { + const job = jobsStore.getJob(cellInfo.rowData["collectionRunId"]); + if (!job) { + return false; + } + return Object.keys(job.getInfo()).length > 0; + } + return false; + } + const iconPathInfo = osparc.ui.switch.ThemeSwitcher.isLight() ? 
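// The ImageButtonRenderer now takes a per-row predicate (shouldShowCancel / shouldShowInfo
// above), so the cancel button is only drawn for runs still in the "Running" state and the
// info button only when the job carries a non-empty info payload; the icon asset is picked
// to contrast with the active theme: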
"osparc/icons/circle-info-black.svg" : "osparc/icons/circle-info-white.svg"; + const fontButtonRendererInfo = new osparc.ui.table.cellrenderer.ImageButtonRenderer("info", iconPathInfo, shouldShowInfo); columnModel.setDataCellRenderer(this.self().COLS.ACTION_INFO.column, fontButtonRendererInfo); this.__attachHandlers(); }, properties: { - projectUuid: { + projectId: { check: "String", init: null, nullable: true, - event: "changeProjectUuid", + event: "changeProjectId", }, runningOnly: { @@ -76,54 +96,60 @@ qx.Class.define("osparc.jobs.RunsTable", { statics: { COLS: { - PROJECT_UUID: { - id: "projectUuid", + COLLECTION_RUN_ID: { + id: "collectionRunId", column: 0, - label: qx.locale.Manager.tr("Project Id"), + label: qx.locale.Manager.tr("Collection Run Id"), width: 200 }, - PROJECT_NAME: { - id: "projectName", + PROJECT_IDS: { + id: "projectIds", column: 1, - label: qx.locale.Manager.tr("Project"), + label: qx.locale.Manager.tr("Project Ids"), + width: 200 + }, + NAME: { + id: "name", + column: 2, + label: qx.locale.Manager.tr("Name"), width: 150, }, STATE: { id: "state", - column: 2, + column: 3, label: qx.locale.Manager.tr("Status"), width: 150, }, SUBMIT: { id: "submit", - column: 3, + column: 4, label: qx.locale.Manager.tr("Queued"), width: 130, sortableMap: "submitted_at", }, START: { id: "start", - column: 4, + column: 5, label: qx.locale.Manager.tr("Started"), width: 130, sortableMap: "started_at", }, END: { id: "end", - column: 5, + column: 6, label: qx.locale.Manager.tr("Ended"), width: 130, sortableMap: "ended_at", }, ACTION_CANCEL: { id: "action_cancel", - column: 6, + column: 7, label: qx.locale.Manager.tr("Cancel"), width: 50 }, ACTION_INFO: { id: "action_info", - column: 7, + column: 8, label: qx.locale.Manager.tr("Info"), width: 50 }, @@ -140,8 +166,9 @@ qx.Class.define("osparc.jobs.RunsTable", { this.addListener("cellTap", e => { const rowIdx = e.getRow(); const target = e.getOriginalTarget(); - if (target.closest(".qx-material-button") && (target.tagName === "IMG" || target.tagName === "DIV")) { - const action = target.closest(".qx-material-button").getAttribute("data-action"); + const closestItems = osparc.ui.table.cellrenderer.ImageButtonRenderer.getClosestItems(target); + if (closestItems && (target.tagName === "IMG" || target.tagName === "DIV")) { + const action = closestItems.getAttribute("data-action"); if (action) { this.__handleButtonClick(action, rowIdx); } @@ -163,21 +190,18 @@ qx.Class.define("osparc.jobs.RunsTable", { const rowData = this.getTableModel().getRowData(row); switch (action) { case "info": { - const job = osparc.store.Jobs.getInstance().getJob(rowData["projectUuid"]); + const job = osparc.store.Jobs.getInstance().getJob(rowData["collectionRunId"]); if (!job) { return; } - const allInfo = { - "image": job.getInfo() ? osparc.utils.Utils.deepCloneObject(job.getInfo()) : {}, - "customMetadata": job.getCustomMetadata() ? osparc.utils.Utils.deepCloneObject(job.getCustomMetadata()) : {}, - } - const runInfo = new osparc.jobs.Info(allInfo); + const info = job.getInfo() ? 
osparc.utils.Utils.deepCloneObject(job.getInfo()) : {} + const runInfo = new osparc.jobs.Info(info); const win = osparc.jobs.Info.popUpInWindow(runInfo); - win.setCaption(rowData["projectName"]); + win.setCaption(rowData["name"]); break; } case "cancel": { - this.__cancelRun(rowData); + this.__cancelRunCollection(rowData); break; } default: @@ -185,8 +209,8 @@ qx.Class.define("osparc.jobs.RunsTable", { } }, - __cancelRun: function(rowData) { - const msg = this.tr("Are you sure you want to cancel") + " " + rowData["projectName"] + "?"; + __cancelRunCollection: function(rowData) { + const msg = this.tr("Are you sure you want to cancel") + " " + rowData["name"] + "?"; const confirmationWin = new osparc.ui.window.Confirmation(msg).set({ caption: this.tr("Cancel Run"), confirmText: this.tr("Cancel"), @@ -199,13 +223,17 @@ qx.Class.define("osparc.jobs.RunsTable", { confirmationWin.open(); confirmationWin.addListener("close", () => { if (confirmationWin.getConfirmed()) { - const params = { - url: { - "studyId": rowData["projectUuid"], - }, - }; - osparc.data.Resources.fetch("runPipeline", "stopPipeline", params) - .then(() => osparc.FlashMessenger.logAs(this.tr("Stopping pipeline"), "INFO")) + const promises = []; + rowData["projectIds"].forEach(projectId => { + const params = { + url: { + "studyId": projectId, + }, + }; + promises.push(osparc.data.Resources.fetch("runPipeline", "stopPipeline", params)) + }); + Promise.all(promises) + .then(() => osparc.FlashMessenger.logAs(this.tr("Stopping Run"), "INFO")) .catch(err => osparc.FlashMessenger.logError(err)); } }, this); diff --git a/services/static-webserver/client/source/class/osparc/jobs/RunsTableModel.js b/services/static-webserver/client/source/class/osparc/jobs/RunsTableModel.js index f11bb85e402c..8c2602863608 100644 --- a/services/static-webserver/client/source/class/osparc/jobs/RunsTableModel.js +++ b/services/static-webserver/client/source/class/osparc/jobs/RunsTableModel.js @@ -19,13 +19,11 @@ qx.Class.define("osparc.jobs.RunsTableModel", { extend: qx.ui.table.model.Remote, - construct: function(projectUuid = null, includeChildren = false) { + construct: function(projectId = null) { this.base(arguments); - this.__includeChildren = includeChildren; - this.set({ - projectUuid, + projectId, }); const jobsCols = osparc.jobs.RunsTable.COLS; @@ -41,11 +39,11 @@ qx.Class.define("osparc.jobs.RunsTableModel", { }, properties: { - projectUuid: { + projectId: { check: "String", init: null, nullable: true, - event: "changeProjectUuid", + event: "changeProjectId", apply: "reloadData", }, @@ -79,13 +77,7 @@ qx.Class.define("osparc.jobs.RunsTableModel", { }, }, - statics: { - SERVER_MAX_LIMIT: 49, - }, - members: { - __includeChildren: false, - // overridden sortByColumn(columnIndex, ascending) { const jobsCols = osparc.jobs.RunsTable.COLS; @@ -104,8 +96,8 @@ qx.Class.define("osparc.jobs.RunsTableModel", { const orderBy = this.getOrderBy(); const resolveWResponse = true; let promise; - if (this.getProjectUuid()) { - promise = osparc.store.Jobs.getInstance().fetchJobsHistory(this.getProjectUuid(), this.__includeChildren, offset, limit, orderBy, resolveWResponse); + if (this.getProjectId()) { + promise = osparc.store.Jobs.getInstance().fetchJobsHistory(this.getProjectId(), offset, limit, orderBy, resolveWResponse); } else { const filters = this.getFilterString() ? 
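// Two data paths: with a projectId set the model pages through that project's run history
// (fetchJobsHistory); without one it lists the latest runs across projects (fetchJobsLatest),
// optionally narrowed by the free-text filter assembled here: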
{ text: this.getFilterString() } : null; promise = osparc.store.Jobs.getInstance().fetchJobsLatest(this.getRunningOnly(), offset, limit, orderBy, filters, resolveWResponse); @@ -126,10 +118,10 @@ qx.Class.define("osparc.jobs.RunsTableModel", { const lastRow = Math.min(qxLastRow, this._rowCount - 1); // Returns a request promise with given offset and limit const getFetchPromise = (offset, limit) => { - const orderBy = this.getOrderBy(); + const orderBy = this.getOrderBy(); let promise; - if (this.getProjectUuid()) { - promise = osparc.store.Jobs.getInstance().fetchJobsHistory(this.getProjectUuid(), this.__includeChildren, offset, limit, orderBy); + if (this.getProjectId()) { + promise = osparc.store.Jobs.getInstance().fetchJobsHistory(this.getProjectId(), offset, limit, orderBy); } else { const filters = this.getFilterString() ? { text: this.getFilterString() } : null; promise = osparc.store.Jobs.getInstance().fetchJobsLatest(this.getRunningOnly(), offset, limit, orderBy, filters); @@ -140,8 +132,9 @@ qx.Class.define("osparc.jobs.RunsTableModel", { const jobsCols = osparc.jobs.RunsTable.COLS; jobs.forEach(job => { data.push({ - [jobsCols.PROJECT_UUID.id]: job.getProjectUuid(), - [jobsCols.PROJECT_NAME.id]: job.getProjectName(), + [jobsCols.COLLECTION_RUN_ID.id]: job.getCollectionRunId(), + [jobsCols.PROJECT_IDS.id]: job.getProjectIds(), + [jobsCols.NAME.id]: job.getName(), [jobsCols.STATE.id]: osparc.data.Job.STATUS_LABELS[job.getState()] || job.getState(), [jobsCols.SUBMIT.id]: job.getSubmittedAt() ? osparc.utils.Utils.formatDateAndTime(job.getSubmittedAt()) : "-", [jobsCols.START.id]: job.getStartedAt() ? osparc.utils.Utils.formatDateAndTime(job.getStartedAt()) : "-", @@ -153,15 +146,13 @@ qx.Class.define("osparc.jobs.RunsTableModel", { }; // Divides the model row request into several server requests to comply with the number of rows server limit + const serverMaxLimit = osparc.store.Jobs.SERVER_MAX_LIMIT; const reqLimit = lastRow - firstRow + 1; // Number of requested rows - let nRequests = Math.ceil(reqLimit / this.self().SERVER_MAX_LIMIT); + let nRequests = Math.ceil(reqLimit / serverMaxLimit); if (nRequests > 1) { const requests = []; - for (let i=firstRow; i <= lastRow; i += this.self().SERVER_MAX_LIMIT) { - // fetch the first page only - if (i < 1) { - requests.push(getFetchPromise(i, i > lastRow - this.self().SERVER_MAX_LIMIT + 1 ? reqLimit % this.self().SERVER_MAX_LIMIT : this.self().SERVER_MAX_LIMIT)) - } + for (let i=firstRow; i <= lastRow; i += serverMaxLimit) { + requests.push(getFetchPromise(i, i > lastRow - serverMaxLimit + 1 ? 
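// Worked example of the chunking (illustrative, assuming firstRow = 0): for rows 0..119 with
// the server page limit of 49, reqLimit = 120 and nRequests = ceil(120 / 49) = 3, so the loop
// issues getFetchPromise(0, 49), (49, 49) and (98, 22), the last limit being the remainder
// picked by this ternary: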
reqLimit % serverMaxLimit : serverMaxLimit)); } Promise.all(requests) .then(responses => this._onRowDataLoaded(responses.flat())) diff --git a/services/static-webserver/client/source/class/osparc/jobs/SubRunsBrowser.js b/services/static-webserver/client/source/class/osparc/jobs/SubRunsBrowser.js index 3a899d335ad9..2fc4700524e9 100644 --- a/services/static-webserver/client/source/class/osparc/jobs/SubRunsBrowser.js +++ b/services/static-webserver/client/source/class/osparc/jobs/SubRunsBrowser.js @@ -66,15 +66,15 @@ qx.Class.define("osparc.jobs.SubRunsBrowser", { return titleLayout; }, - setProject: function(project) { + setCollectionRun: function(collectionRunData) { if (this.__subRunsTable) { this._remove(this.__subRunsTable); this.__subRunsTable = null; } - this.__titleLabel.setValue(project["projectName"]) + this.__titleLabel.setValue(collectionRunData["name"]) - const subRunsTable = this.__subRunsTable = new osparc.jobs.SubRunsTable(project["projectUuid"]); + const subRunsTable = this.__subRunsTable = new osparc.jobs.SubRunsTable(collectionRunData["collectionRunId"]); this._add(subRunsTable, { flex: 1 }); diff --git a/services/static-webserver/client/source/class/osparc/jobs/SubRunsTable.js b/services/static-webserver/client/source/class/osparc/jobs/SubRunsTable.js index aabc10d47710..21cbafb841fe 100644 --- a/services/static-webserver/client/source/class/osparc/jobs/SubRunsTable.js +++ b/services/static-webserver/client/source/class/osparc/jobs/SubRunsTable.js @@ -19,10 +19,10 @@ qx.Class.define("osparc.jobs.SubRunsTable", { extend: qx.ui.table.Table, - construct: function(projectUuid) { + construct: function(collectionRunId) { this.base(arguments); - const model = new osparc.jobs.SubRunsTableModel(projectUuid); + const model = new osparc.jobs.SubRunsTableModel(collectionRunId); this.setTableModel(model); this.set({ @@ -32,16 +32,16 @@ qx.Class.define("osparc.jobs.SubRunsTable", { }); const columnModel = this.getTableColumnModel(); - columnModel.setColumnVisible(this.self().COLS.PROJECT_UUID.column, false); + columnModel.setColumnVisible(this.self().COLS.COLLECTION_RUN_ID.column, false); columnModel.setColumnVisible(this.self().COLS.NODE_ID.column, false); Object.values(this.self().COLS).forEach(col => columnModel.setColumnWidth(col.column, col.width)); - const iconPathInfo = "osparc/icons/circle-info-text.svg"; + const iconPathInfo = osparc.ui.switch.ThemeSwitcher.isLight() ? "osparc/icons/circle-info-black.svg" : "osparc/icons/circle-info-white.svg"; const fontButtonRendererInfo = new osparc.ui.table.cellrenderer.ImageButtonRenderer("info", iconPathInfo); columnModel.setDataCellRenderer(this.self().COLS.INFO.column, fontButtonRendererInfo); - const iconPathLogs = "osparc/icons/file-download-text.svg"; + const iconPathLogs = osparc.ui.switch.ThemeSwitcher.isLight() ? 
"osparc/icons/file-download-black.svg" : "osparc/icons/file-download-white.svg"; const fontButtonRendererLogs = new osparc.ui.table.cellrenderer.ImageButtonRenderer("logs", iconPathLogs); columnModel.setDataCellRenderer(this.self().COLS.LOGS.column, fontButtonRendererLogs); @@ -50,10 +50,10 @@ qx.Class.define("osparc.jobs.SubRunsTable", { statics: { COLS: { - PROJECT_UUID: { - id: "projectUuid", + COLLECTION_RUN_ID: { + id: "collectionRunId", column: 0, - label: qx.locale.Manager.tr("Project Id"), + label: qx.locale.Manager.tr("Collection Run Id"), width: 200 }, NODE_ID: { @@ -62,10 +62,10 @@ qx.Class.define("osparc.jobs.SubRunsTable", { label: qx.locale.Manager.tr("Node Id"), width: 200 }, - NODE_NAME: { - id: "nodeName", + NAME: { + id: "name", column: 2, - label: qx.locale.Manager.tr("Node"), + label: qx.locale.Manager.tr("Name"), width: 100 }, APP: { @@ -136,8 +136,9 @@ qx.Class.define("osparc.jobs.SubRunsTable", { this.addListener("cellTap", e => { const row = e.getRow(); const target = e.getOriginalTarget(); - if (target.closest(".qx-material-button") && (target.tagName === "IMG" || target.tagName === "DIV")) { - const action = target.closest(".qx-material-button").getAttribute("data-action"); + const closestItems = osparc.ui.table.cellrenderer.ImageButtonRenderer.getClosestItems(target); + if (closestItems && (target.tagName === "IMG" || target.tagName === "DIV")) { + const action = closestItems.getAttribute("data-action"); if (action) { this.__handleButtonClick(action, row); } @@ -152,7 +153,7 @@ qx.Class.define("osparc.jobs.SubRunsTable", { const rowData = this.getTableModel().getRowData(row); switch (action) { case "info": { - const job = osparc.store.Jobs.getInstance().getJob(rowData["projectUuid"]); + const job = osparc.store.Jobs.getInstance().getJob(rowData["collectionRunId"]); if (!job) { return; } @@ -162,11 +163,11 @@ qx.Class.define("osparc.jobs.SubRunsTable", { } const jobInfo = new osparc.jobs.Info(subJob.getImage()); const win = osparc.jobs.Info.popUpInWindow(jobInfo); - win.setCaption(rowData["nodeName"]); + win.setCaption(rowData["name"]); break; } case "logs": { - const job = osparc.store.Jobs.getInstance().getJob(rowData["projectUuid"]); + const job = osparc.store.Jobs.getInstance().getJob(rowData["collectionRunId"]); if (!job) { return; } @@ -176,7 +177,7 @@ qx.Class.define("osparc.jobs.SubRunsTable", { } const logDownloadLink = subJob.getLogDownloadLink() if (logDownloadLink) { - osparc.utils.Utils.downloadLink(logDownloadLink, "GET", rowData["nodeName"] + ".zip"); + osparc.utils.Utils.downloadLink(logDownloadLink, "GET", rowData["name"] + ".zip"); } else { osparc.FlashMessenger.logAs(this.tr("No logs available"), "WARNING"); } diff --git a/services/static-webserver/client/source/class/osparc/jobs/SubRunsTableModel.js b/services/static-webserver/client/source/class/osparc/jobs/SubRunsTableModel.js index 2c43fa59ed95..56ec069ff28e 100644 --- a/services/static-webserver/client/source/class/osparc/jobs/SubRunsTableModel.js +++ b/services/static-webserver/client/source/class/osparc/jobs/SubRunsTableModel.js @@ -19,7 +19,7 @@ qx.Class.define("osparc.jobs.SubRunsTableModel", { extend: qx.ui.table.model.Remote, - construct: function(projectUuid) { + construct: function(collectionRunId) { this.base(arguments); const subJobsCols = osparc.jobs.SubRunsTable.COLS; @@ -33,11 +33,11 @@ qx.Class.define("osparc.jobs.SubRunsTableModel", { this.setColumnSortable(col.column, Boolean(col.sortableMap)); }); - this.setProjectUuid(projectUuid); + 
this.setCollectionRunId(collectionRunId); }, properties: { - projectUuid: { + collectionRunId: { check: "String", nullable: true, }, @@ -57,10 +57,6 @@ qx.Class.define("osparc.jobs.SubRunsTableModel", { }, }, - statics: { - SERVER_MAX_LIMIT: 49, - }, - members: { // overridden sortByColumn(columnIndex, ascending) { @@ -75,7 +71,7 @@ qx.Class.define("osparc.jobs.SubRunsTableModel", { // overridden _loadRowCount() { - osparc.store.Jobs.getInstance().fetchSubJobs(this.getProjectUuid(), this.getOrderBy()) + osparc.store.Jobs.getInstance().fetchSubJobs(this.getCollectionRunId(), this.getOrderBy()) .then(subJobs => { this._onRowCountLoaded(subJobs.length) }) @@ -91,7 +87,7 @@ qx.Class.define("osparc.jobs.SubRunsTableModel", { const lastRow = Math.min(qxLastRow, this._rowCount - 1); // Returns a request promise with given offset and limit const getFetchPromise = () => { - return osparc.store.Jobs.getInstance().fetchSubJobs(this.getProjectUuid(), this.getOrderBy()) + return osparc.store.Jobs.getInstance().fetchSubJobs(this.getCollectionRunId(), this.getOrderBy()) .then(subJobs => { const data = []; const subJobsCols = osparc.jobs.SubRunsTable.COLS; @@ -115,9 +111,9 @@ qx.Class.define("osparc.jobs.SubRunsTableModel", { duration = `${String(diffHours).padStart(2, "0")}:${String(diffMinutes).padStart(2, "0")}:${String(diffSeconds).padStart(2, "0")}`; } data.push({ - [subJobsCols.PROJECT_UUID.id]: subJob.getProjectUuid(), + [subJobsCols.COLLECTION_RUN_ID.id]: subJob.getCollectionRunId(), [subJobsCols.NODE_ID.id]: subJob.getNodeId(), - [subJobsCols.NODE_NAME.id]: subJob.getNodeName(), + [subJobsCols.NAME.id]: subJob.getName(), [subJobsCols.APP.id]: appName + ":" + displayVersion, [subJobsCols.STATE.id]: osparc.data.Job.STATUS_LABELS[subJob.getState()] || subJob.getState(), [subJobsCols.PROGRESS.id]: subJob.getProgress() * 100 + "%", @@ -132,12 +128,13 @@ qx.Class.define("osparc.jobs.SubRunsTableModel", { }; // Divides the model row request into several server requests to comply with the number of rows server limit + const serverMaxLimit = osparc.store.Jobs.SERVER_MAX_LIMIT; const reqLimit = lastRow - firstRow + 1; // Number of requested rows - const nRequests = Math.ceil(reqLimit / this.self().SERVER_MAX_LIMIT); + const nRequests = Math.ceil(reqLimit / serverMaxLimit); if (nRequests > 1) { const requests = []; - for (let i=firstRow; i <= lastRow; i += this.self().SERVER_MAX_LIMIT) { - requests.push(getFetchPromise(i, i > lastRow - this.self().SERVER_MAX_LIMIT + 1 ? reqLimit % this.self().SERVER_MAX_LIMIT : this.self().SERVER_MAX_LIMIT)) + for (let i=firstRow; i <= lastRow; i += serverMaxLimit) { + requests.push(getFetchPromise(i, i > lastRow - serverMaxLimit + 1 ? 
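// Same chunking pattern as in RunsTableModel; note that getFetchPromise above is declared
// without parameters (fetchSubJobs already returns the full list for the collection run),
// so the offset/limit computed by this ternary appear to be effectively ignored here: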
reqLimit % serverMaxLimit : serverMaxLimit)) } Promise.all(requests) .then(responses => this._onRowDataLoaded(responses.flat())) diff --git a/services/static-webserver/client/source/class/osparc/metadata/ClassifiersEditor.js b/services/static-webserver/client/source/class/osparc/metadata/ClassifiersEditor.js index e1337f16f38d..e717d1e07c1c 100644 --- a/services/static-webserver/client/source/class/osparc/metadata/ClassifiersEditor.js +++ b/services/static-webserver/client/source/class/osparc/metadata/ClassifiersEditor.js @@ -146,7 +146,7 @@ qx.Class.define("osparc.metadata.ClassifiersEditor", { const newClassifiers = this.__classifiersTree.getCheckedClassifierIDs(); if (osparc.utils.Resources.isStudy(this.__resourceData) || osparc.utils.Resources.isTemplate(this.__resourceData)) { - osparc.store.Study.patchStudyData(this.__resourceData, "classifiers", newClassifiers) + osparc.store.Study.getInstance().patchStudyData(this.__resourceData, "classifiers", newClassifiers) .then(() => { osparc.FlashMessenger.logAs(this.tr("Classifiers successfully edited")); saveBtn.setFetching(false); diff --git a/services/static-webserver/client/source/class/osparc/metadata/QualityEditor.js b/services/static-webserver/client/source/class/osparc/metadata/QualityEditor.js index 2178057ae2d9..7ac8377433eb 100644 --- a/services/static-webserver/client/source/class/osparc/metadata/QualityEditor.js +++ b/services/static-webserver/client/source/class/osparc/metadata/QualityEditor.js @@ -469,7 +469,7 @@ qx.Class.define("osparc.metadata.QualityEditor", { .catch(err => osparc.FlashMessenger.logError(err, this.tr("There was an issue while updating the Quality Assessment."))) .finally(() => btn.setFetching(false)); } else { - osparc.store.Study.patchStudyData(this.__resourceData, "quality", newQuality) + osparc.store.Study.getInstance().patchStudyData(this.__resourceData, "quality", newQuality) .then(() => { this.__initResourceData(this.__resourceData); this.fireDataEvent("updateQuality", this.__resourceData); @@ -484,7 +484,7 @@ qx.Class.define("osparc.metadata.QualityEditor", { const myGid = osparc.auth.Data.getInstance().getGroupId(); if (myGid) { if (osparc.utils.Resources.isService(this.__resourceData)) { - return osparc.service.Utils.canIWrite(this.__resourceData["accessRights"]); + return osparc.data.model.Service.canIWrite(this.__resourceData["accessRights"]); } return osparc.data.model.Study.canIWrite(this.__resourceData["accessRights"]); } diff --git a/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudy.js b/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudy.js index bf1cc7a2773c..cec837790089 100644 --- a/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudy.js +++ b/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudy.js @@ -77,7 +77,7 @@ qx.Class.define("osparc.metadata.ServicesInStudy", { } this.setEnabled(false); - osparc.store.Study.patchNodeData(this._studyData, nodeId, patchData) + osparc.store.Study.getInstance().patchNodeData(this._studyData, nodeId, patchData) .then(() => { this.fireDataEvent("updateService", this._studyData); this._populateLayout(); diff --git a/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudyUpdate.js b/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudyUpdate.js index 4b8c971c763a..2b195208729f 100644 --- a/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudyUpdate.js +++ 
b/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudyUpdate.js @@ -27,21 +27,23 @@ qx.Class.define("osparc.metadata.ServicesInStudyUpdate", { UPDATE_BUTTON: Object.keys(osparc.metadata.ServicesInStudy.GRID_POS).length+2 }, - colorVersionLabel: function(versionLabel, metadata) { + paintChip: function(versionChip, metadata) { const isDeprecated = osparc.service.Utils.isDeprecated(metadata); const isRetired = osparc.service.Utils.isRetired(metadata); if (isDeprecated) { - versionLabel.set({ - textColor: "text-on-warning", // because the background is always yellow - backgroundColor: osparc.service.StatusUI.getColor("deprecated"), + versionChip.set({ + statusColor: osparc.ui.basic.Chip.STATUS.WARNING, toolTipText: qx.locale.Manager.tr("This service is deprecated. Please update.") }); } else if (isRetired) { - versionLabel.set({ - textColor: "text-on-warning", // because the background is always red - backgroundColor: osparc.service.StatusUI.getColor("retired"), + versionChip.set({ + statusColor: osparc.ui.basic.Chip.STATUS.ERROR, toolTipText: qx.locale.Manager.tr("This service has been retired. Please update.") }); + } else { + versionChip.set({ + statusColor: osparc.ui.basic.Chip.STATUS.SUCCESS, + }); } } }, @@ -60,7 +62,7 @@ qx.Class.define("osparc.metadata.ServicesInStudyUpdate", { osparc.store.Services.getStudyServices(this._studyData["uuid"]) .then(resp => { const services = resp["services"]; - if (osparc.study.Utils.getCantExecuteServices(services).length) { + if (osparc.study.Utils.getCantReadServices(services).length) { msg += this.tr("Some services are inaccessible. Please contact the service owner:"); msg += "
<br>
"; } @@ -157,16 +159,18 @@ qx.Class.define("osparc.metadata.ServicesInStudyUpdate", { i++; const node = workbench[nodeId]; const metadata = osparc.store.Services.getMetadata(node["key"], node["version"]); - const currentVersionLabel = new qx.ui.basic.Label(osparc.service.Utils.extractVersionDisplay(metadata)).set({ + const currentVersionChip = new osparc.ui.basic.Chip(osparc.service.Utils.extractVersionDisplay(metadata)); + currentVersionChip.getChildControl("label").set({ font: "text-14" }); - this.self().colorVersionLabel(currentVersionLabel, metadata); - this._servicesGrid.add(currentVersionLabel, { + this.self().paintChip(currentVersionChip, metadata); + this._servicesGrid.add(currentVersionChip, { row: i, column: this.self().GRID_POS.CURRENT_VERSION }); - const compatibleVersionLabel = new qx.ui.basic.Label().set({ + const compatibleVersionChip = new osparc.ui.basic.Chip(); + compatibleVersionChip.getChildControl("label").set({ font: "text-14" }); const latestCompatible = osparc.store.Services.getLatestCompatible(node["key"], node["version"]); @@ -178,16 +182,18 @@ qx.Class.define("osparc.metadata.ServicesInStudyUpdate", { if (node["key"] !== latestMetadata["key"]) { label = latestMetadata["name"] + ":" + label; } - compatibleVersionLabel.setValue(label); + compatibleVersionChip.setLabel(label); + this.self().paintChip(compatibleVersionChip, latestMetadata); }) .catch(err => console.error(err)); } else if (metadata) { // up to date - compatibleVersionLabel.setValue(metadata["version"]); + compatibleVersionChip.setLabel(metadata["version"]); + this.self().paintChip(compatibleVersionChip, metadata); } else { - compatibleVersionLabel.setValue(this.tr("Unknown")); + compatibleVersionChip.setLabel(this.tr("Unknown")); } - this._servicesGrid.add(compatibleVersionLabel, { + this._servicesGrid.add(compatibleVersionChip, { row: i, column: this.self().GRID_POS.COMPATIBLE_VERSION }); diff --git a/services/static-webserver/client/source/class/osparc/navigation/BreadcrumbsSlideshow.js b/services/static-webserver/client/source/class/osparc/navigation/BreadcrumbsSlideshow.js index e69e8b66ded0..a1830a0e15e6 100644 --- a/services/static-webserver/client/source/class/osparc/navigation/BreadcrumbsSlideshow.js +++ b/services/static-webserver/client/source/class/osparc/navigation/BreadcrumbsSlideshow.js @@ -104,12 +104,12 @@ qx.Class.define("osparc.navigation.BreadcrumbsSlideshow", { if (node.isFilePicker()) { osparc.service.StatusUI.setupFilePickerIcon(node, statusIcon); } else { - const check = node.isDynamic() ? "interactive" : "output"; - node.getStatus().bind(check, statusIcon, "source", { + const checkProp = node.isDynamic() ? 
"interactive" : "output"; + node.getStatus().bind(checkProp, statusIcon, "source", { converter: output => osparc.service.StatusUI.getIconSource(output), onUpdate: (_, target) => osparc.service.StatusUI.updateCircleAnimation(target) }); - node.getStatus().bind(check, statusIcon, "textColor", { + node.getStatus().bind(checkProp, statusIcon, "textColor", { converter: output => osparc.service.StatusUI.getColor(output) }, this); } diff --git a/services/static-webserver/client/source/class/osparc/navigation/NavigationBar.js b/services/static-webserver/client/source/class/osparc/navigation/NavigationBar.js index f9f046dd0b36..32b15b48a3b6 100644 --- a/services/static-webserver/client/source/class/osparc/navigation/NavigationBar.js +++ b/services/static-webserver/client/source/class/osparc/navigation/NavigationBar.js @@ -50,10 +50,17 @@ qx.Class.define("osparc.navigation.NavigationBar", { paddingLeft: 10, paddingRight: 10, height: this.self().HEIGHT, - backgroundColor: "background-main-1", + backgroundColor: this.self().BG_COLOR, }); osparc.utils.Utils.setIdToWidget(this, "navigationBar"); + + const socket = osparc.wrapper.WebSocket.getInstance(); + if (socket.isConnected()) { + this.__listenToProjectStateUpdated(); + } else { + socket.addListener("connect", () => this.__listenToProjectStateUpdated()); + } }, events: { @@ -72,6 +79,7 @@ qx.Class.define("osparc.navigation.NavigationBar", { }, statics: { + BG_COLOR: "background-main-1", HEIGHT: 50, SMALL_SCREEN_BREAKPOINT: 800, @@ -81,6 +89,15 @@ qx.Class.define("osparc.navigation.NavigationBar", { minWidth: 30, minHeight: 30 }, + + RIGHT_BUTTON_OPTS: { + cursor: "pointer", + alignX: "center", + alignY: "middle", + allowGrowX: false, + allowGrowY: false, + padding: 4, + }, }, members: { @@ -112,12 +129,19 @@ qx.Class.define("osparc.navigation.NavigationBar", { converter: s => s ? 
"visible" : "excluded" }); + this.getChildControl("saving-study-icon"); + // center-items this.getChildControl("read-only-info"); // right-items + if (osparc.utils.DisabledPlugins.isRTCEnabled()) { + this.getChildControl("avatar-group"); + } this.getChildControl("tasks-button"); - this.getChildControl("jobs-button"); + if (osparc.product.Utils.showComputationalActivity()) { + this.getChildControl("jobs-button"); + } this.getChildControl("notifications-button"); this.getChildControl("expiration-icon"); this.getChildControl("help"); @@ -133,26 +157,24 @@ qx.Class.define("osparc.navigation.NavigationBar", { switch (id) { case "left-items": control = new qx.ui.container.Composite(new qx.ui.layout.HBox(20).set({ + alignX: "left", alignY: "middle", - alignX: "left" })); - this._addAt(control, 0); + this._addAt(control, 0, { flex: 1 }); break; case "center-items": control = new qx.ui.container.Composite(new qx.ui.layout.HBox(10).set({ + alignX: "center", alignY: "middle", - alignX: "center" })); - this._addAt(control, 1, { - flex: 1 - }); + this._addAt(control, 1); break; case "right-items": - control = new qx.ui.container.Composite(new qx.ui.layout.HBox(10).set({ + control = new qx.ui.container.Composite(new qx.ui.layout.HBox(6).set({ + alignX: "right", alignY: "middle", - alignX: "right" })); - this._addAt(control, 2); + this._addAt(control, 2, { flex: 1 }); break; case "logo": control = osparc.navigation.LogoOnOff.getInstance().set({ @@ -200,6 +222,16 @@ qx.Class.define("osparc.navigation.NavigationBar", { control.addListener("openLogger", () => this.fireEvent("openLogger")); this.getChildControl("left-items").add(control); break; + case "saving-study-icon": + control = new qx.ui.basic.Atom().set({ + icon: "@FontAwesome5Solid/cloud-upload-alt/14", + label: this.tr("Saving..."), + font: "text-12", + opacity: 0.8, + visibility: "excluded", + }); + this.getChildControl("left-items").add(control); + break; case "read-only-info": { control = new qx.ui.basic.Atom().set({ label: this.tr("Read only"), @@ -219,16 +251,33 @@ qx.Class.define("osparc.navigation.NavigationBar", { this.getChildControl("center-items").add(control); break; } + case "avatar-group": { + const maxWidth = osparc.WindowSizeTracker.getInstance().isCompactVersion() ? 
80 : 150; + control = new osparc.ui.basic.AvatarGroup(26, "right", maxWidth).set({ + hideMyself: true, + alignY: "middle", + visibility: "excluded", + }); + this.getChildControl("right-items").add(control); + break; + } case "tasks-button": - control = new osparc.task.TasksButton(); + control = new osparc.task.TasksButton().set({ + visibility: "excluded", + ...this.self().RIGHT_BUTTON_OPTS + }); this.getChildControl("right-items").add(control); break; case "jobs-button": - control = new osparc.jobs.JobsButton(); + control = new osparc.jobs.JobsButton().set({ + ...this.self().RIGHT_BUTTON_OPTS + }); this.getChildControl("right-items").add(control); break; case "notifications-button": - control = new osparc.notification.NotificationsButton(); + control = new osparc.notification.NotificationsButton().set({ + ...this.self().RIGHT_BUTTON_OPTS + }); this.getChildControl("right-items").add(control); break; case "expiration-icon": { @@ -258,13 +307,16 @@ qx.Class.define("osparc.navigation.NavigationBar", { break; } case "help": - control = this.__createHelpMenuBtn(); - control.set(this.self().BUTTON_OPTIONS); + control = this.__createHelpBtn().set({ + ...this.self().RIGHT_BUTTON_OPTS + }); osparc.utils.Utils.setIdToWidget(control, "helpNavigationBtn"); this.getChildControl("right-items").add(control); break; case "credits-button": - control = new osparc.desktop.credits.CreditsIndicatorButton(); + control = new osparc.desktop.credits.CreditsIndicatorButton().set({ + ...this.self().RIGHT_BUTTON_OPTS + }); this.getChildControl("right-items").add(control); break; case "log-in-button": { @@ -295,28 +347,26 @@ qx.Class.define("osparc.navigation.NavigationBar", { return control || this.base(arguments, id); }, - __createHelpMenuBtn: function() { - const menu = new qx.ui.menu.Menu().set({ - position: "top-right", - appearance: "menu-wider", - }); - const menuButton = new qx.ui.form.MenuButton(null, "@FontAwesome5Regular/question-circle/22", menu).set({ + __listenToProjectStateUpdated: function() { + const socket = osparc.wrapper.WebSocket.getInstance(); + socket.on("projectStateUpdated", data => { + if (osparc.utils.DisabledPlugins.isRTCEnabled()) { + if (this.getStudy() && data["project_uuid"] === this.getStudy().getUuid()) { + const projectState = data["data"]; + const currentUserGroupIds = osparc.study.Utils.state.getCurrentGroupIds(projectState); + const avatarGroup = this.getChildControl("avatar-group"); + avatarGroup.setUserGroupIds(currentUserGroupIds); + } + } + }, this); + }, + + __createHelpBtn: function() { + const helpButton = new qx.ui.form.Button(null, "@FontAwesome5Regular/question-circle/24").set({ backgroundColor: "transparent" }); - - osparc.utils.Utils.setIdToWidget(menu, "helpNavigationMenu"); - - // quick starts and manuals - osparc.store.Support.addQuickStartToMenu(menu); - osparc.store.Support.addGuidedToursToMenu(menu); - osparc.store.Support.addManualButtonsToMenu(menu, menuButton); - menu.addSeparator(); - - // feedback - osparc.store.Support.addSupportButtonsToMenu(menu, menuButton); - osparc.store.Support.addReleaseNotesToMenu(menu); - - return menuButton; + helpButton.addListener("execute", () => osparc.support.SupportCenter.openWindow()); + return helpButton; }, __createLoginBtn: function() { @@ -335,15 +385,30 @@ qx.Class.define("osparc.navigation.NavigationBar", { }, __applyStudy: function(study) { - const readOnlyInfo = this.getChildControl("read-only-info") + const savingStudyIcon = this.getChildControl("saving-study-icon"); + const readOnlyInfo = 
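// __applyStudy now also wires the "Saving..." indicator (bound to the study's savePending
// flag and shown only in workbench/pipeline modes) and, when RTC is enabled, toggles the
// avatar group listing the users currently in the study: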
this.getChildControl("read-only-info"); if (study) { this.getChildControl("study-title-options").setStudy(study); + study.bind("savePending", savingStudyIcon, "visibility", { + converter: value => value && ["workbench", "pipeline"].includes(study.getUi().getMode()) ? "visible" : "excluded" + }); study.bind("readOnly", readOnlyInfo, "visibility", { converter: value => value ? "visible" : "excluded" }); } else { + savingStudyIcon.exclude(); readOnlyInfo.exclude(); } + + if (osparc.utils.DisabledPlugins.isRTCEnabled()) { + const avatarGroup = this.getChildControl("avatar-group"); + if (study) { + avatarGroup.show(); + } else { + avatarGroup.exclude(); + avatarGroup.setUserGroupIds([]); + } + } }, __navBarResized: function() { diff --git a/services/static-webserver/client/source/class/osparc/navigation/StudyTitleWOptions.js b/services/static-webserver/client/source/class/osparc/navigation/StudyTitleWOptions.js index 1b86bfab8564..363fc816d2e1 100644 --- a/services/static-webserver/client/source/class/osparc/navigation/StudyTitleWOptions.js +++ b/services/static-webserver/client/source/class/osparc/navigation/StudyTitleWOptions.js @@ -68,6 +68,14 @@ qx.Class.define("osparc.navigation.StudyTitleWOptions", { }); }); break; + case "study-menu-share": + control = new qx.ui.menu.Button().set({ + label: this.tr("Share..."), + icon: "@FontAwesome5Solid/share-alt/14", + ...this.self().BUTTON_OPTIONS + }); + control.addListener("execute", () => this.__openAccessRights()); + break; case "study-menu-reload": control = new qx.ui.menu.Button().set({ label: this.tr("Reload"), @@ -89,15 +97,6 @@ qx.Class.define("osparc.navigation.StudyTitleWOptions", { }); control.addListener("execute", () => this.__convertToPipelineClicked(), this); break; - case "study-menu-restore": - control = new qx.ui.menu.Button().set({ - label: this.tr("Restore"), - icon: osparc.theme.common.Image.URLS["window-restore"] + "/20", - }); - control.addListener("execute", () => { - this.getStudy().getUi().setMode("workbench"); - }); - break; case "study-menu-open-logger": control = new qx.ui.menu.Button().set({ label: this.tr("Platform Logs..."), @@ -109,10 +108,12 @@ qx.Class.define("osparc.navigation.StudyTitleWOptions", { const optionsMenu = new qx.ui.menu.Menu(); optionsMenu.setAppearance("menu-wider"); optionsMenu.add(this.getChildControl("study-menu-info")); + optionsMenu.add(this.getChildControl("study-menu-share")); optionsMenu.add(this.getChildControl("study-menu-reload")); optionsMenu.add(this.getChildControl("study-menu-conversations")); - optionsMenu.add(this.getChildControl("study-menu-convert-to-pipeline")); - optionsMenu.add(this.getChildControl("study-menu-restore")); + if (osparc.product.Utils.showConvertToPipeline()) { + optionsMenu.add(this.getChildControl("study-menu-convert-to-pipeline")); + } optionsMenu.add(this.getChildControl("study-menu-open-logger")); control = new qx.ui.form.MenuButton().set({ appearance: "fab-button", @@ -130,6 +131,7 @@ qx.Class.define("osparc.navigation.StudyTitleWOptions", { inputFont: "text-14", maxWidth: 300 }); + osparc.utils.Utils.setIdToWidget(control, "studyTitleRenamer"); control.addListener("editValue", e => { const newLabel = e.getData(); this.getStudy().setName(newLabel); @@ -140,6 +142,17 @@ qx.Class.define("osparc.navigation.StudyTitleWOptions", { return control || this.base(arguments, id); }, + __openAccessRights: function() { + const studyData = this.getStudy().serialize(); + studyData["resourceType"] = this.getStudy().getTemplateType() ? 
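// The new "Share..." menu entry serializes the study and tags it with the appropriate
// resourceType before opening the access-rights dialog; updates coming back from the
// dialog are written into the model and re-emitted as an updateStudy event. The type
// is resolved here: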
"template" : "study"; + const collaboratorsView = osparc.info.StudyUtils.openAccessRights(studyData); + collaboratorsView.addListener("updateAccessRights", e => { + const updatedData = e.getData(); + this.getStudy().setAccessRights(updatedData["accessRights"]); + this.fireDataEvent("updateStudy", updatedData); + }, this); + }, + __reloadIFrame: function() { const nodes = this.getStudy().getWorkbench().getNodes(); if (Object.keys(nodes).length === 1) { @@ -152,6 +165,9 @@ qx.Class.define("osparc.navigation.StudyTitleWOptions", { const editTitle = this.getChildControl("edit-title-label"); study.bind("name", editTitle, "value"); + const shareButton = this.getChildControl("study-menu-share"); + shareButton.setEnabled(osparc.data.model.Study.canIWrite(study.getAccessRights())); + const reloadButton = this.getChildControl("study-menu-reload"); study.getUi().bind("mode", reloadButton, "visibility", { converter: mode => mode === "standalone" ? "visible" : "excluded" @@ -162,15 +178,12 @@ qx.Class.define("osparc.navigation.StudyTitleWOptions", { converter: mode => mode === "standalone" ? "visible" : "excluded" }); - const convertToPipelineButton = this.getChildControl("study-menu-convert-to-pipeline"); - study.getUi().bind("mode", convertToPipelineButton, "visibility", { - converter: mode => mode === "standalone" ? "visible" : "excluded" - }); - - const restoreButton = this.getChildControl("study-menu-restore"); - study.getUi().bind("mode", restoreButton, "visibility", { - converter: mode => mode === "standalone" ? "visible" : "excluded" - }); + if (osparc.product.Utils.showConvertToPipeline()) { + const convertToPipelineButton = this.getChildControl("study-menu-convert-to-pipeline"); + study.getUi().bind("mode", convertToPipelineButton, "visibility", { + converter: mode => mode === "standalone" ? 
"visible" : "excluded" + }); + } const loggerButton = this.getChildControl("study-menu-open-logger"); study.getUi().bind("mode", loggerButton, "visibility", { diff --git a/services/static-webserver/client/source/class/osparc/navigation/UserMenu.js b/services/static-webserver/client/source/class/osparc/navigation/UserMenu.js index dfe8898e0e5e..90e15c7caf27 100644 --- a/services/static-webserver/client/source/class/osparc/navigation/UserMenu.js +++ b/services/static-webserver/client/source/class/osparc/navigation/UserMenu.js @@ -51,9 +51,9 @@ qx.Class.define("osparc.navigation.UserMenu", { control.addListener("execute", () => osparc.desktop.account.MyAccountWindow.openWindow(), this); this.add(control); break; - case "admin-center": - control = new qx.ui.menu.Button(this.tr("Admin Center")); - control.addListener("execute", () => osparc.admin.AdminCenterWindow.openWindow(), this); + case "tester-center": + control = new qx.ui.menu.Button(this.tr("Tester Center")); + control.addListener("execute", () => osparc.tester.TesterCenterWindow.openWindow(), this); this.add(control); break; case "po-center": @@ -61,9 +61,9 @@ qx.Class.define("osparc.navigation.UserMenu", { control.addListener("execute", () => osparc.po.POCenterWindow.openWindow(), this); this.add(control); break; - case "tester-center": - control = new qx.ui.menu.Button(this.tr("Tester Center")); - control.addListener("execute", () => osparc.tester.TesterCenterWindow.openWindow(), this); + case "admin-center": + control = new qx.ui.menu.Button(this.tr("Admin Center")); + control.addListener("execute", () => osparc.admin.AdminCenterWindow.openWindow(), this); this.add(control); break; case "billing-center": @@ -85,11 +85,24 @@ qx.Class.define("osparc.navigation.UserMenu", { control.addListener("execute", () => osparc.desktop.organizations.OrganizationsWindow.openWindow(), this); this.add(control); break; + case "help-button": + control = new qx.ui.menu.Button().set({ + label: qx.locale.Manager.tr("Help & Support"), + icon: "@FontAwesome5Solid/question-circle/16", + }); + control.addListener("execute", () => osparc.support.SupportCenter.openWindow()); + this.add(control); + break; case "market": control = new qx.ui.menu.Button(this.tr("The Shop")); control.addListener("execute", () => osparc.vipMarket.MarketWindow.openWindow()); this.add(control); break; + case "rocket-preview": + control = new qx.ui.menu.Button(this.tr("Rocket Preview")); + control.addListener("execute", () => osparc.wrapper.RocketPreview.openWindow()); + this.add(control); + break; case "about": control = new qx.ui.menu.Button(this.tr("About oSPARC")); osparc.utils.Utils.setIdToWidget(control, "userMenuAboutBtn"); @@ -99,7 +112,7 @@ qx.Class.define("osparc.navigation.UserMenu", { case "about-product": { control = new qx.ui.menu.Button(this.tr("About Product")); osparc.utils.Utils.setIdToWidget(control, "userMenuAboutProductBtn"); - const displayName = osparc.store.StaticInfo.getInstance().getDisplayName(); + const displayName = osparc.store.StaticInfo.getDisplayName(); control.getChildControl("label").setRich(true); control.setLabel(this.tr("About ") + displayName); control.addListener("execute", () => osparc.product.AboutProduct.getInstance().open()); @@ -144,14 +157,14 @@ qx.Class.define("osparc.navigation.UserMenu", { this.getChildControl("log-in"); } else { this.getChildControl("user-center"); - if (osparc.data.Permissions.getInstance().isAdmin()) { - this.getChildControl("admin-center"); + if (osparc.data.Permissions.getInstance().isTester()) { + 
this.getChildControl("tester-center"); } if (osparc.data.Permissions.getInstance().isProductOwner()) { this.getChildControl("po-center"); } - if (osparc.data.Permissions.getInstance().isTester()) { - this.getChildControl("tester-center"); + if (osparc.data.Permissions.getInstance().isAdmin()) { + this.getChildControl("admin-center"); } if (osparc.desktop.credits.Utils.areWalletsEnabled()) { this.getChildControl("billing-center"); @@ -169,6 +182,10 @@ qx.Class.define("osparc.navigation.UserMenu", { this.getChildControl("market"); } + if (osparc.utils.Utils.isDevelopmentPlatform() && osparc.wrapper.RocketPreview.existsBuild()) { + this.getChildControl("rocket-preview"); + } + this.getChildControl("about"); if (osparc.product.Utils.showAboutProduct()) { this.getChildControl("about-product"); @@ -196,14 +213,14 @@ qx.Class.define("osparc.navigation.UserMenu", { this.getChildControl("log-in"); } else { this.getChildControl("user-center"); - if (osparc.data.Permissions.getInstance().isAdmin()) { - this.getChildControl("admin-center"); + if (osparc.data.Permissions.getInstance().isTester()) { + this.getChildControl("tester-center"); } if (osparc.data.Permissions.getInstance().isProductOwner()) { this.getChildControl("po-center"); } - if (osparc.data.Permissions.getInstance().isTester()) { - this.getChildControl("tester-center"); + if (osparc.data.Permissions.getInstance().isAdmin()) { + this.getChildControl("admin-center"); } if (osparc.desktop.credits.Utils.areWalletsEnabled()) { this.getChildControl("billing-center"); @@ -213,14 +230,7 @@ qx.Class.define("osparc.navigation.UserMenu", { this.addSeparator(); // quick starts and manuals - osparc.store.Support.addQuickStartToMenu(this); - osparc.store.Support.addGuidedToursToMenu(this); - osparc.store.Support.addManualButtonsToMenu(this); - this.addSeparator(); - - // feedbacks - osparc.store.Support.addSupportButtonsToMenu(this); - osparc.store.Support.addReleaseNotesToMenu(this); + this.getChildControl("help-button"); this.addSeparator(); this.getChildControl("theme-switcher"); diff --git a/services/static-webserver/client/source/class/osparc/navigation/UserMenuButton.js b/services/static-webserver/client/source/class/osparc/navigation/UserMenuButton.js index 777834d2739e..e0647ed985b8 100644 --- a/services/static-webserver/client/source/class/osparc/navigation/UserMenuButton.js +++ b/services/static-webserver/client/source/class/osparc/navigation/UserMenuButton.js @@ -39,9 +39,7 @@ qx.Class.define("osparc.navigation.UserMenuButton", { this.getContentElement().setStyles({ "border-radius": "20px" }); - this.getChildControl("icon").getContentElement().setStyles({ - "border-radius": "16px" - }); + this.getChildControl("icon").setDecorator("circled"); osparc.utils.Utils.setIdToWidget(this, "userMenuBtn"); const store = osparc.store.Store.getInstance(); @@ -51,7 +49,7 @@ qx.Class.define("osparc.navigation.UserMenuButton", { const preferencesSettings = osparc.Preferences.getInstance(); preferencesSettings.addListener("changeCreditsWarningThreshold", () => this.__updateHaloColor()); - const myUsername = authData.getUsername() || "Username"; + const myUserName = authData.getUserName() || "UserName"; const myEmail = authData.getEmail() || "bizzy@itis.ethz.ch"; const icon = this.getChildControl("icon"); authData.bind("role", this, "icon", { @@ -65,7 +63,7 @@ qx.Class.define("osparc.navigation.UserMenuButton", { icon.getContentElement().setStyles({ "margin-left": "-4px" }); - return osparc.utils.Avatar.emailToThumbnail(myEmail, myUsername, 32); + return 
osparc.utils.Avatar.emailToThumbnail(myEmail, myUserName, 32); } }); }, diff --git a/services/static-webserver/client/source/class/osparc/node/BootOptionsView.js b/services/static-webserver/client/source/class/osparc/node/BootOptionsView.js index 291c028422d8..02ba8b5f7530 100644 --- a/services/static-webserver/client/source/class/osparc/node/BootOptionsView.js +++ b/services/static-webserver/client/source/class/osparc/node/BootOptionsView.js @@ -38,7 +38,7 @@ qx.Class.define("osparc.node.BootOptionsView", { const buttonsLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); - const nodeMetadata = node.getMetaData(); + const nodeMetadata = node.getMetadata(); const workbenchData = node.getWorkbench().serialize(); const nodeId = node.getNodeId(); const bootModeSB = osparc.data.model.Node.getBootModesSelectBox(nodeMetadata, workbenchData, nodeId); @@ -50,13 +50,22 @@ qx.Class.define("osparc.node.BootOptionsView", { if (selection.length) { buttonsLayout.setEnabled(false); const newBootModeId = selection[0].bootModeId; - node.setBootOptions({ + const data = { "boot_mode": newBootModeId + }; + node.setBootOptions(data); + node.fireDataEvent("projectDocumentChanged", { + "op": "replace", + "path": `/workbench/${nodeId}/bootOptions`, + "value": data, + "osparc-resource": "node", }); - node.fireEvent("updateStudyDocument"); + // add timeout to make sure the node is saved before starting it setTimeout(() => { buttonsLayout.setEnabled(true); - node.requestStartNode(); + if (!node.getStudy().getDisableServiceAutoStart()) { + node.requestStartNode(); + } }, osparc.desktop.StudyEditor.AUTO_SAVE_INTERVAL); } }, this); diff --git a/services/static-webserver/client/source/class/osparc/node/LifeCycleView.js b/services/static-webserver/client/source/class/osparc/node/LifeCycleView.js index ea6ed0fbb289..b64672cb88a5 100644 --- a/services/static-webserver/client/source/class/osparc/node/LifeCycleView.js +++ b/services/static-webserver/client/source/class/osparc/node/LifeCycleView.js @@ -65,7 +65,7 @@ qx.Class.define("osparc.node.LifeCycleView", { const node = this.getNode(); if (node.isDeprecated()) { - const deprecateDateLabel = new qx.ui.basic.Label(osparc.service.Utils.getDeprecationDateText(node.getMetaData())).set({ + const deprecateDateLabel = new qx.ui.basic.Label(osparc.service.Utils.getDeprecationDateText(node.getMetadata())).set({ rich: true }); this._add(deprecateDateLabel); @@ -111,16 +111,32 @@ qx.Class.define("osparc.node.LifeCycleView", { updateButton.addListener("execute", () => { updateButton.setFetching(true); const latestCompatible = osparc.store.Services.getLatestCompatible(node.getKey(), node.getVersion()); + const newData = {}; if (node.getKey() !== latestCompatible["key"]) { - node.setKey(latestCompatible["key"]); + newData["key"] = latestCompatible["key"]; } if (node.getVersion() !== latestCompatible["version"]) { - node.setVersion(latestCompatible["version"]); + newData["version"] = latestCompatible["version"]; } - node.fireEvent("updateStudyDocument"); + node.set(newData); + const nodeId = node.getNodeId(); + node.fireDataEvent("projectDocumentChanged", [{ + "op": "replace", + "path": `/workbench/${nodeId}/key`, + "value": latestCompatible["key"], + "osparc-resource": "node", + }, { + "op": "replace", + "path": `/workbench/${nodeId}/version`, + "value": latestCompatible["version"], + "osparc-resource": "node", + }]); + // add timeout to make sure the node is saved before starting it setTimeout(() => { updateButton.setFetching(false); - node.requestStartNode(); + if 
(!node.getStudy().getDisableServiceAutoStart()) { + node.requestStartNode(); + } }, osparc.desktop.StudyEditor.AUTO_SAVE_INTERVAL); }); diff --git a/services/static-webserver/client/source/class/osparc/node/ParameterEditor.js b/services/static-webserver/client/source/class/osparc/node/ParameterEditor.js index a762a75af38a..b1993171f0c9 100644 --- a/services/static-webserver/client/source/class/osparc/node/ParameterEditor.js +++ b/services/static-webserver/client/source/class/osparc/node/ParameterEditor.js @@ -30,7 +30,7 @@ qx.Class.define("osparc.node.ParameterEditor", { statics: { getParameterOutputType: function(node) { - const metadata = node.getMetaData(); + const metadata = node.getMetadata(); return osparc.service.Utils.getParameterType(metadata); }, diff --git a/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js b/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js index b4432cedecde..944abc528b24 100644 --- a/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js +++ b/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js @@ -46,7 +46,7 @@ qx.Class.define("osparc.node.TierSelectionView", { tiersLayout.add(tierBox); const node = this.getNode(); - osparc.store.Pricing.getInstance().fetchPricingPlansService(node.getKey(), node.getVersion()) + osparc.store.Services.getPricingPlan(node.getKey(), node.getVersion()) .then(pricingPlans => { if (pricingPlans && "pricingUnits" in pricingPlans && pricingPlans["pricingUnits"].length) { const pricingUnits = pricingPlans["pricingUnits"].map(pricingUnitData => { @@ -59,16 +59,10 @@ qx.Class.define("osparc.node.TierSelectionView", { }); const studyId = node.getStudy().getUuid(); const nodeId = node.getNodeId(); - const unitParams = { - url: { - studyId, - nodeId - } - }; - osparc.data.Resources.fetch("studies", "getPricingUnit", unitParams) - .then(preselectedPricingUnit => { - if (preselectedPricingUnit && preselectedPricingUnit["pricingUnitId"]) { - const tierFound = tierBox.getSelectables().find(t => t.getModel() === preselectedPricingUnit["pricingUnitId"]); + osparc.store.Study.getInstance().getSelectedPricingUnit(studyId, nodeId) + .then(selectedPricingUnit => { + if (selectedPricingUnit && selectedPricingUnit["pricingUnitId"]) { + const tierFound = tierBox.getSelectables().find(t => t.getModel() === selectedPricingUnit["pricingUnitId"]); if (tierFound) { tierBox.setSelection([tierFound]); } else { @@ -102,7 +96,8 @@ qx.Class.define("osparc.node.TierSelectionView", { if (selection.length) { tierBox.setEnabled(false); const selectedUnitId = selection[0].getModel(); - osparc.study.NodePricingUnits.patchPricingUnitSelection(studyId, nodeId, pricingPlans["pricingPlanId"], selectedUnitId) + const selectedUnit = pricingUnits.find(pUnit => pUnit.getPricingUnitId() === selectedUnitId) + osparc.store.Study.getInstance().updateSelectedPricingUnit(studyId, nodeId, pricingPlans["pricingPlanId"], selectedUnit) .finally(() => { tierBox.setEnabled(true); showSelectedTier(selectedUnitId); diff --git a/services/static-webserver/client/source/class/osparc/node/UpdateResourceLimitsView.js b/services/static-webserver/client/source/class/osparc/node/UpdateResourceLimitsView.js index 507eef686dd0..6226cbca4bea 100644 --- a/services/static-webserver/client/source/class/osparc/node/UpdateResourceLimitsView.js +++ b/services/static-webserver/client/source/class/osparc/node/UpdateResourceLimitsView.js @@ -43,13 +43,7 @@ 
qx.Class.define("osparc.node.UpdateResourceLimitsView", { this._add(resourcesLayout); const node = this.getNode(); - const params = { - url: { - studyId: node.getStudy().getUuid(), - nodeId: node.getNodeId() - } - }; - osparc.data.Resources.get("nodesInStudyResources", params) + osparc.store.Study.getInstance().getNodeResources(node.getStudy().getUuid(), node.getNodeId()) .then(serviceResources => { resourcesLayout.show(); const gridLayout = resourcesLayout.getChildren()[1]; @@ -145,17 +139,8 @@ qx.Class.define("osparc.node.UpdateResourceLimitsView", { } }); const node = this.getNode(); - const params = { - url: { - studyId: node.getStudy().getUuid(), - nodeId: node.getNodeId() - }, - data: updatedResources - }; - osparc.data.Resources.fetch("nodesInStudyResources", "put", params) - .then(() => { - osparc.FlashMessenger.logAs(this.tr("Limits have been successfully updated")); - }) + osparc.store.Study.getInstance().updateNodeResources(node.getStudy().getUuid(), node.getNodeId(), updatedResources) + .then(() => osparc.FlashMessenger.logAs(this.tr("Limits have been successfully updated"))) .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while updating the limits"))) .finally(() => { this.__saveBtn.setFetching(false); diff --git a/services/static-webserver/client/source/class/osparc/node/slideshow/BaseNodeView.js b/services/static-webserver/client/source/class/osparc/node/slideshow/BaseNodeView.js index a2ee4daab00e..ec9fba511872 100644 --- a/services/static-webserver/client/source/class/osparc/node/slideshow/BaseNodeView.js +++ b/services/static-webserver/client/source/class/osparc/node/slideshow/BaseNodeView.js @@ -165,7 +165,7 @@ qx.Class.define("osparc.node.slideshow.BaseNodeView", { flex: 1 }); - const outputsBtn = this._outputsBtn = new qx.ui.form.ToggleButton().set({ + const outputsBtn = this.__outputsBtn = new qx.ui.form.ToggleButton().set({ width: 110, label: this.tr("Outputs"), icon: "@FontAwesome5Solid/sign-out-alt/14", @@ -198,7 +198,7 @@ qx.Class.define("osparc.node.slideshow.BaseNodeView", { }); mainView.bind("backgroundColor", outputsLayout, "backgroundColor"); mainView.bind("backgroundColor", outputsLayout.getChildControl("frame"), "backgroundColor"); - this._outputsBtn.bind("value", outputsLayout, "visibility", { + this.__outputsBtn.bind("value", outputsLayout, "visibility", { converter: value => value ? 
"visible" : "excluded" }); hBox.add(outputsLayout); @@ -217,7 +217,7 @@ qx.Class.define("osparc.node.slideshow.BaseNodeView", { __openServiceDetails: function() { const node = this.getNode(); - const metadata = node.getMetaData(); + const metadata = node.getMetadata(); const serviceDetails = new osparc.info.ServiceLarge(metadata, { nodeId: node.getNodeId(), label: node.getLabel(), @@ -266,7 +266,7 @@ qx.Class.define("osparc.node.slideshow.BaseNodeView", { }, getOutputsButton: function() { - return this._outputsBtn; + return this.__outputsBtn; }, getMainView: function() { @@ -277,13 +277,6 @@ qx.Class.define("osparc.node.slideshow.BaseNodeView", { return this._settingsLayout; }, - /** - * @abstract - */ - isSettingsGroupShowable: function() { - throw new Error("Abstract method called!"); - }, - /** * @abstract */ @@ -415,7 +408,7 @@ qx.Class.define("osparc.node.slideshow.BaseNodeView", { node.getStatus().addListener("changeProgress", () => updateProgress(), this); } - node.bind("outputs", this._outputsBtn, "label", { + node.bind("outputs", this.__outputsBtn, "label", { converter: outputsData => { let outputCounter = 0; Object.keys(outputsData).forEach(outKey => { @@ -427,7 +420,17 @@ qx.Class.define("osparc.node.slideshow.BaseNodeView", { return this.tr("Outputs") + ` (${outputCounter})`; } }); - this._outputsBtn.addListener("changeLabel", () => osparc.utils.Utils.makeButtonBlink(this._outputsBtn, 2)); + this.__outputsBtn.addListener("changeLabel", () => { + // new output received + // make button blink + osparc.utils.Utils.makeButtonBlink(this.__outputsBtn, 2); + // and show Flash Message + const outputs = this.getNode().getOutputs(); + if (outputs && Object.keys(outputs).length > 0) { + const flashMsg = this.tr("New Outputs generated"); + osparc.FlashMessenger.getInstance().logAs(flashMsg, "INFO", 2000); + } + }); this._addLogger(); } diff --git a/services/static-webserver/client/source/class/osparc/node/slideshow/FilePickerView.js b/services/static-webserver/client/source/class/osparc/node/slideshow/FilePickerView.js index cd66e03d48aa..82f3c0480f72 100644 --- a/services/static-webserver/client/source/class/osparc/node/slideshow/FilePickerView.js +++ b/services/static-webserver/client/source/class/osparc/node/slideshow/FilePickerView.js @@ -27,11 +27,6 @@ qx.Class.define("osparc.node.slideshow.FilePickerView", { }, members: { - // overridden - isSettingsGroupShowable: function() { - return false; - }, - // overridden _addSettings: function() { return; diff --git a/services/static-webserver/client/source/class/osparc/node/slideshow/NodeView.js b/services/static-webserver/client/source/class/osparc/node/slideshow/NodeView.js index cee5d1fb1307..2db39881a513 100644 --- a/services/static-webserver/client/source/class/osparc/node/slideshow/NodeView.js +++ b/services/static-webserver/client/source/class/osparc/node/slideshow/NodeView.js @@ -38,13 +38,6 @@ qx.Class.define("osparc.node.slideshow.NodeView", { statics: { LOGGER_HEIGHT: 28, - - isPropsFormShowable: function(node) { - if (node && ("getPropsForm" in node) && node.getPropsForm()) { - return node.getPropsForm().hasVisibleInputs(); - } - return false; - }, }, members: { @@ -55,12 +48,23 @@ qx.Class.define("osparc.node.slideshow.NodeView", { this._settingsLayout.removeAll(); const node = this.getNode(); - const propsForm = node.getPropsForm(); - if (propsForm && node.hasInputs()) { - propsForm.addListener("changeChildVisibility", () => this.__checkSettingsVisibility(), this); - this._settingsLayout.add(propsForm); + if ( + 
node.isComputational() && + node.hasInputs() && + "getPropsForm" in node && + node.getPropsForm() && + node.getPropsForm().hasVisibleInputs() + ) { + this._settingsLayout.add(node.getPropsForm()); + + // lock the inputs if the node is locked + node.getStatus().getLockState().bind("locked", node.getPropsForm(), "enabled", { + converter: locked => !locked + }); } - this.__checkSettingsVisibility(); + + const showSettings = node.isComputational(); + this._settingsLayout.setVisibility(showSettings ? "visible" : "excluded"); this._mainView.add(this._settingsLayout); }, @@ -72,10 +76,7 @@ qx.Class.define("osparc.node.slideshow.NodeView", { const loadingPage = this.getNode().getLoadingPage(); const iFrame = this.getNode().getIFrame(); if (loadingPage && iFrame) { - const node = this.getNode(); - node.getIframeHandler().addListener("iframeChanged", () => this.__iFrameChanged(), this); - iFrame.addListener("load", () => this.__iFrameChanged()); - this.__iFrameChanged(); + osparc.desktop.WorkbenchView.listenToIframeStateChanges(this.getNode(), this._iFrameLayout); } else { // This will keep what comes after at the bottom this._iFrameLayout.add(new qx.ui.core.Spacer(), { @@ -99,7 +100,7 @@ qx.Class.define("osparc.node.slideshow.NodeView", { this._outputsLayout.add(outputsForm); } - this._outputsBtn.set({ + this.getOutputsButton().set({ value: false, enabled: this.getNode().hasOutputs() > 0 }); @@ -128,33 +129,5 @@ qx.Class.define("osparc.node.slideshow.NodeView", { _applyNode: function(node) { this.base(arguments, node); }, - - __checkSettingsVisibility: function() { - const isSettingsGroupShowable = this.isSettingsGroupShowable(); - this._settingsLayout.setVisibility(isSettingsGroupShowable ? "visible" : "excluded"); - }, - - isSettingsGroupShowable: function() { - const node = this.getNode(); - if (node.isComputational()) { - return this.self().isPropsFormShowable(node); - } - return false; - }, - - __iFrameChanged: function() { - this._iFrameLayout.removeAll(); - - const node = this.getNode(); - if (node && node.getIFrame()) { - const loadingPage = node.getLoadingPage(); - const iFrame = node.getIFrame(); - const src = iFrame.getSource(); - const iFrameView = (src === null || src === "about:blank") ? 
loadingPage : iFrame; - this._iFrameLayout.add(iFrameView, { - flex: 1 - }); - } - } } }); diff --git a/services/static-webserver/client/source/class/osparc/notification/Notification.js b/services/static-webserver/client/source/class/osparc/notification/Notification.js index af219894c0fc..7318bd995d7c 100644 --- a/services/static-webserver/client/source/class/osparc/notification/Notification.js +++ b/services/static-webserver/client/source/class/osparc/notification/Notification.js @@ -54,6 +54,7 @@ qx.Class.define("osparc.notification.Notification", { "NEW_ORGANIZATION", "STUDY_SHARED", "TEMPLATE_SHARED", + "CONVERSATION_NOTIFICATION", "ANNOTATION_NOTE", "WALLET_SHARED" ], diff --git a/services/static-webserver/client/source/class/osparc/notification/NotificationUI.js b/services/static-webserver/client/source/class/osparc/notification/NotificationUI.js index 98bf24ebb7d4..731b632db3a4 100644 --- a/services/static-webserver/client/source/class/osparc/notification/NotificationUI.js +++ b/services/static-webserver/client/source/class/osparc/notification/NotificationUI.js @@ -114,6 +114,60 @@ qx.Class.define("osparc.notification.NotificationUI", { }, __applyNotification: function(notification) { + const icon = this.getChildControl("icon"); + switch (notification.getCategory()) { + case "NEW_ORGANIZATION": + icon.setSource("@FontAwesome5Solid/users/14"); + break; + case "STUDY_SHARED": + icon.setSource("@FontAwesome5Solid/file/14"); + break; + case "TEMPLATE_SHARED": + icon.setSource("@FontAwesome5Solid/copy/14"); + break; + case "CONVERSATION_NOTIFICATION": + icon.setSource("@FontAwesome5Solid/bell/14"); + break; + case "ANNOTATION_NOTE": + icon.setSource("@FontAwesome5Solid/file/14"); + break; + case "WALLET_SHARED": + icon.setSource("@MaterialIcons/account_balance_wallet/14"); + break; + } + + const titleLabel = this.getChildControl("title"); + titleLabel.setValue(notification.getTitle()); + + const descriptionLabel = this.getChildControl("text"); + descriptionLabel.setValue(notification.getText()); + + const date = this.getChildControl("date"); + notification.bind("date", date, "value", { + converter: value => { + if (value) { + return osparc.utils.Utils.formatDateAndTime(new Date(value)); + } + return ""; + } + }); + + const highlight = mouseOn => { + this.set({ + backgroundColor: mouseOn ? 
"strong-main" : "transparent" + }) + }; + this.addListener("mouseover", () => highlight(true)); + this.addListener("mouseout", () => highlight(false)); + highlight(false); + + // this will trigger calls to the backend, so only make them if necessary + this.addListenerOnce("appear", () => this.__enrichTexts()); + }, + + __enrichTexts: function() { + const notification = this.getNotification(); + let resourceId = null; if (notification.getResourceId()) { resourceId = notification.getResourceId(); @@ -122,35 +176,25 @@ qx.Class.define("osparc.notification.NotificationUI", { const actionablePath = notification.getActionablePath(); resourceId = actionablePath.split("/")[1]; } - const userFromId = notification.getUserFromId(); - const icon = this.getChildControl("icon"); + const userFromId = notification.getUserFromId(); const titleLabel = this.getChildControl("title"); - titleLabel.setValue(notification.getTitle()); const descriptionLabel = this.getChildControl("text"); - descriptionLabel.setValue(notification.getText()); switch (notification.getCategory()) { case "NEW_ORGANIZATION": - icon.setSource("@FontAwesome5Solid/users/14"); if (resourceId) { const org = osparc.store.Groups.getInstance().getOrganization(resourceId); if (org) { - descriptionLabel.setValue("You're now member of '" + org.getLabel() + "'") + descriptionLabel.setValue("You're now member of '" + org.getLabel() + "'"); } else { this.setEnabled(false); } } break; case "STUDY_SHARED": - icon.setSource("@FontAwesome5Solid/file/14"); if (resourceId) { - const params = { - url: { - "studyId": resourceId - } - }; - osparc.data.Resources.fetch("studies", "getOne", params) + osparc.store.Study.getInstance().getOne(resourceId) .then(study => { const studyAlias = osparc.product.Utils.getStudyAlias({ firstUpperCase: true @@ -167,7 +211,6 @@ qx.Class.define("osparc.notification.NotificationUI", { } break; case "TEMPLATE_SHARED": - icon.setSource("@FontAwesome5Solid/copy/14"); if (resourceId) { osparc.store.Templates.fetchTemplate(resourceId) .then(templateData => { @@ -184,15 +227,22 @@ qx.Class.define("osparc.notification.NotificationUI", { } } break; + case "CONVERSATION_NOTIFICATION": + if (resourceId) { + osparc.store.Study.getInstance().getOne(resourceId) + .then(study => titleLabel.setValue(`You were notified in '${study["name"]}'`)) + .catch(() => this.setEnabled(false)); + } + if (userFromId) { + const user = osparc.store.Groups.getInstance().getUserByUserId(userFromId); + if (user) { + descriptionLabel.setValue(user.getLabel() + " wants you to check the conversation"); + } + } + break; case "ANNOTATION_NOTE": - icon.setSource("@FontAwesome5Solid/file/14"); if (resourceId) { - const params = { - url: { - "studyId": resourceId - } - }; - osparc.data.Resources.fetch("studies", "getOne", params) + osparc.store.Study.getInstance().getOne(resourceId) .then(study => titleLabel.setValue(`Note added in '${study["name"]}'`)) .catch(() => this.setEnabled(false)); } @@ -204,28 +254,8 @@ qx.Class.define("osparc.notification.NotificationUI", { } break; case "WALLET_SHARED": - icon.setSource("@MaterialIcons/account_balance_wallet/14"); break; } - - const date = this.getChildControl("date"); - notification.bind("date", date, "value", { - converter: value => { - if (value) { - return osparc.utils.Utils.formatDateAndTime(new Date(value)); - } - return ""; - } - }); - - const highlight = mouseOn => { - this.set({ - backgroundColor: mouseOn ? 
"strong-main" : "transparent" - }) - }; - this.addListener("mouseover", () => highlight(true)); - this.addListener("mouseout", () => highlight(false)); - highlight(false); }, __notificationTapped: function() { @@ -250,6 +280,7 @@ qx.Class.define("osparc.notification.NotificationUI", { break; case "TEMPLATE_SHARED": case "STUDY_SHARED": + case "CONVERSATION_NOTIFICATION": case "ANNOTATION_NOTE": this.__openStudyDetails(resourceId, notification); break; @@ -276,24 +307,25 @@ qx.Class.define("osparc.notification.NotificationUI", { }, __openStudyDetails: function(studyId, notification) { - const params = { - url: { - "studyId": studyId - } - }; - osparc.data.Resources.fetch("studies", "getOne", params) + osparc.store.Study.getInstance().getOne(studyId) .then(studyData => { if (studyData) { const studyDataCopy = osparc.data.model.Study.deepCloneStudyObject(studyData); studyDataCopy["resourceType"] = notification.getCategory() === "TEMPLATE_SHARED" ? "template" : "study"; - const resourceDetails = new osparc.dashboard.ResourceDetails(studyDataCopy); - const win = osparc.dashboard.ResourceDetails.popUpInWindow(resourceDetails); + const { + resourceDetails, + window, + } = osparc.dashboard.ResourceDetails.popUpInWindow(studyDataCopy); + resourceDetails.addListener("openStudy", () => { if (notification.getCategory() === "STUDY_SHARED") { - const openCB = () => win.close(); + const openCB = () => window.close(); osparc.dashboard.ResourceBrowserBase.startStudyById(studyId, openCB); } }); + if (notification.getCategory() === "CONVERSATION_NOTIFICATION") { + resourceDetails.addListener("pagesAdded", () => resourceDetails.openConversations()); + } } }) .catch(err => { diff --git a/services/static-webserver/client/source/class/osparc/notification/Notifications.js b/services/static-webserver/client/source/class/osparc/notification/Notifications.js index 2d5a2de9318b..8ee9273958a1 100644 --- a/services/static-webserver/client/source/class/osparc/notification/Notifications.js +++ b/services/static-webserver/client/source/class/osparc/notification/Notifications.js @@ -85,6 +85,21 @@ qx.Class.define("osparc.notification.Notifications", { }; }, + __newConversationNotificationObj: function(userId, studyId) { + const baseNotification = this.__newNotificationBase(userId); + const specNotification = { + "category": "CONVERSATION_NOTIFICATION", + "actionable_path": "study/"+studyId, + "resource_id": studyId, + "title": "New notification", + "text": "You were notified in a conversation" + }; + return { + ...baseNotification, + ...specNotification + }; + }, + __newAnnotationNoteObj: function(userId, studyId) { const baseNotification = this.__newNotificationBase(userId); const specNotification = { @@ -122,7 +137,7 @@ qx.Class.define("osparc.notification.Notifications", { return osparc.data.Resources.fetch("notifications", "post", params); }, - postNewStudy: function(userId, studyId) { + pushStudyShared: function(userId, studyId) { const params = { data: this.__newStudyObj(userId, studyId) }; @@ -136,7 +151,14 @@ qx.Class.define("osparc.notification.Notifications", { return osparc.data.Resources.fetch("notifications", "post", params); }, - postNewAnnotationNote: function(userId, studyId) { + pushConversationNotification: function(userId, studyId) { + const params = { + data: this.__newConversationNotificationObj(userId, studyId) + }; + return osparc.data.Resources.fetch("notifications", "post", params); + }, + + pushNewAnnotationNote: function(userId, studyId) { const params = { data: 
this.__newAnnotationNoteObj(userId, studyId) }; diff --git a/services/static-webserver/client/source/class/osparc/notification/NotificationsButton.js b/services/static-webserver/client/source/class/osparc/notification/NotificationsButton.js index 3c1cfd121527..fa08ea2d66bf 100644 --- a/services/static-webserver/client/source/class/osparc/notification/NotificationsButton.js +++ b/services/static-webserver/client/source/class/osparc/notification/NotificationsButton.js @@ -25,12 +25,6 @@ qx.Class.define("osparc.notification.NotificationsButton", { osparc.utils.Utils.setIdToWidget(this, "notificationsButton"); - this.set({ - width: 30, - alignX: "center", - cursor: "pointer" - }); - this._createChildControlImpl("icon"); this._createChildControlImpl("number"); @@ -51,7 +45,7 @@ qx.Class.define("osparc.notification.NotificationsButton", { let control; switch (id) { case "icon": { - control = new qx.ui.basic.Image(); + control = new qx.ui.basic.Image("@FontAwesome5Regular/bell/22"); const iconContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox().set({ alignY: "middle", })).set({ @@ -63,6 +57,24 @@ qx.Class.define("osparc.notification.NotificationsButton", { }); break; } + case "is-active-icon-outline": + control = new qx.ui.basic.Image("@FontAwesome5Solid/circle/12").set({ + textColor: osparc.navigation.NavigationBar.BG_COLOR, + }); + this._add(control, { + bottom: -4, + right: -4, + }); + break; + case "is-active-icon": + control = new qx.ui.basic.Image("@FontAwesome5Solid/circle/8").set({ + textColor: "strong-main", + }); + this._add(control, { + bottom: -2, + right: -2, + }); + break; case "number": control = new qx.ui.basic.Label().set({ backgroundColor: "error", @@ -88,16 +100,14 @@ qx.Class.define("osparc.notification.NotificationsButton", { const notifications = notificationManager.getNotifications(); notifications.forEach(notification => notification.addListener("changeRead", () => this.__updateButton(), this)); - const nUnreadNotifications = notifications.filter(notification => notification.getRead() === false).length; - const icon = this.getChildControl("icon"); - icon.set({ - source: nUnreadNotifications > 0 ? "@FontAwesome5Solid/bell/22" : "@FontAwesome5Regular/bell/22", - textColor: nUnreadNotifications > 0 ? "strong-main" : "text" - }); - const number = this.getChildControl("number"); - number.set({ - value: nUnreadNotifications.toString(), - visibility: nUnreadNotifications > 0 ? "visible" : "excluded" + let nUnreadNotifications = notifications.filter(notification => notification.getRead() === false).length; + [ + this.getChildControl("is-active-icon-outline"), + this.getChildControl("is-active-icon"), + ].forEach(control => { + control.set({ + visibility: nUnreadNotifications > 0 ? 
"visible" : "excluded" + }); }); }, @@ -129,16 +139,7 @@ qx.Class.define("osparc.notification.NotificationsButton", { }, __positionNotificationsContainer: function() { - const bounds = this.getBounds(); - const cel = this.getContentElement(); - if (cel) { - const domEle = cel.getDomElement(); - if (domEle) { - const rect = domEle.getBoundingClientRect(); - bounds.left = parseInt(rect.x); - bounds.top = parseInt(rect.y); - } - } + const bounds = osparc.utils.Utils.getBounds(this); const bottom = bounds.top + bounds.height; const right = bounds.left + bounds.width; this.__notificationsContainer.setPosition(right, bottom); diff --git a/services/static-webserver/client/source/class/osparc/po/Invitations.js b/services/static-webserver/client/source/class/osparc/po/Invitations.js index 0fb58f8aee33..32d1dd9b1c1e 100644 --- a/services/static-webserver/client/source/class/osparc/po/Invitations.js +++ b/services/static-webserver/client/source/class/osparc/po/Invitations.js @@ -61,35 +61,7 @@ qx.Class.define("osparc.po.Invitations", { }, __createInvitationForm: function() { - const form = new qx.ui.form.Form(); - - const userEmail = new qx.ui.form.TextField().set({ - required: true, - placeholder: this.tr("new.user@email.address") - }); - form.add(userEmail, this.tr("User Email")); - - const extraCreditsInUsd = new qx.ui.form.Spinner().set({ - minimum: 0, - maximum: 1000, - value: 100 - }); - form.add(extraCreditsInUsd, this.tr("Welcome Credits (USD)")); - - const withExpiration = new qx.ui.form.CheckBox().set({ - value: false - }); - form.add(withExpiration, this.tr("With expiration")); - - const trialDays = new qx.ui.form.Spinner().set({ - minimum: 1, - maximum: 1000, - value: 1 - }); - withExpiration.bind("value", trialDays, "visibility", { - converter: val => val ? 
"visible" : "excluded" - }); - form.add(trialDays, this.tr("Trial Days")); + const form = osparc.po.UsersPending.createInvitationForm(true); const generateInvitationBtn = new osparc.ui.form.FetchButton(this.tr("Generate")); generateInvitationBtn.set({ @@ -103,14 +75,15 @@ qx.Class.define("osparc.po.Invitations", { generateInvitationBtn.setFetching(true); const params = { data: { - "guest": userEmail.getValue() + "guest": form.getItems()["email"].getValue() } }; - if (extraCreditsInUsd.getValue() > 0) { - params.data["extraCreditsInUsd"] = extraCreditsInUsd.getValue(); + const extraCreditsInUsd = form.getItems()["credits"].getValue(); + if (extraCreditsInUsd > 0) { + params.data["extraCreditsInUsd"] = extraCreditsInUsd; } - if (withExpiration.getValue()) { - params.data["trialAccountDays"] = trialDays.getValue(); + if (form.getItems()["withExpiration"].getValue()) { + params.data["trialAccountDays"] = form.getItems()["trialDays"].getValue(); } osparc.data.Resources.fetch("invitations", "post", params) .then(data => { diff --git a/services/static-webserver/client/source/class/osparc/po/MessageTemplates.js b/services/static-webserver/client/source/class/osparc/po/MessageTemplates.js deleted file mode 100644 index 6a6bdadd674b..000000000000 --- a/services/static-webserver/client/source/class/osparc/po/MessageTemplates.js +++ /dev/null @@ -1,99 +0,0 @@ -/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2023 IT'IS Foundation, https://itis.swiss - - License: - MIT: https://opensource.org/licenses/MIT - - Authors: - * Odei Maiz (odeimaiz) - -************************************************************************ */ - -qx.Class.define("osparc.po.MessageTemplates", { - extend: osparc.po.BaseView, - - members: { - __messageTemplates: null, - - _buildLayout: function() { - const params = { - url: { - productName: osparc.product.Utils.getProductName() - } - }; - osparc.data.Resources.fetch("productMetadata", "get", params) - .then(respData => { - this.__messageTemplates = respData["templates"]; - this.__buildLayout(); - }); - }, - - __buildLayout: function() { - this._removeAll(); - - const templatesSB = new qx.ui.form.SelectBox().set({ - allowGrowX: false - }); - this._add(templatesSB); - - const htmlViewer = this.__htmlViewer = new osparc.editor.HtmlEditor().set({ - minHeight: 400 - }); - htmlViewer.getChildControl("cancel-button").exclude(); - const container = new qx.ui.container.Scroll(); - container.add(htmlViewer, { - flex: 1 - }); - this._add(container, { - flex: 1 - }); - - templatesSB.addListener("changeSelection", e => { - const selection = e.getData(); - if (selection.length) { - const templateId = selection[0].getModel(); - this.__populateMessage(templateId); - } - }, this); - this.__messageTemplates.forEach(template => { - const lItem = new qx.ui.form.ListItem(template.id, null, template.id); - templatesSB.add(lItem); - }); - htmlViewer.addListener("textChanged", e => { - const newTemplate = e.getData(); - const templateId = templatesSB.getSelection()[0].getModel(); - this.__saveTemplate(templateId, newTemplate); - }); - }, - - __populateMessage: function(templateId) { - const found = this.__messageTemplates.find(template => template.id === templateId); - if (found) { - this.__htmlViewer.setText(found.content); - } - }, - - __saveTemplate: function(templateId, newTemplate) { - const productName = osparc.product.Utils.getProductName(); - const params = { - url: { - productName, - templateId 
- }, - data: { - content: newTemplate - } - }; - osparc.data.Resources.fetch("productMetadata", "updateEmailTemplate", params) - .then(() => osparc.FlashMessenger.logAs(this.tr("Template updated"), "INFO")) - .catch(err => osparc.FlashMessenger.logError(err)) - .finally(() => this._buildLayout()); - } - } -}); diff --git a/services/static-webserver/client/source/class/osparc/po/POCenter.js b/services/static-webserver/client/source/class/osparc/po/POCenter.js index 331899482b37..8e68800b2f80 100644 --- a/services/static-webserver/client/source/class/osparc/po/POCenter.js +++ b/services/static-webserver/client/source/class/osparc/po/POCenter.js @@ -27,13 +27,10 @@ qx.Class.define("osparc.po.POCenter", { this.addWidgetToTabs(miniProfile); this.__addActiveUsersPage(); - if (osparc.utils.Utils.isDevelopmentPlatform()) { - this.__addPendingUsersPage(); - } + this.__addReviewUsersPage(); this.__addPreRegistrationPage(); this.__addInvitationsPage(); this.__addProductPage(); - this.__addMsgTemplatesPage(); }, members: { @@ -44,8 +41,8 @@ qx.Class.define("osparc.po.POCenter", { this.addTab(title, iconSrc, users); }, - __addPendingUsersPage: function() { - const title = this.tr("Pending Users"); + __addReviewUsersPage: function() { + const title = this.tr("Review Users"); const iconSrc = "@FontAwesome5Solid/user-plus/22"; const usersPending = new osparc.po.UsersPending(); this.addTab(title, iconSrc, usersPending); @@ -71,12 +68,5 @@ qx.Class.define("osparc.po.POCenter", { const productInfo = new osparc.po.ProductInfo(); this.addTab(title, iconSrc, productInfo); }, - - __addMsgTemplatesPage: function() { - const title = this.tr("Message Templates"); - const iconSrc = "@FontAwesome5Solid/envelope-open/22"; - const productInfo = new osparc.po.MessageTemplates(); - this.addTab(title, iconSrc, productInfo); - } } }); diff --git a/services/static-webserver/client/source/class/osparc/po/Users.js b/services/static-webserver/client/source/class/osparc/po/Users.js index de8164957dda..c34850e11e54 100644 --- a/services/static-webserver/client/source/class/osparc/po/Users.js +++ b/services/static-webserver/client/source/class/osparc/po/Users.js @@ -83,7 +83,7 @@ qx.Class.define("osparc.po.Users", { email: userEmail.getValue() } }; - osparc.data.Resources.fetch("poUsers", "search", params) + osparc.data.Resources.fetch("poUsers", "searchByEmail", params) .then(data => { findingStatus.setValue(data.length + this.tr(" user(s) found")); this.__populateFoundUsersLayout(data); diff --git a/services/static-webserver/client/source/class/osparc/po/UsersPending.js b/services/static-webserver/client/source/class/osparc/po/UsersPending.js index b7d704a4bbc4..f4f0c8f5accf 100644 --- a/services/static-webserver/client/source/class/osparc/po/UsersPending.js +++ b/services/static-webserver/client/source/class/osparc/po/UsersPending.js @@ -20,13 +20,21 @@ qx.Class.define("osparc.po.UsersPending", { extend: osparc.po.BaseView, statics: { - createInvitationForm: function() { + createInvitationForm: function(withEmail = false) { const form = new qx.ui.form.Form(); + if (withEmail) { + const userEmail = new qx.ui.form.TextField().set({ + required: true, + placeholder: "new.user@email.address" + }); + form.add(userEmail, qx.locale.Manager.tr("User Email"), null, "email"); + } + const extraCreditsInUsd = new qx.ui.form.Spinner().set({ minimum: 0, maximum: 1000, - value: 100 + value: osparc.product.Utils.getDefaultWelcomeCredits(), }); form.add(extraCreditsInUsd, qx.locale.Manager.tr("Welcome Credits (USD)"), null, "credits"); @@ 
-48,77 +56,6 @@ qx.Class.define("osparc.po.UsersPending", { return form; }, - createApproveButton: function(email) { - const button = new qx.ui.form.Button(qx.locale.Manager.tr("Approve")); - button.addListener("execute", () => { - const form = this.createInvitationForm(); - const approveBtn = new osparc.ui.form.FetchButton(qx.locale.Manager.tr("Approve")); - approveBtn.set({ - appearance: "form-button" - }); - form.addButton(approveBtn); - const layout = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)); - const invitationForm = new qx.ui.form.renderer.Single(form); - layout.add(invitationForm); - const win = osparc.ui.window.Window.popUpInWindow(layout, email, 350, 150).set({ - clickAwayClose: false, - resizable: false, - showClose: true - }); - win.open(); - approveBtn.addListener("execute", () => { - if (!osparc.data.Permissions.getInstance().canDo("user.invitation.generate", true)) { - return; - } - if (form.validate()) { - approveBtn.setFetching(true); - const params = { - data: { - email, - }, - }; - params.data["invitation"] = {}; - const extraCreditsInUsd = form.getItems()["credits"].getValue(); - if (extraCreditsInUsd > 0) { - params.data["invitation"]["extraCreditsInUsd"] = extraCreditsInUsd; - } - if (form.getItems()["withExpiration"].getValue()) { - params.data["invitation"]["trialAccountDays"] = form.getItems()["trialDays"].getValue(); - } - osparc.data.Resources.fetch("poUsers", "approveUser", params) - .then(() => { - osparc.FlashMessenger.logAs(qx.locale.Manager.tr("User approved"), "INFO"); - }) - .catch(err => osparc.FlashMessenger.logError(err)) - .finally(() => { - approveBtn.setFetching(false); - win.close(); - }); - } - }); - }); - return button; - }, - - createRejectButton: function(email) { - const button = new osparc.ui.form.FetchButton(qx.locale.Manager.tr("Reject")); - button.addListener("execute", () => { - button.setFetching(true); - const params = { - data: { - email, - }, - }; - osparc.data.Resources.fetch("poUsers", "rejectUser", params) - .then(() => { - osparc.FlashMessenger.logAs(qx.locale.Manager.tr("User denied"), "INFO"); - }) - .catch(err => osparc.FlashMessenger.logError(err)) - .finally(() => button.setFetching(false)); - }); - return button; - }, - createResendEmailButton: function(email) { const button = new osparc.ui.form.FetchButton(qx.locale.Manager.tr("Resend Email")); button.addListener("execute", () => { @@ -139,7 +76,7 @@ qx.Class.define("osparc.po.UsersPending", { }, createInfoButton: function(infoMetadata) { - const infoButton = new qx.ui.form.Button(null, "@MaterialIcons/info_outline/16"); + const infoButton = new qx.ui.form.Button(null, "@MaterialIcons/info_outline/14"); infoButton.addListener("execute", () => { const container = new qx.ui.container.Scroll(); container.add(new osparc.ui.basic.JsonTreeWidget(infoMetadata, "pendingUserInfo")); @@ -157,10 +94,7 @@ qx.Class.define("osparc.po.UsersPending", { control = new qx.ui.form.Button(this.tr("Reload")).set({ allowGrowX: false, }); - control.addListener("execute", () => { - this.getChildControl("pending-users-layout").removeAll(); - this.__populatePendingUsersLayout(); - }); + control.addListener("execute", () => this.__reload()); this._add(control); break; case "pending-users-container": @@ -171,6 +105,7 @@ qx.Class.define("osparc.po.UsersPending", { break; case "pending-users-layout": { const grid = new qx.ui.layout.Grid(15, 5); + grid.setColumnMaxWidth(2, 100); // date control = new qx.ui.container.Composite(grid); 
this.getChildControl("pending-users-container").add(control); break; @@ -182,7 +117,7 @@ qx.Class.define("osparc.po.UsersPending", { _buildLayout: function() { this.getChildControl("reload-button"); this.getChildControl("pending-users-container"); - + this.__addHeader(); this.__populatePendingUsersLayout(); }, @@ -216,73 +151,91 @@ qx.Class.define("osparc.po.UsersPending", { row: 0, column: 3, }); - - pendingUsersLayout.add(new qx.ui.basic.Label(this.tr("Info")).set({ - font: "text-14" - }), { - row: 0, - column: 4, - }); - - pendingUsersLayout.add(new qx.ui.basic.Label(this.tr("Action")).set({ - font: "text-14" - }), { - row: 0, - column: 5, - }); }, __addRows: function(pendingUsers) { const pendingUsersLayout = this.getChildControl("pending-users-layout"); + const grid = pendingUsersLayout.getLayout(); let row = 1; pendingUsers.forEach(pendingUser => { - pendingUsersLayout.add(new qx.ui.basic.Label(pendingUser.firstName + " " + pendingUser.lastName), { + grid.setRowAlign(row, "left", "middle"); + + const fullNameLabel = new qx.ui.basic.Label(pendingUser.firstName + " " + pendingUser.lastName).set({ + selectable: true, + }); + pendingUsersLayout.add(fullNameLabel, { row, column: 0, }); - pendingUsersLayout.add(new qx.ui.basic.Label(pendingUser.email), { + + const emailLabel = new qx.ui.basic.Label(pendingUser.email).set({ + selectable: true, + }); + pendingUsersLayout.add(emailLabel, { row, column: 1, }); - pendingUsersLayout.add(new qx.ui.basic.Label(pendingUser.date ? osparc.utils.Utils.formatDateAndTime(new Date(pendingUser.date)) : "-"), { + + let date = null; + switch (pendingUser.accountRequestStatus) { + case "PENDING": + date = pendingUser.preRegistrationCreated ? osparc.utils.Utils.formatDateAndTime(new Date(pendingUser.preRegistrationCreated)) : "-"; + break; + default: + date = pendingUser.accountRequestReviewedAt ? 
osparc.utils.Utils.formatDateAndTime(new Date(pendingUser.accountRequestReviewedAt)) : "-"; + break; + } + pendingUsersLayout.add(new qx.ui.basic.Label(date), { row, column: 2, }); - pendingUsersLayout.add(new qx.ui.basic.Label(pendingUser.accountRequestStatus.toLowerCase()), { + + const statusChip = new osparc.ui.basic.Chip().set({ + label: pendingUser.accountRequestStatus.toLowerCase(), + }); + statusChip.getChildControl("label").set({ + font: "text-12", + }); + pendingUsersLayout.add(statusChip, { row, column: 3, }); + const infoButton = this.self().createInfoButton(pendingUser); pendingUsersLayout.add(infoButton, { row, column: 4, }); + const buttonsLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(5)); pendingUsersLayout.add(buttonsLayout, { row, column: 5, }); - switch (pendingUser.accountRequestStatus) { case "PENDING": { - const approveButton = this.self().createApproveButton(pendingUser.email); + statusChip.setStatusColor(osparc.ui.basic.Chip.STATUS.WARNING); + const approveButton = this.__createApproveButton(pendingUser.email); buttonsLayout.add(approveButton); - const rejectButton = this.self().createRejectButton(pendingUser.email); + const rejectButton = this.__createRejectButton(pendingUser.email); buttonsLayout.add(rejectButton); break; } case "REJECTED": { - const approveButton = this.self().createApproveButton(pendingUser.email); + statusChip.setStatusColor(osparc.ui.basic.Chip.STATUS.ERROR); + const approveButton = this.__createApproveButton(pendingUser.email); + approveButton.setEnabled(false); // avoid changing decision for now buttonsLayout.add(approveButton); break; } case "APPROVED": { - /* + statusChip.setStatusColor(osparc.ui.basic.Chip.STATUS.SUCCESS); const resendEmailButton = this.self().createResendEmailButton(pendingUser.email); + resendEmailButton.setEnabled(false); buttonsLayout.add(resendEmailButton); - */ - const rejectButton = this.self().createRejectButton(pendingUser.email); + const rejectButton = this.__createRejectButton(pendingUser.email); + rejectButton.setEnabled(false); // avoid changing decision for now buttonsLayout.add(rejectButton); break; } @@ -299,12 +252,149 @@ qx.Class.define("osparc.po.UsersPending", { .then(resps => { const pendingUsers = resps[0]; const reviewedUsers = resps[1]; - const pendingUsersLayout = this.getChildControl("pending-users-layout"); - pendingUsersLayout.removeAll(); - this.__addHeader(); + const sortByDate = (a, b) => { + let dateA = new Date(0); // default to epoch if no date is available + if (a.accountRequestStatus === "PENDING" && a.preRegistrationRequestedAt) { + dateA = new Date(a.preRegistrationRequestedAt); + } else if (a.accountRequestReviewedAt) { + dateA = new Date(a.accountRequestReviewedAt); + } + let dateB = new Date(0); // default to epoch if no date is available + if (b.accountRequestStatus === "PENDING" && b.preRegistrationRequestedAt) { + dateB = new Date(b.preRegistrationRequestedAt); + } else if (b.accountRequestReviewedAt) { + dateB = new Date(b.accountRequestReviewedAt); + } + return dateB - dateA; // sort by most recent first + }; + pendingUsers.sort(sortByDate); + reviewedUsers.sort(sortByDate); this.__addRows(pendingUsers.concat(reviewedUsers)); }) .catch(err => osparc.FlashMessenger.logError(err)); - } + }, + + __reload: function() { + this.getChildControl("pending-users-layout").removeAll(); + this.__addHeader(); + this.__populatePendingUsersLayout(); + }, + + __createApproveButton: function(email) { + const button = new qx.ui.form.Button(qx.locale.Manager.tr("Approve")); + 
button.addListener("execute", () => { + const form = this.self().createInvitationForm(false); + const approveBtn = new osparc.ui.form.FetchButton(qx.locale.Manager.tr("Approve")); + approveBtn.set({ + appearance: "form-button" + }); + form.addButton(approveBtn); + const layout = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)); + const invitationForm = new qx.ui.form.renderer.Single(form); + layout.add(invitationForm); + const win = osparc.ui.window.Window.popUpInWindow(layout, email, 350, 150).set({ + clickAwayClose: false, + resizable: false, + showClose: true + }); + win.open(); + approveBtn.addListener("execute", () => { + if (osparc.data.Permissions.getInstance().canDo("user.invitation.generate", true)) { + if (form.validate()) { + const extraCreditsInUsd = form.getItems()["credits"].getValue(); + let trialAccountDays = 0; + if (form.getItems()["withExpiration"].getValue()) { + trialAccountDays = form.getItems()["trialDays"].getValue(); + } + + let msg = `Are you sure you want to approve ${email}`; + if (extraCreditsInUsd) { + msg += ` with ${extraCreditsInUsd}$ worth credits`; + } + if (trialAccountDays > 0) { + msg += ` and ${trialAccountDays} days of trial`; + } + msg += "?"; + const confWin = new osparc.ui.window.Confirmation(msg).set({ + caption: "Approve User", + confirmText: "Approve", + confirmAction: "create" + }); + confWin.center(); + confWin.open(); + confWin.addListener("close", () => { + if (confWin.getConfirmed()) { + approveBtn.setFetching(true); + this.__approveUser(email, form) + .then(() => { + osparc.FlashMessenger.logAs("User approved", "INFO"); + this.__reload(); + }) + .catch(err => osparc.FlashMessenger.logError(err)) + .finally(() => { + approveBtn.setFetching(false); + win.close(); + }); + } + }); + } + } + }); + }); + return button; + }, + + __createRejectButton: function(email) { + const button = new osparc.ui.form.FetchButton("Reject"); + button.addListener("execute", () => { + const msg = `Are you sure you want to reject ${email}.
The operation cannot be reverted`; + const win = new osparc.ui.window.Confirmation(msg).set({ + caption: "Reject User", + confirmText: "Reject", + confirmAction: "delete", + }); + win.center(); + win.open(); + win.addListener("close", () => { + if (win.getConfirmed()) { + button.setFetching(true); + this.__rejectUser(email) + .then(() => { + osparc.FlashMessenger.logAs(qx.locale.Manager.tr("User denied"), "INFO"); + this.__reload(); + }) + .catch(err => osparc.FlashMessenger.logError(err)) + .finally(() => button.setFetching(false)); + } + }); + }); + return button; + }, + + __approveUser: function(email, form) { + const params = { + data: { + email, + }, + }; + params.data["invitation"] = {}; + const extraCreditsInUsd = form.getItems()["credits"].getValue(); + if (extraCreditsInUsd > 0) { + params.data["invitation"]["extraCreditsInUsd"] = extraCreditsInUsd; + } + if (form.getItems()["withExpiration"].getValue()) { + params.data["invitation"]["trialAccountDays"] = form.getItems()["trialDays"].getValue(); + } + return osparc.data.Resources.fetch("poUsers", "approveUser", params); + }, + + __rejectUser: function(email) { + const params = { + data: { + email, + }, + }; + return osparc.data.Resources.fetch("poUsers", "rejectUser", params); + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/product/AboutProduct.js b/services/static-webserver/client/source/class/osparc/product/AboutProduct.js index 2736f96425a3..c08d1951da88 100644 --- a/services/static-webserver/client/source/class/osparc/product/AboutProduct.js +++ b/services/static-webserver/client/source/class/osparc/product/AboutProduct.js @@ -22,7 +22,7 @@ qx.Class.define("osparc.product.AboutProduct", { construct: function() { this.base(arguments, this.tr("About Product")); - const displayName = osparc.store.StaticInfo.getInstance().getDisplayName(); + const displayName = osparc.store.StaticInfo.getDisplayName(); this.setCaption(this.tr("About ") + displayName); this.set({ @@ -138,8 +138,8 @@ qx.Class.define("osparc.product.AboutProduct", { }, __getMailTo: function() { - const supportEmail = osparc.store.VendorInfo.getInstance().getSupportEmail(); - const productName = osparc.store.StaticInfo.getInstance().getDisplayName(); + const supportEmail = osparc.store.VendorInfo.getSupportEmail(); + const productName = osparc.store.StaticInfo.getDisplayName(); return osparc.store.Support.mailToLink(supportEmail, "Support " + productName, false); }, @@ -147,7 +147,7 @@ qx.Class.define("osparc.product.AboutProduct", { const copyrightLink = new osparc.ui.basic.LinkLabel().set({ font: "link-label-14" }); - const vendor = osparc.store.VendorInfo.getInstance().getVendor(); + const vendor = osparc.store.VendorInfo.getVendor(); if (vendor && "url" in vendor && "copyright" in vendor) { copyrightLink.set({ value: vendor.copyright, diff --git a/services/static-webserver/client/source/class/osparc/product/Utils.js b/services/static-webserver/client/source/class/osparc/product/Utils.js index 9f81c38f71a8..c88a4a3c8ffc 100644 --- a/services/static-webserver/client/source/class/osparc/product/Utils.js +++ b/services/static-webserver/client/source/class/osparc/product/Utils.js @@ -155,6 +155,39 @@ qx.Class.define("osparc.product.Utils", { return resourceType; }, + getInstitutionAlias: function() { + switch (osparc.product.Utils.getProductName()) { + case "s4l": + return { + label: qx.locale.Manager.tr("Company Name"), + key: "company", + required: true, + }; + case "s4lacad": + case "s4ldesktopacad": + return { + label: 
qx.locale.Manager.tr("University"), + key: "university", + required: true, + }; + case "tiplite": + return { + label: qx.locale.Manager.tr("University"), + key: "university", + }; + case "tis": + return { + label: qx.locale.Manager.tr("Organization"), + key: "organization", + }; + case "osparc": + return { + label: qx.locale.Manager.tr("Research Group/Organization"), + key: "organization", + }; + } + }, + getLogoPath: function(longLogo = true) { let logosPath = null; const colorManager = qx.theme.manager.Color.getInstance(); @@ -218,7 +251,7 @@ qx.Class.define("osparc.product.Utils", { getCreateAccountAction: function() { const config = osparc.store.Store.getInstance().get("config"); if (config["invitation_required"]) { - const vendor = osparc.store.VendorInfo.getInstance().getVendor(); + const vendor = osparc.store.VendorInfo.getVendor(); if (vendor["invitation_form"]) { // If invitation_required (login_settings) and invitation_form (vendor) return "REQUEST_ACCOUNT_FORM"; @@ -230,7 +263,7 @@ qx.Class.define("osparc.product.Utils", { }, // oSPARC only - hasExportCMisEnabled: function() { + showExportCMis: function() { const product = this.getProductName(); return product === "osparc"; }, @@ -264,6 +297,10 @@ qx.Class.define("osparc.product.Utils", { }, showPreferencesTokens: function() { + if (osparc.data.Permissions.getInstance().isTester()) { + return true; + } + if (this.isProduct("s4llite") || this.isProduct("tis") || this.isProduct("tiplite")) { return false; } @@ -285,7 +322,7 @@ qx.Class.define("osparc.product.Utils", { }, showTemplates: function() { - if (osparc.data.Permissions.getInstance().isTester()) { + if (osparc.data.Permissions.getInstance().isProductOwner()) { return true; } @@ -296,7 +333,7 @@ qx.Class.define("osparc.product.Utils", { }, showPublicProjects: function() { - if (osparc.data.Permissions.getInstance().isTester()) { + if (osparc.data.Permissions.getInstance().isProductOwner()) { return true; } @@ -306,23 +343,50 @@ qx.Class.define("osparc.product.Utils", { return true; }, - showQuality: function() { - if (this.isProduct("osparc")) { - return true; + showFunctions: function() { + if (!osparc.data.Permissions.getInstance().checkFunctionPermissions("readFunctions")) { + return false; } - return false; + + return [ + "osparc", + "s4l", + "s4lacad", + ].includes(osparc.product.Utils.getProductName()); + }, + + showQuality: function() { + return this.isProduct("osparc"); }, showClassifiers: function() { - if (this.getProductName().includes("s4l")) { + return this.isProduct("osparc"); + }, + + showConvertToPipeline: function() { + return this.isS4LProduct() || this.isProduct("osparc"); + }, + + showS4LStore: function() { + const licensesEnabled = osparc.utils.DisabledPlugins.isLicensesEnabled(); + return this.isS4LProduct() && licensesEnabled; + }, + + showComputationalActivity: function() { + if (this.isProduct("s4llite") || this.isProduct("tiplite")) { return false; } return true; }, - showS4LStore: function() { - const licensesEnabled = osparc.store.StaticInfo.getInstance().areLicensesEnabled(); - return this.isS4LProduct() && licensesEnabled; + getDefaultWelcomeCredits: function() { + switch (osparc.product.Utils.getProductName()) { + case "s4l": + case "s4lacad": + return 100; + default: + return 0; + } }, getIconUrl: function(asset = "Default.png") { @@ -382,5 +446,9 @@ qx.Class.define("osparc.product.Utils", { hasNewPlusButton: function() { return Boolean(osparc.store.Products.getInstance().getPlusButtonUiConfig()); }, + + groupServices: function() { + return 
Boolean(osparc.store.Products.getInstance().getGroupedServicesUiConfig()); + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/product/quickStart/s4l/Welcome.js b/services/static-webserver/client/source/class/osparc/product/quickStart/s4l/Welcome.js index 8e4386e22586..75c6c49de496 100644 --- a/services/static-webserver/client/source/class/osparc/product/quickStart/s4l/Welcome.js +++ b/services/static-webserver/client/source/class/osparc/product/quickStart/s4l/Welcome.js @@ -55,7 +55,7 @@ qx.Class.define("osparc.product.quickStart.s4l.Welcome", { }); content.add(intro1); - const welcomeText = this.tr("Welcome onboard ") + osparc.utils.Utils.capitalize(osparc.auth.Data.getInstance().getFriendlyUsername()) + ","; + const welcomeText = this.tr("Welcome onboard ") + osparc.utils.Utils.capitalize(osparc.auth.Data.getInstance().getFriendlyUserName()) + ","; const welcome = osparc.product.quickStart.Utils.createLabel(welcomeText); content.add(welcome); @@ -63,10 +63,10 @@ qx.Class.define("osparc.product.quickStart.s4l.Welcome", { Sim4Life is a revolutionary simulation platform, combining computable human phantoms with the most powerful physics solvers and the most advanced tissue models, for directly analyzing biological real-world phenomena and complex technical devices in a validated biological and anatomical environment.\
<br>\
\ - In order to facilitate the introduction to the platform, we have some Guided Tours that can be found under the User Menu.\ + In order to facilitate the introduction to the platform, we have some Guided Tours that can be found in the Help & Support section.\
<br>\
\ - For more specific technical information, please refer to the Manuals on the Navigation Bar.\ + For more specific technical information, please refer to the Manuals.\ "); const intro2 = osparc.product.quickStart.Utils.createLabel(introText); content.add(intro2); diff --git a/services/static-webserver/client/source/class/osparc/product/quickStart/s4lacad/Welcome.js b/services/static-webserver/client/source/class/osparc/product/quickStart/s4lacad/Welcome.js index 49f5fa773dc6..fc3544d3c472 100644 --- a/services/static-webserver/client/source/class/osparc/product/quickStart/s4lacad/Welcome.js +++ b/services/static-webserver/client/source/class/osparc/product/quickStart/s4lacad/Welcome.js @@ -55,7 +55,7 @@ qx.Class.define("osparc.product.quickStart.s4lacad.Welcome", { }); content.add(intro1); - const welcomeText = this.tr("Welcome onboard ") + osparc.utils.Utils.capitalize(osparc.auth.Data.getInstance().getFriendlyUsername()) + ","; + const welcomeText = this.tr("Welcome onboard ") + osparc.utils.Utils.capitalize(osparc.auth.Data.getInstance().getFriendlyUserName()) + ","; const welcome = osparc.product.quickStart.Utils.createLabel(welcomeText); content.add(welcome); @@ -63,10 +63,10 @@ qx.Class.define("osparc.product.quickStart.s4lacad.Welcome", { Sim4Life is a revolutionary simulation platform, combining computable human phantoms with the most powerful physics solvers and the most advanced tissue models, for directly analyzing biological real-world phenomena and complex technical devices in a validated biological and anatomical environment.\
<br>\
\ - In order to facilitate the introduction to the platform, we have some Guided Tours that can be found under the User Menu.\ + In order to facilitate the introduction to the platform, we have some Guided Tours that can be found in the Help & Support section.\
<br>\
\ - For more specific technical information, please refer to the Manuals on the Navigation Bar.\ + For more specific technical information, please refer to the Manuals.\ "); const intro2 = osparc.product.quickStart.Utils.createLabel(introText); content.add(intro2); diff --git a/services/static-webserver/client/source/class/osparc/product/quickStart/s4llite/Welcome.js b/services/static-webserver/client/source/class/osparc/product/quickStart/s4llite/Welcome.js index 47fd5da29ab4..4312704419a6 100644 --- a/services/static-webserver/client/source/class/osparc/product/quickStart/s4llite/Welcome.js +++ b/services/static-webserver/client/source/class/osparc/product/quickStart/s4llite/Welcome.js @@ -25,7 +25,7 @@ qx.Class.define("osparc.product.quickStart.s4llite.Welcome", { members: { _populateCard: function() { - const welcomeText = this.tr("Welcome onboard ") + osparc.utils.Utils.capitalize(osparc.auth.Data.getInstance().getFriendlyUsername()) + ","; + const welcomeText = this.tr("Welcome onboard ") + osparc.utils.Utils.capitalize(osparc.auth.Data.getInstance().getFriendlyUserName()) + ","; const welcome = osparc.product.quickStart.Utils.createLabel(welcomeText); this._add(welcome); diff --git a/services/static-webserver/client/source/class/osparc/product/quickStart/tis/S4LPostPro.js b/services/static-webserver/client/source/class/osparc/product/quickStart/tis/S4LPostPro.js index a667a6b4b078..b8be1c6d1ea9 100644 --- a/services/static-webserver/client/source/class/osparc/product/quickStart/tis/S4LPostPro.js +++ b/services/static-webserver/client/source/class/osparc/product/quickStart/tis/S4LPostPro.js @@ -22,7 +22,7 @@ qx.Class.define("osparc.product.quickStart.tis.S4LPostPro", { let title = ""; if (osparc.product.Utils.isProduct("tiplite")) { const notAvailableText = this.tr("Not available in"); - title += ` (${notAvailableText} ${osparc.store.StaticInfo.getInstance().getDisplayName()})

`; + title += ` (${notAvailableText} ${osparc.store.StaticInfo.getDisplayName()})<br><br>

`; } title += this.tr("Sim4Life Post Processing"); this.base(arguments, title); diff --git a/services/static-webserver/client/source/class/osparc/product/quickStart/tis/Welcome.js b/services/static-webserver/client/source/class/osparc/product/quickStart/tis/Welcome.js index 25f3c6444aa1..afbff06ab41e 100644 --- a/services/static-webserver/client/source/class/osparc/product/quickStart/tis/Welcome.js +++ b/services/static-webserver/client/source/class/osparc/product/quickStart/tis/Welcome.js @@ -25,7 +25,7 @@ qx.Class.define("osparc.product.quickStart.tis.Welcome", { members: { _populateCard: function() { - const welcomeText = this.tr("Welcome onboard ") + osparc.utils.Utils.capitalize(osparc.auth.Data.getInstance().getFriendlyUsername()) + ","; + const welcomeText = this.tr("Welcome onboard ") + osparc.utils.Utils.capitalize(osparc.auth.Data.getInstance().getFriendlyUserName()) + ","; const welcome = osparc.product.quickStart.Utils.createLabel(welcomeText); this._add(welcome); diff --git a/services/static-webserver/client/source/class/osparc/service/PricingUnitsList.js b/services/static-webserver/client/source/class/osparc/service/PricingUnitsList.js index 9850b0984552..4149a6adc086 100644 --- a/services/static-webserver/client/source/class/osparc/service/PricingUnitsList.js +++ b/services/static-webserver/client/source/class/osparc/service/PricingUnitsList.js @@ -47,7 +47,7 @@ qx.Class.define("osparc.service.PricingUnitsList", { }, __fetchUnits: function() { - osparc.store.Pricing.getInstance().fetchPricingPlansService(this.__serviceMetadata["key"], this.__serviceMetadata["version"]) + osparc.store.Services.getPricingPlan(this.__serviceMetadata["key"], this.__serviceMetadata["version"]) .then(data => this.__populateList(data["pricingUnits"])) .catch(err => { console.error(err); diff --git a/services/static-webserver/client/source/class/osparc/service/StatusUI.js b/services/static-webserver/client/source/class/osparc/service/StatusUI.js index 2203da004c44..f7397027e794 100644 --- a/services/static-webserver/client/source/class/osparc/service/StatusUI.js +++ b/services/static-webserver/client/source/class/osparc/service/StatusUI.js @@ -192,18 +192,19 @@ qx.Class.define("osparc.service.StatusUI", { // ports case "modified": - return "busy-orange"; + return "failed-red"; case "up-to-date": return "ready-green"; // output case "busy": - case "out-of-date": return "busy-orange"; - /* + case "not-available": + return "workbench-edge"; + case "out-of-date": + return "failed-red"; case "up-to-date": return "ready-green"; - */ default: return "text"; @@ -237,9 +238,7 @@ qx.Class.define("osparc.service.StatusUI", { const chip = new osparc.ui.basic.Chip().set({ label: osparc.service.Utils.DEPRECATED_SERVICE_TEXT, icon: osparc.service.StatusUI.getIconSource("deprecated"), - textColor: "text-on-warning", - backgroundColor: osparc.service.StatusUI.getColor("deprecated"), - allowGrowX: false + statusColor: "warning", }); return chip; }, @@ -248,9 +247,7 @@ qx.Class.define("osparc.service.StatusUI", { const chip = new osparc.ui.basic.Chip().set({ label: osparc.service.Utils.RETIRED_SERVICE_TEXT, icon: osparc.service.StatusUI.getIconSource("retired"), - textColor: "text-on-warning", - backgroundColor: osparc.service.StatusUI.getColor("retired"), - allowGrowX: false + statusColor: "error", }); return chip; } diff --git a/services/static-webserver/client/source/class/osparc/service/Utils.js b/services/static-webserver/client/source/class/osparc/service/Utils.js index 82d7ec021b97..dfa512fdf51d 100644 --- 
a/services/static-webserver/client/source/class/osparc/service/Utils.js +++ b/services/static-webserver/client/source/class/osparc/service/Utils.js @@ -140,13 +140,6 @@ qx.Class.define("osparc.service.Utils", { return ""; }, - canIWrite: function(serviceAccessRights) { - const groupsStore = osparc.store.Groups.getInstance(); - const orgIDs = groupsStore.getOrganizationIds(); - orgIDs.push(groupsStore.getMyGroupId()); - return osparc.share.CollaboratorsService.canGroupsWrite(serviceAccessRights, orgIDs); - }, - DEPRECATED_SERVICE_TEXT: qx.locale.Manager.tr("Service deprecated"), DEPRECATED_DYNAMIC_INSTRUCTIONS: qx.locale.Manager.tr("Please go back to the dashboard and Update the Service or download its data and upload it to an updated version"), DEPRECATED_COMPUTATIONAL_INSTRUCTIONS: qx.locale.Manager.tr("Please instantiate an updated version"), @@ -156,7 +149,7 @@ qx.Class.define("osparc.service.Utils", { DEPRECATED_AUTOUPDATABLE_INSTRUCTIONS: qx.locale.Manager.tr("Please Stop the Service and then Update it"), RETIRED_AUTOUPDATABLE_INSTRUCTIONS: qx.locale.Manager.tr("Please Update the Service"), - extractVersionFromHistory: function(metadata) { + getHistoryEntry: function(metadata) { if (metadata["history"]) { const found = metadata["history"].find(historyEntry => historyEntry["version"] === metadata["version"]); return found; @@ -164,8 +157,16 @@ qx.Class.define("osparc.service.Utils", { return null; }, + extractReleasedDateFromHistory: function(metadata) { + const historyEntry = this.getHistoryEntry(metadata); + if (historyEntry && historyEntry["released"]) { + return historyEntry["released"]; + } + return null; + }, + isUpdatable: function(metadata) { - const historyEntry = this.extractVersionFromHistory(metadata); + const historyEntry = this.getHistoryEntry(metadata); if (historyEntry && historyEntry["compatibility"] && historyEntry["compatibility"]["canUpdateTo"]) { const latestCompatible = historyEntry["compatibility"]["canUpdateTo"]; return latestCompatible && (metadata["key"] !== latestCompatible["key"] || metadata["version"] !== latestCompatible["version"]); @@ -182,7 +183,7 @@ qx.Class.define("osparc.service.Utils", { // this works for service latest return new Date(metadata["release"]["retired"]); } - const historyEntry = this.extractVersionFromHistory(metadata); + const historyEntry = this.getHistoryEntry(metadata); if (historyEntry && "retired" in historyEntry && historyEntry["retired"]) { return new Date(historyEntry["retired"]); } diff --git a/services/static-webserver/client/source/class/osparc/share/AddCollaborators.js b/services/static-webserver/client/source/class/osparc/share/AddCollaborators.js index 8f21a4e1958c..9dc2a7192c56 100644 --- a/services/static-webserver/client/source/class/osparc/share/AddCollaborators.js +++ b/services/static-webserver/client/source/class/osparc/share/AddCollaborators.js @@ -16,7 +16,7 @@ ************************************************************************ */ /** - * Widget that offers the "Share with..." button to add collaborators to a resource. + * Widget that offers the "Share" button to add collaborators to a resource. * It also provides the "Check Organization..." direct access. * As output, once the user select n gid in the NewCollaboratorsManager pop up window, * an event is fired with the list of collaborators. 
@@ -60,7 +60,9 @@ qx.Class.define("osparc.share.AddCollaborators", { this._add(control); break; case "share-with": - control = new qx.ui.form.Button(this.tr("Share with...")).set({ + control = new qx.ui.form.Button().set({ + icon: "@FontAwesome5Solid/share-alt/12", + label: this.tr("Share"), appearance: "form-button", alignX: "left", allowGrowX: false @@ -107,7 +109,7 @@ qx.Class.define("osparc.share.AddCollaborators", { message, } = e.getData(); collaboratorsManager.close(); - osparc.store.Study.sendShareEmails(this.__serializedDataCopy, selectedEmails, newAccessRights, message) + osparc.store.Study.getInstance().sendShareEmails(this.__serializedDataCopy, selectedEmails, newAccessRights, message) .then(() => osparc.FlashMessenger.logAs(this.tr("Emails sent"), "INFO")) .catch(err => osparc.FlashMessenger.logError(err)); }, this); diff --git a/services/static-webserver/client/source/class/osparc/share/Collaborators.js b/services/static-webserver/client/source/class/osparc/share/Collaborators.js index 63509d88871b..5ab5c84a5a13 100644 --- a/services/static-webserver/client/source/class/osparc/share/Collaborators.js +++ b/services/static-webserver/client/source/class/osparc/share/Collaborators.js @@ -43,6 +43,15 @@ qx.Class.define("osparc.share.Collaborators", { }, statics: { + sortProductGroupsFirst: function(a, b) { + const collabTypeOrder = osparc.store.Groups.COLLAB_TYPE_ORDER; + const indexA = collabTypeOrder.indexOf(a["collabType"]); + const indexB = collabTypeOrder.indexOf(b["collabType"]); + const posA = indexA === -1 ? Number.MAX_SAFE_INTEGER : indexA; + const posB = indexB === -1 ? Number.MAX_SAFE_INTEGER : indexB; + return posA - posB; + }, + sortByAccessRights: function(aAccessRights, bAccessRights) { if (aAccessRights["delete"] !== bAccessRights["delete"]) { return bAccessRights["delete"] - aAccessRights["delete"]; @@ -57,9 +66,16 @@ qx.Class.define("osparc.share.Collaborators", { }, sortStudyOrServiceCollabs: function(a, b) { + // product related groups first + let sorted = null; + sorted = this.self().sortProductGroupsFirst(a, b); + if (sorted !== 0) { + return sorted; + } + + // then by access rights const aAccessRights = a["accessRights"]; const bAccessRights = b["accessRights"]; - let sorted = null; if ("delete" in aAccessRights) { // studies sorted = this.self().sortByAccessRights(aAccessRights, bAccessRights); @@ -185,7 +201,7 @@ qx.Class.define("osparc.share.Collaborators", { }, __canIShare: function() { - if (this._resourceType === "study" && this._serializedDataCopy["workspaceId"]) { + if (this._serializedDataCopy["workspaceId"] && this._resourceType === "study") { // Access Rights are set at workspace level return false; } @@ -198,8 +214,11 @@ qx.Class.define("osparc.share.Collaborators", { case "hypertool": canIShare = osparc.data.model.Study.canIWrite(this._serializedDataCopy["accessRights"]); break; + case "function": + canIShare = osparc.data.model.Function.canIWrite(this._serializedDataCopy["accessRights"]); + break; case "service": - canIShare = osparc.service.Utils.canIWrite(this._serializedDataCopy["accessRights"]); + canIShare = osparc.data.model.Service.canIWrite(this._serializedDataCopy["accessRights"]); break; case "workspace": canIShare = osparc.share.CollaboratorsWorkspace.canIDelete(this._serializedDataCopy["myAccessRights"]); @@ -224,8 +243,11 @@ qx.Class.define("osparc.share.Collaborators", { case "hypertool": fullOptions = osparc.data.model.Study.canIDelete(this._serializedDataCopy["accessRights"]); break; + case "function": + fullOptions = 
osparc.data.model.Function.canIWrite(this._serializedDataCopy["accessRights"]); + break; case "service": - fullOptions = osparc.service.Utils.canIWrite(this._serializedDataCopy["accessRights"]); + fullOptions = osparc.data.model.Service.canIWrite(this._serializedDataCopy["accessRights"]); break; case "workspace": fullOptions = osparc.share.CollaboratorsWorkspace.canIDelete(this._serializedDataCopy["myAccessRights"]); @@ -244,17 +266,18 @@ qx.Class.define("osparc.share.Collaborators", { case "template": case "tutorial": case "hypertool": + case "tag": rolesLayout = osparc.data.Roles.createRolesStudyInfo(); break; + case "function": + rolesLayout = osparc.data.Roles.createRolesFunctionInfo(); + break; case "service": rolesLayout = osparc.data.Roles.createRolesServicesInfo(); break; case "workspace": rolesLayout = osparc.data.Roles.createRolesWorkspaceInfo(); break; - case "tag": - rolesLayout = osparc.data.Roles.createRolesStudyInfo(); - break; } return rolesLayout; }, @@ -286,10 +309,13 @@ qx.Class.define("osparc.share.Collaborators", { __createCollaboratorsListSection: function() { const vBox = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); - const header = new qx.ui.container.Composite(new qx.ui.layout.HBox()); + const header = new qx.ui.container.Composite(new qx.ui.layout.HBox(5)); - const label = new qx.ui.basic.Label(this.tr("Shared with")); - label.set({allowGrowX: true}); + const label = new qx.ui.basic.Label(this.tr("Shared with:")); + label.set({ + allowGrowX: true, + alignY: "middle", + }); header.add(label, { flex: 1 }); @@ -306,7 +332,8 @@ qx.Class.define("osparc.share.Collaborators", { decorator: "no-border", spacing: 3, width: 150, - padding: 0 + padding: 0, + backgroundColor: "transparent", }); const collaboratorsModel = this.__collaboratorsModel = new qx.data.Array(); @@ -323,12 +350,13 @@ qx.Class.define("osparc.share.Collaborators", { ctrl.bindProperty("resourceType", "resourceType", null, item, id); // Resource type ctrl.bindProperty("accessRights", "accessRights", null, item, id); ctrl.bindProperty("showOptions", "showOptions", null, item, id); + // handle separator + ctrl.bindProperty("isSeparator", "enabled", { + converter: val => !val // disable clicks on separator + }, item, id); }, configureItem: item => { - item.getChildControl("thumbnail").getContentElement() - .setStyles({ - "border-radius": "16px" - }); + item.getChildControl("thumbnail").setDecorator("circled"); item.addListener("promoteToEditor", e => { const orgMember = e.getData(); this._promoteToEditor(orgMember, item); @@ -348,7 +376,12 @@ qx.Class.define("osparc.share.Collaborators", { item.addListener("removeMember", e => { const orgMember = e.getData(); if ( - ["study", "template", "tutorial", "hypertool"].includes(this._resourceType) && + [ + "study", + "template", + "tutorial", + "hypertool", + ].includes(this._resourceType) && !osparc.share.CollaboratorsStudy.canCollaboratorBeRemoved(this._serializedDataCopy, orgMember["gid"]) ) { let msg = this.tr("Collaborator can't be removed:"); @@ -364,6 +397,15 @@ qx.Class.define("osparc.share.Collaborators", { } this._deleteMember(orgMember, item); }); + item.addListener("changeEnabled", e => { + if (!e.getData()) { + item.set({ + minHeight: 1, + maxHeight: 1, + decorator: "separator-strong", + }); + } + }); } }); vBox.add(collaboratorsUIList, { @@ -376,7 +418,12 @@ qx.Class.define("osparc.share.Collaborators", { __getLeaveStudyButton: function() { const myGid = osparc.auth.Data.getInstance().getGroupId(); if ( - ["study", "template", "tutorial", 
"hypertool"].includes(this._resourceType) && + [ + "study", + "template", + "tutorial", + "hypertool", + ].includes(this._resourceType) && osparc.share.CollaboratorsStudy.canCollaboratorBeRemoved(this._serializedDataCopy, myGid) ) { const leaveText = this.tr("Leave") + " " + osparc.product.Utils.getStudyAlias({ @@ -413,16 +460,14 @@ qx.Class.define("osparc.share.Collaborators", { // reload list this.__collaboratorsModel.removeAll(); + const usersStore = osparc.store.Users.getInstance(); const groupsStore = osparc.store.Groups.getInstance(); - const everyoneGIds = [ - groupsStore.getEveryoneProductGroup().getGroupId(), - groupsStore.getEveryoneGroup().getGroupId() - ]; + const everyoneGroupIds = groupsStore.getEveryoneGroupIds(); + const supportGroup = groupsStore.getSupportGroup(); + const allGroups = groupsStore.getAllGroups(); + const showOptions = this.__canIChangePermissions(); const accessRights = this._serializedDataCopy["accessRights"]; const collaboratorsList = []; - const showOptions = this.__canIChangePermissions(); - const allGroups = groupsStore.getAllGroups(); - const usersStore = osparc.store.Users.getInstance(); for (let i=0; i this.__collaboratorsModel.append(qx.data.marshal.Json.createModel(c))); + + // insert a separator between product and non-product groups + const productGroup = [ + osparc.store.Groups.COLLAB_TYPE.EVERYONE, + osparc.store.Groups.COLLAB_TYPE.SUPPORT, + ]; + const hasProductGroup = collaboratorsList.some(c => productGroup.includes(c.collabType)); + const hasNonProductGroup = collaboratorsList.some(c => !productGroup.includes(c.collabType)); + let separatorInserted = false; + collaboratorsList.forEach(c => { + const isProductGroup = productGroup.includes(c.collabType); + // Only insert separator if both sides exist + if (!isProductGroup && hasProductGroup && hasNonProductGroup && !separatorInserted) { + const separator = { + isSeparator: true + }; + this.__collaboratorsModel.append(qx.data.marshal.Json.createModel(separator)); + separatorInserted = true; + } + this.__collaboratorsModel.append(qx.data.marshal.Json.createModel(c)); + }); }, _addEditors: function(gids) { diff --git a/services/static-webserver/client/source/class/osparc/share/CollaboratorsFunction.js b/services/static-webserver/client/source/class/osparc/share/CollaboratorsFunction.js new file mode 100644 index 000000000000..fe6a1222923f --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/share/CollaboratorsFunction.js @@ -0,0 +1,158 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.share.CollaboratorsFunction", { + extend: osparc.share.Collaborators, + + /** + * @param functionData {Object} Object containing the serialized function Data + */ + construct: function(functionData) { + this._resourceType = "function"; + const functionDataCopy = osparc.utils.Utils.deepCloneObject(functionData); + + this.base(arguments, functionDataCopy); + }, + + statics: { + canGroupsWrite: function(accessRights, gIds) { + let canWrite = false; + for (let i=0; i { + newCollaborators[gid] = newAccessRights; + }); + osparc.store.Functions.addCollaborators(this._serializedDataCopy, newCollaborators) + .then(() => { + const text = resourceAlias + this.tr(" 
successfully shared"); + osparc.FlashMessenger.logAs(text); + this.fireDataEvent("updateAccessRights", this._serializedDataCopy); + this._reloadCollaboratorsList(); + }) + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while sharing the ") + resourceAlias)); + }, + + _deleteMember: function(collaborator, item) { + if (item) { + item.setEnabled(false); + } + + return osparc.store.Functions.removeCollaborator(this._serializedDataCopy, collaborator["gid"]) + .then(() => { + this.fireDataEvent("updateAccessRights", this._serializedDataCopy); + osparc.FlashMessenger.logAs(collaborator["name"] + this.tr(" successfully removed")); + this._reloadCollaboratorsList(); + }) + .catch(err => osparc.FlashMessenger.logError(err, this.tr("Something went wrong while removing ") + collaborator["name"])) + .finally(() => { + if (item) { + item.setEnabled(true); + } + }); + }, + + __make: function(collaboratorGId, newAccessRights, successMsg, failureMsg, item) { + item.setEnabled(false); + + osparc.store.Functions.updateCollaborator(this._serializedDataCopy, collaboratorGId, newAccessRights) + .then(() => { + this.fireDataEvent("updateAccessRights", this._serializedDataCopy); + osparc.FlashMessenger.logAs(successMsg); + this._reloadCollaboratorsList(); + }) + .catch(err => osparc.FlashMessenger.logError(err, failureMsg)) + .finally(() => { + if (item) { + item.setEnabled(true); + } + }); + }, + + _promoteToEditor: function(collaborator, item) { + const writeAccessRole = osparc.data.Roles.FUNCTION["write"]; + this.__make( + collaborator["gid"], + writeAccessRole.accessRights, + this.tr(`Successfully promoted to ${writeAccessRole.label}`), + this.tr(`Something went wrong while promoting to ${writeAccessRole.label}`), + item + ); + }, + + _promoteToOwner: function(collaborator, item) { + osparc.FlashMessenger.logAs(this.tr("Operation not available"), "WARNING"); + }, + + _demoteToUser: async function(collaborator, item) { + const readAccessRole = osparc.data.Roles.FUNCTION["read"]; + const groupId = collaborator["gid"]; + const demoteToUser = (gid, itm) => { + this.__make( + gid, + readAccessRole.accessRights, + this.tr(`Successfully demoted to ${readAccessRole.label}`), + this.tr(`Something went wrong while demoting to ${readAccessRole.label}`), + itm + ); + }; + + const organization = osparc.store.Groups.getInstance().getOrganization(groupId); + if (organization) { + const msg = this.tr(`Demoting to ${readAccessRole.label} will remove write access to all the members of the Organization. 
Are you sure?`); + const win = new osparc.ui.window.Confirmation(msg).set({ + caption: this.tr("Demote"), + confirmAction: "delete", + confirmText: this.tr("Yes") + }); + win.center(); + win.open(); + win.addListener("close", () => { + if (win.getConfirmed()) { + demoteToUser(groupId, item); + } + }, this); + } else { + demoteToUser(groupId, item); + } + }, + + _demoteToEditor: function(collaborator, item) { + osparc.FlashMessenger.logAs(this.tr("Operation not available"), "WARNING"); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/share/CollaboratorsService.js b/services/static-webserver/client/source/class/osparc/share/CollaboratorsService.js index cb0b848a92f5..02d40a0f5ce2 100644 --- a/services/static-webserver/client/source/class/osparc/share/CollaboratorsService.js +++ b/services/static-webserver/client/source/class/osparc/share/CollaboratorsService.js @@ -54,6 +54,7 @@ qx.Class.define("osparc.share.CollaboratorsService", { return; } + // default access rights const readAccessRole = osparc.data.Roles.SERVICES["read"]; const newAccessRights = this._serializedDataCopy["accessRights"]; gids.forEach(gid => { diff --git a/services/static-webserver/client/source/class/osparc/share/CollaboratorsStudy.js b/services/static-webserver/client/source/class/osparc/share/CollaboratorsStudy.js index a595b47320df..521cd40cf624 100644 --- a/services/static-webserver/client/source/class/osparc/share/CollaboratorsStudy.js +++ b/services/static-webserver/client/source/class/osparc/share/CollaboratorsStudy.js @@ -83,9 +83,10 @@ qx.Class.define("osparc.share.CollaboratorsStudy", { return; } - const readAccessRole = osparc.data.Roles.STUDY["read"]; - const writeAccessRole = osparc.data.Roles.STUDY["write"]; if (!newAccessRights) { + // default access rights + const readAccessRole = osparc.data.Roles.STUDY["read"]; + const writeAccessRole = osparc.data.Roles.STUDY["write"]; newAccessRights = this._resourceType === "study" ? 
writeAccessRole.accessRights : readAccessRole.accessRights; } const resourceAlias = osparc.product.Utils.resourceTypeToAlias(this._resourceType, {firstUpperCase: true}); @@ -93,7 +94,7 @@ qx.Class.define("osparc.share.CollaboratorsStudy", { gids.forEach(gid => { newCollaborators[gid] = newAccessRights; }); - osparc.store.Study.addCollaborators(this._serializedDataCopy, newCollaborators) + osparc.store.Study.getInstance().addCollaborators(this._serializedDataCopy, newCollaborators) .then(() => { const text = resourceAlias + this.tr(" successfully shared"); osparc.FlashMessenger.logAs(text); @@ -111,7 +112,7 @@ qx.Class.define("osparc.share.CollaboratorsStudy", { item.setEnabled(false); } - return osparc.store.Study.removeCollaborator(this._serializedDataCopy, collaborator["gid"]) + return osparc.store.Study.getInstance().removeCollaborator(this._serializedDataCopy, collaborator["gid"]) .then(() => { this.fireDataEvent("updateAccessRights", this._serializedDataCopy); osparc.FlashMessenger.logAs(collaborator["name"] + this.tr(" successfully removed")); @@ -128,7 +129,7 @@ qx.Class.define("osparc.share.CollaboratorsStudy", { __make: function(collaboratorGId, newAccessRights, successMsg, failureMsg, item) { item.setEnabled(false); - osparc.store.Study.updateCollaborator(this._serializedDataCopy, collaboratorGId, newAccessRights) + osparc.store.Study.getInstance().updateCollaborator(this._serializedDataCopy, collaboratorGId, newAccessRights) .then(() => { this.fireDataEvent("updateAccessRights", this._serializedDataCopy); osparc.FlashMessenger.logAs(successMsg); @@ -217,7 +218,7 @@ qx.Class.define("osparc.share.CollaboratorsStudy", { const uid = potentialCollaborators[gid].getUserId(); switch (this._resourceType) { case "study": - osparc.notification.Notifications.postNewStudy(uid, this._serializedDataCopy["uuid"]); + osparc.notification.Notifications.pushStudyShared(uid, this._serializedDataCopy["uuid"]); break; case "template": case "tutorial": @@ -235,32 +236,7 @@ qx.Class.define("osparc.share.CollaboratorsStudy", { if (gids.length === 0) { return; } - - const promises = []; - gids.forEach(gid => { - const params = { - url: { - "studyId": this._serializedDataCopy["uuid"], - "gid": gid - } - }; - promises.push(osparc.data.Resources.fetch("studies", "checkShareePermissions", params)); - }); - Promise.all(promises) - .then(values => { - const noAccessible = values.filter(value => value["accessible"] === false); - if (noAccessible.length) { - const shareePermissions = new osparc.share.ShareePermissions(noAccessible); - const win = osparc.ui.window.Window.popUpInWindow(shareePermissions, this.tr("Sharee permissions"), 500, 500, "@FontAwesome5Solid/exclamation-triangle/14").set({ - clickAwayClose: false, - resizable: true, - showClose: true - }); - win.getChildControl("icon").set({ - textColor: "warning-yellow" - }); - } - }); + osparc.share.ShareePermissions.checkShareePermissions(this._serializedDataCopy["uuid"], gids); } } }); diff --git a/services/static-webserver/client/source/class/osparc/share/CollaboratorsTag.js b/services/static-webserver/client/source/class/osparc/share/CollaboratorsTag.js index e496db929068..eca1fbfa3e33 100644 --- a/services/static-webserver/client/source/class/osparc/share/CollaboratorsTag.js +++ b/services/static-webserver/client/source/class/osparc/share/CollaboratorsTag.js @@ -48,6 +48,7 @@ qx.Class.define("osparc.share.CollaboratorsTag", { return; } + // default access rights const readAccessRole = osparc.data.Roles.STUDY["read"]; const newCollaborators = {}; 
gids.forEach(gid => newCollaborators[gid] = readAccessRole.accessRights); diff --git a/services/static-webserver/client/source/class/osparc/share/CollaboratorsWorkspace.js b/services/static-webserver/client/source/class/osparc/share/CollaboratorsWorkspace.js index 87d75b011bb8..21b7543bb2ab 100644 --- a/services/static-webserver/client/source/class/osparc/share/CollaboratorsWorkspace.js +++ b/services/static-webserver/client/source/class/osparc/share/CollaboratorsWorkspace.js @@ -44,6 +44,7 @@ qx.Class.define("osparc.share.CollaboratorsWorkspace", { return; } + // default access rights const writeAccessRole = osparc.data.Roles.WORKSPACE["write"]; const newCollaborators = {}; gids.forEach(gid => newCollaborators[gid] = writeAccessRole.accessRights); diff --git a/services/static-webserver/client/source/class/osparc/share/NewCollaboratorsManager.js b/services/static-webserver/client/source/class/osparc/share/NewCollaboratorsManager.js index c8c21429d733..79ed43ca52f3 100644 --- a/services/static-webserver/client/source/class/osparc/share/NewCollaboratorsManager.js +++ b/services/static-webserver/client/source/class/osparc/share/NewCollaboratorsManager.js @@ -17,11 +17,12 @@ qx.Class.define("osparc.share.NewCollaboratorsManager", { allowMaximize: false, showMinimize: false, showMaximize: false, + resizable: true, autoDestroy: true, modal: true, - width: 350, - maxHeight: 500, - clickAwayClose: true + width: 430, + height: 500, + clickAwayClose: true, }); this.__resourceData = resourceData; @@ -34,7 +35,7 @@ qx.Class.define("osparc.share.NewCollaboratorsManager", { this.__potentialCollaborators = {}; this.__reloadPotentialCollaborators(); - this.__shareWithEmailEnabled = this.__resourceData["resourceType"] === "study"; + this.__shareWithEmailEnabled = osparc.utils.Utils.isDevelopmentPlatform() && this.__resourceData["resourceType"] === "study"; if (preselectCollaboratorGids && preselectCollaboratorGids.length) { preselectCollaboratorGids.forEach(preselectCollaboratorGid => { @@ -55,6 +56,14 @@ qx.Class.define("osparc.share.NewCollaboratorsManager", { "shareWithEmails": "qx.event.type.Data", }, + properties: { + acceptOnlyOne: { + check: "Boolean", + init: false, + event: "changeAcceptOnlyOne" + } + }, + members: { __resourceData: null, __showOrganizations: null, @@ -68,9 +77,9 @@ qx.Class.define("osparc.share.NewCollaboratorsManager", { switch (id) { case "intro-text": { let text = this.__showOrganizations ? - this.tr("Select users or organizations from the list below.") : - this.tr("Select users from the list below."); - text += this.tr("
<br>Search them if they aren't listed."); + this.tr("Select organizations or users from the list or search by name, userName or email.") : + this.tr("Select users from the list or search by name, userName or email."); + text += "<br>
" + this.tr("Keep in mind that users are only searchable based on the information they've chosen to make visible. To make yourself easier to find, adjust your visibility settings in My Account → Privacy."); control = new qx.ui.basic.Label().set({ value: text, rich: true, @@ -112,9 +121,7 @@ qx.Class.define("osparc.share.NewCollaboratorsManager", { break; } case "potential-collaborators-list": { - control = new qx.ui.container.Composite(new qx.ui.layout.VBox()).set({ - minHeight: 160, - }); + control = new qx.ui.container.Composite(new qx.ui.layout.VBox()); const scrollContainer = new qx.ui.container.Scroll(); scrollContainer.add(control); this.add(scrollContainer, { @@ -141,6 +148,7 @@ qx.Class.define("osparc.share.NewCollaboratorsManager", { allowGrowX: false, backgroundColor: "transparent", }); + control.getChildControl("arrow").syncAppearance(); this.getChildControl("access-rights-layout").add(control); break; case "access-rights-helper": { @@ -233,7 +241,7 @@ qx.Class.define("osparc.share.NewCollaboratorsManager", { const text = this.getChildControl("text-filter").getChildControl("textfield").getValue(); osparc.store.Users.getInstance().searchUsers(text) .then(users => { - users.forEach(user => user["collabType"] = 2); + users.forEach(user => user["collabType"] = osparc.store.Groups.COLLAB_TYPE.USER); this.__addPotentialCollaborators(users); }) .catch(err => osparc.FlashMessenger.logError(err)) @@ -276,6 +284,7 @@ qx.Class.define("osparc.share.NewCollaboratorsManager", { __collaboratorButton: function(collaborator) { const collaboratorButton = new osparc.filter.CollaboratorToggleButton(collaborator); + collaborator.button = collaboratorButton; collaboratorButton.groupId = collaborator.getGroupId(); collaboratorButton.subscribeToFilterGroup("collaboratorsManager"); @@ -298,6 +307,7 @@ qx.Class.define("osparc.share.NewCollaboratorsManager", { }; const collaborator = qx.data.marshal.Json.createModel(collaboratorData); const collaboratorButton = new osparc.filter.CollaboratorToggleButton(collaborator); + collaborator.button = collaboratorButton; collaboratorButton.setIconSrc("@FontAwesome5Solid/envelope/14"); collaboratorButton.addListener("changeValue", e => { @@ -309,6 +319,11 @@ qx.Class.define("osparc.share.NewCollaboratorsManager", { __collaboratorSelected: function(selected, collaboratorGidOrEmail, collaborator, collaboratorButton) { if (selected) { + if (this.isAcceptOnlyOne() && Object.keys(this.__selectedCollaborators).length) { + // unselect the previous collaborator + const id = Object.keys(this.__selectedCollaborators)[0]; + this.__selectedCollaborators[id].button.setValue(false); + } this.__selectedCollaborators[collaboratorGidOrEmail] = collaborator; collaboratorButton.unsubscribeToFilterGroup("collaboratorsManager"); } else if (collaborator.getGroupId() in this.__selectedCollaborators) { @@ -322,18 +337,21 @@ qx.Class.define("osparc.share.NewCollaboratorsManager", { const potentialCollaborators = Object.values(this.__potentialCollaborators).concat(foundCollaborators); const potentialCollaboratorList = this.getChildControl("potential-collaborators-list"); + // define the priority order + const collabTypeOrder = [ + osparc.store.Groups.COLLAB_TYPE.EVERYONE, + osparc.store.Groups.COLLAB_TYPE.SUPPORT, + osparc.store.Groups.COLLAB_TYPE.ORGANIZATION, + osparc.store.Groups.COLLAB_TYPE.USER + ]; // sort them first potentialCollaborators.sort((a, b) => { - if (a["collabType"] > b["collabType"]) { - return 1; - } - if (a["collabType"] < b["collabType"]) { - return -1; - } - if 
(a.getLabel() > b.getLabel()) { - return 1; + const typeDiff = collabTypeOrder.indexOf(a["collabType"]) - collabTypeOrder.indexOf(b["collabType"]); + if (typeDiff !== 0) { + return typeDiff; } - return -1; + // fallback: sort alphabetically by label + return a.getLabel().localeCompare(b.getLabel()); }); let existingCollabs = []; @@ -367,7 +385,7 @@ qx.Class.define("osparc.share.NewCollaboratorsManager", { return; } // maybe, do not list the organizations - if (this.__showOrganizations === false && potentialCollaborator["collabType"] !== 2) { + if (this.__showOrganizations === false && potentialCollaborator["collabType"] !== osparc.store.Groups.COLLAB_TYPE.USER) { return; } potentialCollaboratorList.add(this.__collaboratorButton(potentialCollaborator)); diff --git a/services/static-webserver/client/source/class/osparc/share/RequestServiceAccess.js b/services/static-webserver/client/source/class/osparc/share/RequestServiceAccess.js new file mode 100644 index 000000000000..c80121093d99 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/share/RequestServiceAccess.js @@ -0,0 +1,102 @@ +/* + * oSPARC - The SIMCORE frontend - https://osparc.io + * Copyright: 2025 IT'IS Foundation - https://itis.swiss + * License: MIT - https://opensource.org/licenses/MIT + * Authors: Odei Maiz (odeimaiz) + */ + +qx.Class.define("osparc.share.RequestServiceAccess", { + extend: qx.ui.core.Widget, + + construct: function(cantReadServicesData) { + this.base(arguments); + + this._setLayout(new qx.ui.layout.VBox(25)); + + this.__populateLayout(cantReadServicesData); + }, + + statics: { + openRequestAccess: function(cantReadServicesData) { + const requestServiceAccess = new osparc.share.RequestServiceAccess(cantReadServicesData); + const caption = qx.locale.Manager.tr("Request Apps Access"); + osparc.ui.window.Window.popUpInWindow(requestServiceAccess, caption, 600, 400).set({ + clickAwayClose: false, + resizable: true, + showClose: true + }); + } + }, + + members: { + __populateLayout: function(cantReadServicesData) { + const text = this.tr("In order to open the project, the following users/groups need to give you access to some apps. Please contact the app owner:"); + this._add(new qx.ui.basic.Label().set({ + value: text, + font: "text-14", + rich: true, + wrap: true + })); + + const grid = new qx.ui.layout.Grid(20, 10); + const layout = new qx.ui.container.Composite(grid); + this._add(layout); + + // Header + layout.add(new qx.ui.basic.Label(this.tr("Owner")), { + row: 0, + column: 0 + }); + layout.add(new qx.ui.basic.Label(this.tr("Email")), { + row: 0, + column: 1 + }); + layout.add(new qx.ui.basic.Label(this.tr("App")), { + row: 0, + column: 2 + }); + + // Populate the grid with the cantReadServicesData + cantReadServicesData.forEach((cantReadServiceData, idx) => { + const userGroupId = cantReadServiceData["owner"]; + if (userGroupId) { + const userName = new qx.ui.basic.Label().set({ + rich: true, + selectable: true, + }); + layout.add(userName, { + row: idx+1, + column: 0 + }); + const email = new qx.ui.basic.Label().set({ + rich: true, + selectable: true, + }); + layout.add(email, { + row: idx+1, + column: 1 + }); + const appLabel = new qx.ui.basic.Label().set({ + value: `${cantReadServiceData["key"]}:${osparc.service.Utils.extractVersionDisplay(cantReadServiceData["release"])}`, + rich: true, + selectable: true, + }); + layout.add(appLabel, { + row: idx+1, + column: 2 + }); + + osparc.store.Users.getInstance().getUser(userGroupId) + .then(user => { + userName.setValue(user ? 
user.getLabel() : this.tr("Unknown user")); + email.setValue(user ? user.getEmail() : "Unknown email"); + }) + .catch(() => { + userName.setValue(this.tr("Unknown user")); + email.setValue("Unknown email"); + }); + } + }); + } + } +}); diff --git a/services/static-webserver/client/source/class/osparc/share/ShareTemplateWith.js b/services/static-webserver/client/source/class/osparc/share/ShareTemplateWith.js index 5a26282b57d8..9c3ebe915e64 100644 --- a/services/static-webserver/client/source/class/osparc/share/ShareTemplateWith.js +++ b/services/static-webserver/client/source/class/osparc/share/ShareTemplateWith.js @@ -53,7 +53,10 @@ qx.Class.define("osparc.share.ShareTemplateWith", { value: this.tr("Make the template accessible to:"), font: "text-14", }); - addCollaborators.getChildControl("share-with").setLabel(this.tr("Share with...")); + addCollaborators.getChildControl("share-with").set({ + icon: "@FontAwesome5Solid/share-alt/12", + label: this.tr("Share"), + }); this._add(addCollaborators); this._add(this.__selectedCollabs); diff --git a/services/static-webserver/client/source/class/osparc/share/ShareePermissions.js b/services/static-webserver/client/source/class/osparc/share/ShareePermissions.js index bc8c92cbe991..cd516e76833f 100644 --- a/services/static-webserver/client/source/class/osparc/share/ShareePermissions.js +++ b/services/static-webserver/client/source/class/osparc/share/ShareePermissions.js @@ -5,6 +5,12 @@ * Authors: Odei Maiz (odeimaiz) */ +/** + * Data structure for showing sharee permissions. Array of objects with the following keys + * - accessible: boolean + * - gid: string // sharee group id + * - inaccessible_services: Array of objects with keys "key" and "version" + */ qx.Class.define("osparc.share.ShareePermissions", { extend: qx.ui.core.Widget, @@ -16,6 +22,37 @@ qx.Class.define("osparc.share.ShareePermissions", { this.__populateLayout(shareesData); }, + statics: { + checkShareePermissions: function(studyId, gids) { + const promises = []; + gids.forEach(gid => { + const params = { + url: { + studyId, + gid, + } + }; + promises.push(osparc.data.Resources.fetch("studies", "checkShareePermissions", params)); + }); + Promise.all(promises) + .then(shareesData => { + const inaccessibleShareesData = shareesData.filter(value => value["accessible"] === false); + if (inaccessibleShareesData.length) { + const shareePermissions = new osparc.share.ShareePermissions(inaccessibleShareesData); + const caption = qx.locale.Manager.tr("Sharee permissions"); + const win = osparc.ui.window.Window.popUpInWindow(shareePermissions, caption, 500, 500, "@FontAwesome5Solid/exclamation-triangle/14").set({ + clickAwayClose: false, + resizable: true, + showClose: true + }); + win.getChildControl("icon").set({ + textColor: "warning-yellow" + }); + } + }); + }, + }, + members: { __populateLayout: function(shareesData) { const text = this.tr("The following users/groups will not be able to open the shared study, because they don't have access to some services. 
Please contact the service owner(s) to give permission."); @@ -33,7 +70,7 @@ qx.Class.define("osparc.share.ShareePermissions", { this._add(layout); for (let i=0; i { - label.setValue(metadata["name"] + " : " + metadata["version"]) + .then(serviceMetadata => { + label.setValue(serviceMetadata["name"] + " : " + serviceMetadata["version"]) infoButton.addListener("execute", () => { - metadata["resourceType"] = "service"; - const resourceDetails = new osparc.dashboard.ResourceDetails(metadata); - osparc.dashboard.ResourceDetails.popUpInWindow(resourceDetails); + serviceMetadata["resourceType"] = "service"; + osparc.dashboard.ResourceDetails.popUpInWindow(serviceMetadata); }, this); }) diff --git a/services/static-webserver/client/source/class/osparc/snapshots/IterationsView.js b/services/static-webserver/client/source/class/osparc/snapshots/IterationsView.js index fd2c8eaa384e..0d643fa6a225 100644 --- a/services/static-webserver/client/source/class/osparc/snapshots/IterationsView.js +++ b/services/static-webserver/client/source/class/osparc/snapshots/IterationsView.js @@ -71,12 +71,7 @@ qx.Class.define("osparc.snapshots.IterationsView", { if (iterations.length) { const iterationPromises = []; iterations.forEach(iteration => { - const params = { - url: { - "studyId": iteration["workcopy_project_id"] - } - }; - iterationPromises.push(osparc.data.Resources.fetch("studies", "getOne", params)); + iterationPromises.push(osparc.store.Study.getInstance().getOne(iteration["workcopy_project_id"])); }); Promise.all(iterationPromises) .then(values => { @@ -138,7 +133,7 @@ qx.Class.define("osparc.snapshots.IterationsView", { const iteration = new osparc.data.model.Study(iterationData); iteration.setReadOnly(true); iteration.nodeUpdated(dataUpdate); - const iterationDataUpdated = iteration.serialize(false); + const iterationDataUpdated = iteration.serialize(); this.__iterations.splice(idx, 1, iterationDataUpdated); // update maximum once every 2" @@ -185,7 +180,7 @@ qx.Class.define("osparc.snapshots.IterationsView", { this.__iterationsSection.remove(this.__iterationsTable); } - const iterationsTable = this.__iterationsTable = new osparc.snapshots.Iterations(this.__study.serialize(false)); + const iterationsTable = this.__iterationsTable = new osparc.snapshots.Iterations(this.__study.serialize()); iterationsTable.populateTable(this.__iterations); iterationsTable.addListener("cellTap", e => { const selectedRow = e.getRow(); @@ -199,12 +194,7 @@ qx.Class.define("osparc.snapshots.IterationsView", { }, __reloadIteration: function(iterationId) { - const params = { - url: { - "studyId": iterationId - } - }; - osparc.data.Resources.fetch("studies", "getOne", params) + osparc.store.Study.getInstance().getOne(iterationId) .then(data => { const studyData = this.__study.serialize(); studyData["workbench"] = data["workbench"]; diff --git a/services/static-webserver/client/source/class/osparc/store/ConversationsProject.js b/services/static-webserver/client/source/class/osparc/store/ConversationsProject.js new file mode 100644 index 000000000000..fc513663524c --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/store/ConversationsProject.js @@ -0,0 +1,173 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2024 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + 
+************************************************************************ */ + +qx.Class.define("osparc.store.ConversationsProject", { + extend: qx.core.Object, + type: "singleton", + + events: { + "conversationRenamed": "qx.event.type.Data", + "conversationDeleted": "qx.event.type.Data", + }, + + statics: { + TYPES: { + PROJECT_STATIC: "PROJECT_STATIC", + PROJECT_ANNOTATION: "PROJECT_ANNOTATION", + }, + }, + + members: { + getConversations: function(studyId) { + const params = { + url: { + studyId, + offset: 0, + limit: 42, + } + }; + return osparc.data.Resources.fetch("conversationsStudies", "getConversationsPage", params) + .then(conversations => { + if (conversations.length) { + // Sort conversations by created date, oldest first (the new ones will be next to the plus button) + conversations.sort((a, b) => new Date(a["created"]) - new Date(b["created"])); + } + return conversations; + }) + .catch(err => osparc.FlashMessenger.logError(err)); + }, + + getConversation: function(studyId, conversationId) { + const params = { + url: { + studyId, + conversationId, + } + }; + return osparc.data.Resources.fetch("conversationsStudies", "getConversation", params); + }, + + postConversation: function(studyId, name = "new 1", type = osparc.store.ConversationsProject.TYPES.PROJECT_STATIC) { + const params = { + url: { + studyId, + }, + data: { + name, + type, + } + }; + return osparc.data.Resources.fetch("conversationsStudies", "postConversation", params) + .catch(err => osparc.FlashMessenger.logError(err)); + }, + + deleteConversation: function(studyId, conversationId) { + const params = { + url: { + studyId, + conversationId, + }, + }; + return osparc.data.Resources.fetch("conversationsStudies", "deleteConversation", params) + .then(() => { + this.fireDataEvent("conversationDeleted", { + studyId, + conversationId, + }) + }) + .catch(err => osparc.FlashMessenger.logError(err)); + }, + + renameConversation: function(studyId, conversationId, name) { + const params = { + url: { + studyId, + conversationId, + }, + data: { + name, + } + }; + return osparc.data.Resources.fetch("conversationsStudies", "renameConversation", params) + .then(() => { + this.fireDataEvent("conversationRenamed", { + studyId, + conversationId, + name, + }); + }) + .catch(err => osparc.FlashMessenger.logError(err)); + }, + + postMessage: function(studyId, conversationId, message) { + const params = { + url: { + studyId, + conversationId, + }, + data: { + "content": message, + "type": "MESSAGE", + } + }; + return osparc.data.Resources.fetch("conversationsStudies", "postMessage", params) + .catch(err => osparc.FlashMessenger.logError(err)); + }, + + editMessage: function(studyId, conversationId, messageId, message) { + const params = { + url: { + studyId, + conversationId, + messageId, + }, + data: { + "content": message, + }, + }; + return osparc.data.Resources.fetch("conversationsStudies", "editMessage", params) + .catch(err => osparc.FlashMessenger.logError(err)); + }, + + deleteMessage: function(message) { + const params = { + url: { + studyId: message["projectId"], + conversationId: message["conversationId"], + messageId: message["messageId"], + }, + }; + return osparc.data.Resources.fetch("conversationsStudies", "deleteMessage", params) + .catch(err => osparc.FlashMessenger.logError(err)); + }, + + notifyUser: function(studyId, conversationId, userGroupId) { + const params = { + url: { + studyId, + conversationId, + }, + data: { + "content": userGroupId.toString(), // eventually the backend will accept integers + "type": 
"NOTIFICATION", + } + }; + return osparc.data.Resources.fetch("conversationsStudies", "postMessage", params) + .catch(err => osparc.FlashMessenger.logError(err)); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/store/ConversationsSupport.js b/services/static-webserver/client/source/class/osparc/store/ConversationsSupport.js new file mode 100644 index 000000000000..53a2150bbbb2 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/store/ConversationsSupport.js @@ -0,0 +1,217 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.store.ConversationsSupport", { + extend: qx.core.Object, + type: "singleton", + + construct: function() { + this.base(arguments); + + this.__conversationsCached = {}; + }, + + events: { + "conversationCreated": "qx.event.type.Data", + "conversationDeleted": "qx.event.type.Data", + }, + + statics: { + TYPES: { + SUPPORT: "SUPPORT", + }, + }, + + members: { + fetchConversations: function() { + const params = { + url: { + offset: 0, + limit: 42, + } + }; + return osparc.data.Resources.fetch("conversationsSupport", "getConversationsPage", params) + .then(conversationsData => { + const conversations = []; + if (conversationsData.length) { + // Sort conversations by created date, newest first (the new ones will be next to the plus button) + conversationsData.sort((a, b) => new Date(b["created"]) - new Date(a["created"])); + } + conversationsData.forEach(conversationData => { + const conversation = new osparc.data.model.Conversation(conversationData); + this.__addToCache(conversation); + conversations.push(conversation); + }); + return conversations; + }) + .catch(err => osparc.FlashMessenger.logError(err)); + }, + + getConversation: function(conversationId) { + if (conversationId in this.__conversationsCached) { + return Promise.resolve(this.__conversationsCached[conversationId]); + } + + const params = { + url: { + conversationId, + } + }; + return osparc.data.Resources.fetch("conversationsSupport", "getConversation", params) + .then(conversationData => { + const conversation = new osparc.data.model.Conversation(conversationData); + this.__addToCache(conversation); + return conversation; + }); + }, + + postConversation: function(extraContext = {}) { + const url = window.location.href; + extraContext["deployment"] = url; + extraContext["product"] = osparc.product.Utils.getProductName(); + const params = { + data: { + name: "null", + type: osparc.store.ConversationsSupport.TYPES.SUPPORT, + extraContext, + } + }; + return osparc.data.Resources.fetch("conversationsSupport", "postConversation", params) + .then(conversationData => { + const conversation = new osparc.data.model.Conversation(conversationData); + this.__addToCache(conversation); + this.fireDataEvent("conversationCreated", conversation); + return conversationData; + }) + .catch(err => osparc.FlashMessenger.logError(err)); + }, + + deleteConversation: function(conversationId) { + const params = { + url: { + conversationId, + }, + }; + return osparc.data.Resources.fetch("conversationsSupport", "deleteConversation", params) + .then(() => { + this.fireDataEvent("conversationDeleted", { + conversationId, + }) + }) + .catch(err => 
osparc.FlashMessenger.logError(err)); + }, + + renameConversation: function(conversationId, name) { + const params = { + url: { + conversationId, + }, + data: { + name, + } + }; + return osparc.data.Resources.fetch("conversationsSupport", "patchConversation", params); + }, + + patchExtraContext: function(conversationId, extraContext) { + const params = { + url: { + conversationId, + }, + data: { + extraContext, + } + }; + return osparc.data.Resources.fetch("conversationsSupport", "patchConversation", params); + }, + + fetchLastMessage: function(conversationId) { + if ( + conversationId in this.__conversationsCached && + this.__conversationsCached[conversationId].getLastMessage() + ) { + return Promise.resolve(this.__conversationsCached[conversationId].getLastMessage()); + } + + const params = { + url: { + conversationId, + offset: 0, + limit: 1, + } + }; + return osparc.data.Resources.fetch("conversationsSupport", "getMessagesPage", params) + .then(messagesData => { + if (messagesData && messagesData.length) { + const lastMessage = messagesData[0]; + this.__addMessageToConversation(conversationId, lastMessage); + return lastMessage; + } + return null; + }); + }, + + postMessage: function(conversationId, message) { + const params = { + url: { + conversationId, + }, + data: { + "content": message, + "type": "MESSAGE", + } + }; + return osparc.data.Resources.fetch("conversationsSupport", "postMessage", params) + .catch(err => osparc.FlashMessenger.logError(err)); + }, + + editMessage: function(conversationId, messageId, message) { + const params = { + url: { + conversationId, + messageId, + }, + data: { + "content": message, + }, + }; + return osparc.data.Resources.fetch("conversationsSupport", "editMessage", params) + .catch(err => osparc.FlashMessenger.logError(err)); + }, + + deleteMessage: function(message) { + const params = { + url: { + conversationId: message["conversationId"], + messageId: message["messageId"], + }, + }; + return osparc.data.Resources.fetch("conversationsSupport", "deleteMessage", params) + .catch(err => osparc.FlashMessenger.logError(err)); + }, + + __addToCache: function(conversation) { + this.__conversationsCached[conversation.getConversationId()] = conversation; + }, + + __addMessageToConversation: function(conversationId, messageData) { + if (conversationId in this.__conversationsCached) { + this.__conversationsCached[conversationId].addMessage(messageData); + } + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/store/Functions.js b/services/static-webserver/client/source/class/osparc/store/Functions.js new file mode 100644 index 000000000000..40327d733199 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/store/Functions.js @@ -0,0 +1,241 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.store.Functions", { + type: "static", + + statics: { + __createFunctionData: function(templateData, name, description, defaultInputs = {}, exposedInputs = {}, exposedOutputs = {}) { + const functionData = { + "projectId": templateData["uuid"], + "title": name, + "description": description, + "function_class": osparc.data.model.Function.FUNCTION_CLASS.PROJECT, + "inputSchema": { + 
"schema_class": "application/schema+json", + "schema_content": { + "type": "object", + "properties": {}, + "required": [] + } + }, + "outputSchema": { + "schema_class": "application/schema+json", + "schema_content": { + "type": "object", + "properties": {}, + "required": [] + } + }, + "defaultInputs": {}, + }; + + const parameters = osparc.study.Utils.extractFunctionableParameters(templateData["workbench"]); + parameters.forEach(parameter => { + const parameterKey = parameter["label"]; + if (exposedInputs[parameterKey]) { + const parameterMetadata = osparc.store.Services.getMetadata(parameter["key"], parameter["version"]); + if (parameterMetadata) { + const type = osparc.service.Utils.getParameterType(parameterMetadata); + functionData["inputSchema"]["schema_content"]["properties"][parameterKey] = { + "type": type, + }; + functionData["inputSchema"]["schema_content"]["required"].push(parameterKey); + } + } + if (parameterKey in defaultInputs) { + functionData["defaultInputs"][parameterKey] = defaultInputs[parameterKey]; + } + }); + + const probes = osparc.study.Utils.extractFunctionableProbes(templateData["workbench"]); + probes.forEach(probe => { + const probeLabel = probe["label"]; + if (exposedOutputs[probeLabel]) { + const probeMetadata = osparc.store.Services.getMetadata(probe["key"], probe["version"]); + if (probeMetadata) { + const type = osparc.service.Utils.getProbeType(probeMetadata); + functionData["outputSchema"]["schema_content"]["properties"][probeLabel] = { + "type": type, + }; + functionData["outputSchema"]["schema_content"]["required"].push(probeLabel); + } + } + }); + + return functionData; + }, + + registerFunction: function(templateData, name, description, defaultInputs, exposedInputs, exposedOutputs) { + const functionData = this.__createFunctionData(templateData, name, description, defaultInputs, exposedInputs, exposedOutputs); + const params = { + data: functionData, + }; + return osparc.data.Resources.fetch("functions", "create", params); + }, + + curateOrderBy: function(orderBy) { + const curatedOrderBy = JSON.parse(orderBy); + switch (curatedOrderBy.field) { + case "last_change_date": + curatedOrderBy.field = "modified_at"; + break; + case "creation_date": + curatedOrderBy.field = "created_at"; + break; + case "name": + // Backend does not currently support sorting by 'name'. + // Fallback: sort by 'modified_at' instead. + // TODO: Remove this workaround once backend supports sorting by 'name'. 
+ curatedOrderBy.field = "modified_at"; + break; + default: + // only those three are supported + curatedOrderBy.field = "modified_at"; + } + return JSON.stringify(curatedOrderBy); + }, + + fetchFunctionsPaginated: function(params, options) { + if ("orderBy" in params["url"]) { + params["url"]["orderBy"] = this.curateOrderBy(params["url"]["orderBy"]); + } + return osparc.data.Resources.fetch("functions", "getPage", params, options) + .then(response => { + const functions = response["data"]; + functions.forEach(func => func["resourceType"] = "function"); + return response; + }); + }, + + searchFunctionsPaginated: function(params, options) { + if ("orderBy" in params["url"]) { + params["url"]["orderBy"] = this.curateOrderBy(params["url"]["orderBy"]); + } + return osparc.data.Resources.fetch("functions", "getPageSearch", params, options) + .then(response => { + const functions = response["data"]; + functions.forEach(func => func["resourceType"] = "function"); + return response; + }); + }, + + fetchFunction: function(functionId) { + const params = { + url: { + "functionId": functionId + } + }; + return osparc.data.Resources.fetch("functions", "getOne", params) + .then(func => { + func["resourceType"] = "function"; + return func; + }) + .catch(error => { + console.error("Error fetching function:", error); + throw error; // Rethrow the error to propagate it to the caller + }); + }, + + patchFunction: function(functionId, functionChanges) { + const params = { + url: { + functionId + }, + data: functionChanges + }; + return osparc.data.Resources.fetch("functions", "patch", params) + .catch(error => { + console.error("Error patching function:", error); + throw error; // Rethrow the error to propagate it to the caller + }); + }, + + deleteFunction: function(functionId, force = false) { + const params = { + url: { + functionId, + force, + } + }; + return osparc.data.Resources.fetch("functions", "delete", params) + .catch(error => { + console.error("Error deleting function:", error); + throw error; // Rethrow the error to propagate it to the caller + }); + }, + + __putCollaborator: function(functionData, gid, newPermissions) { + const params = { + url: { + "functionId": functionData["uuid"], + "gId": gid, + }, + data: newPermissions + }; + return osparc.data.Resources.fetch("functions", "putAccessRights", params) + }, + + addCollaborators: function(functionData, newCollaborators) { + const promises = []; + Object.keys(newCollaborators).forEach(gid => { + promises.push(this.__putCollaborator(functionData, gid, newCollaborators[gid])); + }); + return Promise.all(promises) + .then(() => { + Object.keys(newCollaborators).forEach(gid => { + functionData["accessRights"][gid] = newCollaborators[gid]; + }); + functionData["lastChangeDate"] = new Date().toISOString(); + }) + .catch(err => { + osparc.FlashMessenger.logError(err); + throw err; + }); + }, + + updateCollaborator: function(functionData, gid, newPermissions) { + return this.__putCollaborator(functionData, gid, newPermissions) + .then(() => { + functionData["accessRights"][gid] = newPermissions; + functionData["lastChangeDate"] = new Date().toISOString(); + }) + .catch(err => { + osparc.FlashMessenger.logError(err); + throw err; + }); + }, + + removeCollaborator: function(functionData, gid) { + const params = { + url: { + "functionId": functionData["uuid"], + "gId": gid + } + }; + return osparc.data.Resources.fetch("functions", "deleteAccessRights", params) + .then(() => { + delete functionData["accessRights"][gid]; + functionData["lastChangeDate"] = 
new Date().toISOString(); + }) + .catch(err => { + osparc.FlashMessenger.logError(err); + throw err; + }); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/store/Groups.js b/services/static-webserver/client/source/class/osparc/store/Groups.js index e367f0e9cbb3..a91598e6378d 100644 --- a/services/static-webserver/client/source/class/osparc/store/Groups.js +++ b/services/static-webserver/client/source/class/osparc/store/Groups.js @@ -28,17 +28,25 @@ qx.Class.define("osparc.store.Groups", { properties: { everyoneGroup: { check: "osparc.data.model.Group", - init: {} + init: null // this will stay null for guest users }, everyoneProductGroup: { check: "osparc.data.model.Group", - init: {} + init: null // this will stay null for guest users + }, + + supportGroup: { + check: "osparc.data.model.Group", + init: null, // this will stay null for guest users + nullable: true, + event: "changeSupportGroup", }, organizations: { check: "Object", - init: {} + init: {}, + event: "organizationsChanged", }, groupMe: { @@ -47,20 +55,20 @@ qx.Class.define("osparc.store.Groups", { }, }, - events: { - "groupAdded": "qx.event.type.Data", - "groupRemoved": "qx.event.type.Data", - }, - statics: { - curateOrderBy: function(orderBy) { - const curatedOrderBy = osparc.utils.Utils.deepCloneObject(orderBy); - if (curatedOrderBy.field !== "name") { - // only "modified_at" and "name" supported - curatedOrderBy.field = "modified_at"; - } - return curatedOrderBy; + COLLAB_TYPE: { + EVERYONE: "everyone", + SUPPORT: "support", + ORGANIZATION: "organization", + USER: "user", }, + + COLLAB_TYPE_ORDER: [ + "everyone", // osparc.store.Groups.COLLAB_TYPE.EVERYONE + "support", // osparc.store.Groups.COLLAB_TYPE.SUPPORT, + "organization", // osparc.store.Groups.COLLAB_TYPE.ORGANIZATION + "user", // osparc.store.Groups.COLLAB_TYPE.USER + ], }, members: { @@ -77,21 +85,37 @@ qx.Class.define("osparc.store.Groups", { .then(resp => { const everyoneGroup = this.__addToGroupsCache(resp["all"], "everyone"); const productEveryoneGroup = this.__addToGroupsCache(resp["product"], "productEveryone"); + let supportGroup = null; + if ("support" in resp && resp["support"]) { + resp["support"]["accessRights"] = { + "read": false, + "write": false, + "delete": false, + }; + supportGroup = this.__addToGroupsCache(resp["support"], "support"); + } const groupMe = this.__addToGroupsCache(resp["me"], "me"); const orgs = {}; resp["organizations"].forEach(organization => { + if (supportGroup && supportGroup.getGroupId() === organization["gid"]) { + // support group was already added to the cache, but it was missing the accessRights + // the accessRights come from the organization, update them + supportGroup.setAccessRights(organization["accessRights"]); + } const org = this.__addToGroupsCache(organization, "organization"); orgs[org.getGroupId()] = org; }); this.setEveryoneGroup(everyoneGroup); this.setEveryoneProductGroup(productEveryoneGroup); + this.setSupportGroup(supportGroup); this.setOrganizations(orgs); this.setGroupMe(groupMe); const myAuthData = osparc.auth.Data.getInstance(); + const description = osparc.data.model.User.userDataToDescription(myAuthData.getFirstName(), myAuthData.getLastName(), myAuthData.getEmail()); groupMe.set({ - label: myAuthData.getUsername(), - description: `${myAuthData.getFirstName()} ${myAuthData.getLastName()} - ${myAuthData.getEmail()}`, - thumbnail: osparc.utils.Avatar.emailToThumbnail(myAuthData.getEmail(), myAuthData.getUsername()), + label: myAuthData.getUserName(), + description, + 
thumbnail: myAuthData.getAvatar(32), }) return orgs; }); @@ -135,10 +159,19 @@ qx.Class.define("osparc.store.Groups", { const allGroupsAndUsers = {}; const groupEveryone = this.getEveryoneGroup(); - allGroupsAndUsers[groupEveryone.getGroupId()] = groupEveryone; + if (groupEveryone) { + allGroupsAndUsers[groupEveryone.getGroupId()] = groupEveryone; + } const groupProductEveryone = this.getEveryoneProductGroup(); - allGroupsAndUsers[groupProductEveryone.getGroupId()] = groupProductEveryone; + if (groupProductEveryone) { + allGroupsAndUsers[groupProductEveryone.getGroupId()] = groupProductEveryone; + } + + const supportGroup = this.getSupportGroup(); + if (supportGroup) { + allGroupsAndUsers[supportGroup.getGroupId()] = supportGroup; + } const groupMe = this.getGroupMe(); allGroupsAndUsers[groupMe.getGroupId()] = groupMe; @@ -158,32 +191,82 @@ qx.Class.define("osparc.store.Groups", { return Object.keys(this.getOrganizations()); }, + getAllMyGroupIds: function() { + const allMyGroupIds = [ + this.getMyGroupId(), + ...this.getOrganizationIds().map(gId => parseInt(gId)) + ]; + if (this.getEveryoneGroup()) { + allMyGroupIds.push(this.getEveryoneGroup().getGroupId()); + } + return allMyGroupIds; + }, + + getEveryoneGroupIds: function() { + const everyoneGroupIds = this.getEveryoneGroups().map(g => g.getGroupId()); + return everyoneGroupIds; + }, + + getEveryoneGroups: function() { + const everyoneGroups = []; + if (this.getEveryoneProductGroup()) { + everyoneGroups.push(this.getEveryoneProductGroup()); + } + if (this.getEveryoneGroup()) { + everyoneGroups.push(this.getEveryoneGroup()); + } + return everyoneGroups; + }, + + isSupportEnabled: function() { + return Boolean(this.getSupportGroup()); + }, + + amIASupportUser: function() { + const supportGroup = this.getSupportGroup(); + if (supportGroup) { + const myOrgIds = this.getOrganizationIds().map(gId => parseInt(gId)); + return myOrgIds.includes(supportGroup.getGroupId()); + } + return false; + }, + getGroup: function(groupId) { const groups = []; const groupMe = this.getGroupMe(); - groupMe["collabType"] = 2; + groupMe["collabType"] = osparc.store.Groups.COLLAB_TYPE.USER; groups.push(groupMe); const usersStore = osparc.store.Users.getInstance(); const users = usersStore.getUsers(); users.forEach(user => { - user["collabType"] = 2; + user["collabType"] = osparc.store.Groups.COLLAB_TYPE.USER; groups.push(user); }); Object.values(this.getOrganizations()).forEach(org => { - org["collabType"] = 1; + org["collabType"] = osparc.store.Groups.COLLAB_TYPE.ORGANIZATION; groups.push(org); }); + const supportGroup = this.getSupportGroup(); + if (supportGroup && groups.findIndex(g => g.getGroupId() === supportGroup.getGroupId()) === -1) { + supportGroup["collabType"] = osparc.store.Groups.COLLAB_TYPE.SUPPORT; + groups.push(supportGroup); + } + const groupProductEveryone = this.getEveryoneProductGroup(); - groupProductEveryone["collabType"] = 0; - groups.push(groupProductEveryone); + if (groupProductEveryone) { + groupProductEveryone["collabType"] = osparc.store.Groups.COLLAB_TYPE.EVERYONE; + groups.push(groupProductEveryone); + } const groupEveryone = this.getEveryoneGroup(); - groupEveryone["collabType"] = 0; - groups.push(groupEveryone); + if (groupEveryone) { + groupEveryone["collabType"] = osparc.store.Groups.COLLAB_TYPE.EVERYONE; + groups.push(groupEveryone); + } const idx = groups.findIndex(group => group.getGroupId() === parseInt(groupId)); if (idx > -1) { return groups[idx]; @@ -194,10 +277,11 @@ qx.Class.define("osparc.store.Groups", { 
getPotentialCollaborators: function(includeMe = false, includeProductEveryone = false) { const potentialCollaborators = {}; const orgs = this.getOrganizations(); + const supportGroup = this.getSupportGroup(); const productEveryone = this.getEveryoneProductGroup(); if (includeProductEveryone && productEveryone) { - productEveryone["collabType"] = 0; + productEveryone["collabType"] = osparc.store.Groups.COLLAB_TYPE.EVERYONE; potentialCollaborators[productEveryone.getGroupId()] = productEveryone; } @@ -208,21 +292,26 @@ qx.Class.define("osparc.store.Groups", { if (org.getGroupId() === productEveryone.getGroupId() && !includeProductEveryone) { return; } - org["collabType"] = 1; + org["collabType"] = osparc.store.Groups.COLLAB_TYPE.ORGANIZATION; potentialCollaborators[org.getGroupId()] = org; } }); + if (supportGroup && !(supportGroup.getGroupId() in potentialCollaborators)) { + supportGroup["collabType"] = osparc.store.Groups.COLLAB_TYPE.SUPPORT; + potentialCollaborators[supportGroup.getGroupId()] = supportGroup; + } + if (includeMe) { const myGroup = this.getGroupMe(); - myGroup["collabType"] = 2; + myGroup["collabType"] = osparc.store.Groups.COLLAB_TYPE.USER; potentialCollaborators[myGroup.getGroupId()] = myGroup; } const usersStore = osparc.store.Users.getInstance(); const users = usersStore.getUsers(); users.forEach(user => { - user["collabType"] = 2; + user["collabType"] = osparc.store.Groups.COLLAB_TYPE.USER; potentialCollaborators[user.getGroupId()] = user; }); @@ -263,10 +352,10 @@ qx.Class.define("osparc.store.Groups", { return null; }, - getGroupMemberByUsername: function(orgId, username) { + getGroupMemberByUserName: function(orgId, userName) { const org = this.getGroup(orgId); if (org) { - return org.getGroupMemberByUsername(username); + return org.getGroupMemberByUserName(userName); } return null; }, @@ -340,7 +429,7 @@ qx.Class.define("osparc.store.Groups", { // CRUD GROUP // CRUD GROUP MEMBERS - addMember: function(orgId, username, email = null) { + addMember: function(orgId, userName, email = null) { const gid = parseInt(orgId); const params = { url: { @@ -351,7 +440,7 @@ qx.Class.define("osparc.store.Groups", { if (email) { params.data["email"] = email; } else { - params.data["userName"] = username; + params.data["userName"] = userName; } return osparc.data.Resources.fetch("organizationMembers", "post", params) .then(() => { @@ -360,7 +449,7 @@ qx.Class.define("osparc.store.Groups", { return this.__fetchGroupMembers(gid); }) .then(() => { - const groupMember = email ? this.getGroupMemberByLogin(gid, email) : this.getGroupMemberByUsername(gid, username); + const groupMember = email ? 
this.getGroupMemberByLogin(gid, email) : this.getGroupMemberByUserName(gid, userName); if (groupMember) { return groupMember; } diff --git a/services/static-webserver/client/source/class/osparc/store/Jobs.js b/services/static-webserver/client/source/class/osparc/store/Jobs.js index 6e6dbb1d5d0e..b3c88476c53e 100644 --- a/services/static-webserver/client/source/class/osparc/store/Jobs.js +++ b/services/static-webserver/client/source/class/osparc/store/Jobs.js @@ -28,10 +28,6 @@ qx.Class.define("osparc.store.Jobs", { }, }, - events: { - "changeJobsActive": "qx.event.type.Data", - }, - statics: { SERVER_MAX_LIMIT: 49, }, @@ -62,9 +58,6 @@ qx.Class.define("osparc.store.Jobs", { }; return osparc.data.Resources.fetch("runs", "getPageLatest", params, options) .then(jobsResp => { - if (runningOnly) { - this.fireDataEvent("changeJobsActive", jobsResp["_meta"]["total"]); - } const jobsActive = []; if ("data" in jobsResp) { jobsResp["data"].forEach(jobActiveData => { @@ -80,8 +73,7 @@ qx.Class.define("osparc.store.Jobs", { }, fetchJobsHistory: function( - studyId, - includeChildren = false, + projectId, offset = 0, limit = this.self().SERVER_MAX_LIMIT, orderBy = { @@ -92,8 +84,7 @@ qx.Class.define("osparc.store.Jobs", { ) { const params = { url: { - studyId, - includeChildren, + projectId, offset, limit, orderBy: JSON.stringify(orderBy), @@ -120,7 +111,7 @@ qx.Class.define("osparc.store.Jobs", { }, fetchSubJobs: function( - projectUuid, + collectionRunId, orderBy = { field: "started_at", direction: "desc" @@ -128,16 +119,15 @@ qx.Class.define("osparc.store.Jobs", { ) { const params = { url: { - studyId: projectUuid, + collectionRunId, orderBy: JSON.stringify(orderBy), - includeChildren: false, } }; return osparc.data.Resources.getInstance().getAllPages("subRuns", params, "getPageLatest") .then(subJobsData => { const subJobs = []; subJobsData.forEach(subJobData => { - subJobs.push(this.addSubJob(subJobData)); + subJobs.push(this.addSubJob(collectionRunId, subJobData)); }); return subJobs; }) @@ -146,7 +136,7 @@ qx.Class.define("osparc.store.Jobs", { __addJob: function(jobData) { const jobs = this.getJobs(); - const jobFound = jobs.find(job => job.getProjectUuid() === jobData["projectUuid"]); + const jobFound = jobs.find(job => job.getCollectionRunId() === jobData["collectionRunId"]); if (jobFound) { jobFound.updateJob(jobData); return jobFound; @@ -156,22 +146,22 @@ qx.Class.define("osparc.store.Jobs", { return job; }, - addSubJob: function(subJobData) { - let job = this.getJob(subJobData["projectUuid"]); + addSubJob: function(collectionRunId, subJobData) { + let job = this.getJob(collectionRunId); if (!job) { const jobs = this.getJobs(); job = new osparc.data.Job({ - "projectUuid": subJobData["projectUuid"], + collectionRunId, }); jobs.push(job); } - const subJob = job.addSubJob(subJobData); + const subJob = job.addSubJob(collectionRunId, subJobData); return subJob; }, - getJob: function(projectUuid) { + getJob: function(collectionRunId) { const jobs = this.getJobs(); - return jobs.find(job => job.getProjectUuid() === projectUuid); + return jobs.find(job => job.getCollectionRunId() === collectionRunId); }, } }); diff --git a/services/static-webserver/client/source/class/osparc/store/Pricing.js b/services/static-webserver/client/source/class/osparc/store/Pricing.js index 46f7629d8a88..13c562e439b9 100644 --- a/services/static-webserver/client/source/class/osparc/store/Pricing.js +++ b/services/static-webserver/client/source/class/osparc/store/Pricing.js @@ -22,15 +22,11 @@ 
qx.Class.define("osparc.store.Pricing", { construct: function() { this.base(arguments); - this.pricingPlansCached = []; - }, - - events: { - "pricingPlansChanged": "qx.event.type.Data", + this.__pricingPlansCached = []; }, members: { - pricingPlansCached: null, + __pricingPlansCached: null, fetchPricingPlans: function() { const resourceName = osparc.data.Permissions.getInstance().isAdmin() ? "adminPricingPlans" : "pricingPlans"; @@ -71,16 +67,6 @@ qx.Class.define("osparc.store.Pricing", { .catch(console.error); }, - fetchPricingPlansService: function(serviceKey, serviceVersion) { - const plansParams = { - url: osparc.data.Resources.getServiceUrl(serviceKey, serviceVersion) - }; - return osparc.data.Resources.fetch("services", "pricingPlans", plansParams) - .then(pricingPlansData => { - return pricingPlansData; - }); - }, - fetchPricingUnits: function(pricingPlanId) { if (this.getPricingPlan(pricingPlanId) && this.getPricingPlan(pricingPlanId).getPricingUnits().length !== 0) { return new Promise(resolve => resolve(this.getPricingPlan(pricingPlanId).getPricingUnits())); @@ -136,11 +122,11 @@ qx.Class.define("osparc.store.Pricing", { }, getPricingPlans: function() { - return this.pricingPlansCached; + return this.__pricingPlansCached; }, getPricingPlan: function(pricingPlanId = null) { - return this.pricingPlansCached.find(f => f.getPricingPlanId() === pricingPlanId); + return this.__pricingPlansCached.find(f => f.getPricingPlanId() === pricingPlanId); }, getPricingUnits: function(pricingPlanId) { @@ -160,7 +146,7 @@ qx.Class.define("osparc.store.Pricing", { }, __addToCache: function(pricingPlanData) { - let pricingPlan = this.pricingPlansCached.find(f => f.getPricingPlanId() === pricingPlanData["pricingPlanId"]); + let pricingPlan = this.__pricingPlansCached.find(f => f.getPricingPlanId() === pricingPlanData["pricingPlanId"]); if (pricingPlan) { // put pricingPlan.set({ @@ -173,7 +159,7 @@ qx.Class.define("osparc.store.Pricing", { } else { // get and post pricingPlan = new osparc.data.model.PricingPlan(pricingPlanData); - this.pricingPlansCached.unshift(pricingPlan); + this.__pricingPlansCached.unshift(pricingPlan); } return pricingPlan; }, diff --git a/services/static-webserver/client/source/class/osparc/store/Products.js b/services/static-webserver/client/source/class/osparc/store/Products.js index 38ef8ad2e6f2..c62da0be5ee5 100644 --- a/services/static-webserver/client/source/class/osparc/store/Products.js +++ b/services/static-webserver/client/source/class/osparc/store/Products.js @@ -38,23 +38,12 @@ qx.Class.define("osparc.store.Products", { } Promise.all([ - osparc.data.Resources.fetch("productMetadata", "getUiConfig"), - osparc.utils.Utils.fetchJSON("/resource/osparc/ui_config.json"), + this.__getUIConfig(), osparc.utils.Utils.fetchJSON("/resource/schemas/product-ui.json"), ]) .then(values => { - let uiConfig = {}; - const beUiConfig = values[0]; - const feUiConfig = values[1]; - const schema = values[2]; - if (beUiConfig && beUiConfig["ui"] && Object.keys(beUiConfig["ui"]).length) { - uiConfig = beUiConfig["ui"]; - } else { - const product = osparc.product.Utils.getProductName(); - if (feUiConfig && product in feUiConfig) { - uiConfig = feUiConfig[product]; - } - } + const uiConfig = values[0]; + const schema = values[1]; const ajvLoader = new qx.util.DynamicScriptLoader([ "/resource/ajv/ajv-6-11-0.min.js", "/resource/object-path/object-path-0-11-4.min.js" @@ -85,6 +74,46 @@ qx.Class.define("osparc.store.Products", { }); }, + __getUIConfig: function() { + return Promise.all([ + 
this.__getUiConfigBackend(), + this.__getUiConfigFrontend(), + ]) + .then(values => { + const beUiConfig = values[0]; + if (beUiConfig) { + return beUiConfig; + } + const feUiConfig = values[1]; + return feUiConfig || {}; + }); + }, + + __getUiConfigBackend: function() { + if (osparc.auth.Data.getInstance().isGuest()) { + // Guest users do not have access to product metadata + return Promise.resolve(null); + } + return osparc.data.Resources.fetch("productMetadata", "getUiConfig") + .then(response => { + if (response && response["ui"] && Object.keys(response["ui"]).length) { + return response["ui"]; + } + return null; + }); + }, + + __getUiConfigFrontend: function() { + return osparc.utils.Utils.fetchJSON("/resource/osparc/ui_config.json") + .then(uiConfig => { + const product = osparc.product.Utils.getProductName(); + if (uiConfig && product in uiConfig) { + return uiConfig[product]; + } + return null; + }); + }, + getPlusButtonUiConfig: function() { return this.__uiConfig["plusButton"]; }, @@ -92,5 +121,9 @@ qx.Class.define("osparc.store.Products", { getNewStudiesUiConfig: function() { return this.__uiConfig["newStudies"]; }, + + getGroupedServicesUiConfig: function() { + return this.__uiConfig["groupedServices"]; + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/store/Services.js b/services/static-webserver/client/source/class/osparc/store/Services.js index eac4a2ba3384..07df08d4337e 100644 --- a/services/static-webserver/client/source/class/osparc/store/Services.js +++ b/services/static-webserver/client/source/class/osparc/store/Services.js @@ -22,6 +22,9 @@ qx.Class.define("osparc.store.Services", { __servicesCached: {}, __servicesPromisesCached: {}, __studyServicesPromisesCached: {}, + __pricingPlansCached: {}, + + UNKNOWN_SERVICE_KEY: "simcore/services/frontend/unknown", getServicesLatest: function(useCache = true) { return new Promise(resolve => { @@ -37,7 +40,7 @@ qx.Class.define("osparc.store.Services", { const servicesObj = osparc.service.Utils.convertArrayToObject(servicesArray); this.__addHits(servicesObj); this.__addTSRInfos(servicesObj); - this.__addExtraTypeInfos(servicesObj); + this.__addXTypeInfos(servicesObj); Object.values(servicesObj).forEach(serviceKey => { Object.values(serviceKey).forEach(service => this.__addServiceToCache(service)); @@ -63,8 +66,8 @@ qx.Class.define("osparc.store.Services", { getLatestCompatible: function(key, version) { const services = this.__servicesCached; if (key in services && version in services[key]) { - const historyEntry = osparc.service.Utils.extractVersionFromHistory(services[key][version]); - if (historyEntry["compatibility"] && historyEntry["compatibility"]["canUpdateTo"]) { + const historyEntry = osparc.service.Utils.getHistoryEntry(services[key][version]); + if (historyEntry && historyEntry["compatibility"] && historyEntry["compatibility"]["canUpdateTo"]) { const canUpdateTo = historyEntry["compatibility"]["canUpdateTo"]; return { key: "key" in canUpdateTo ? 
canUpdateTo["key"] : key, // key is optional @@ -92,68 +95,65 @@ qx.Class.define("osparc.store.Services", { const services = this.__servicesCached; if ( key in services && - version in services[key] && - "released" in services[key][version] + version in services[key] ) { - return services[key][version]["released"]; + const serviceMetadata = services[key][version]; + return osparc.service.Utils.extractReleasedDateFromHistory(serviceMetadata); } return null; }, getService: function(key, version, useCache = true) { + if (!this.__servicesPromisesCached) { + this.__servicesPromisesCached = {}; + } + if (!(key in this.__servicesPromisesCached)) { + this.__servicesPromisesCached[key] = {}; + } + // avoid request deduplication - if (key in this.__servicesPromisesCached && version in this.__servicesPromisesCached[key]) { + if (this.__servicesPromisesCached[key][version]) { return this.__servicesPromisesCached[key][version]; } - // Create a new promise - const promise = new Promise((resolve, reject) => { - if ( - useCache && - this.__isInCache(key, version) && - ( - this.__servicesCached[key][version] === null || - "history" in this.__servicesCached[key][version] - ) - ) { - resolve(this.__servicesCached[key][version]); - return; - } + if ( + useCache && + this.__isInCache(key, version) && + ( + this.__servicesCached[key][version] === null || + "history" in this.__servicesCached[key][version] + ) + ) { + return Promise.resolve(this.__servicesCached[key][version]); + } - if (!(key in this.__servicesPromisesCached)) { - this.__servicesPromisesCached[key] = {}; - } - const params = { - url: osparc.data.Resources.getServiceUrl(key, version) - }; - this.__servicesPromisesCached[key][version] = osparc.data.Resources.fetch("services", "getOne", params) - .then(service => { - this.__addHit(service); - this.__addTSRInfo(service); - this.__addExtraTypeInfo(service); - this.__addServiceToCache(service); - // Resolve the promise locally before deleting it - resolve(service); - }) - .catch(err => { - // Store null in cache to avoid repeated failed requests - this.__addToCache(key, version, null); - console.error(err); - reject(err); - }) - .finally(() => { - // Remove the promise from the cache - delete this.__servicesPromisesCached[key][version]; - }); - }); + const params = { + url: osparc.data.Resources.getServiceUrl(key, version) + }; + const fetchPromise = osparc.data.Resources.fetch("services", "getOne", params) + .then(service => { + this.__addServiceToCache(service); + // Resolve the promise locally before deleting it + return service; + }) + .catch(err => { + // Store null in cache to avoid repeated failed requests + this.__addToCache(key, version, null); + console.error(err); + throw err; + }) + .finally(() => { + // Remove the promise from the cache + delete this.__servicesPromisesCached[key][version]; + }); // Store the promise in the cache // The point of keeping this assignment outside of the main Promise block is to // ensure that the promise is immediately stored in the cache before any asynchronous // operations (like fetch) are executed. This prevents duplicate requests for the // same key and version when multiple consumers call getService concurrently. 
- this.__servicesPromisesCached[key][version] = promise; - return promise; + this.__servicesPromisesCached[key][version] = fetchPromise; + return fetchPromise; }, getStudyServices: function(studyId) { @@ -168,6 +168,15 @@ qx.Class.define("osparc.store.Services", { } }; this.__studyServicesPromisesCached[studyId] = osparc.data.Resources.fetch("studies", "getServices", params) + .then(resp => { + const services = resp["services"]; + services.forEach(service => { + // this service information is not complete, keep it in cache anyway + service.version = service["release"]["version"]; + this.__addServiceToCache(service); + }); + return resp; + }) .finally(() => { delete this.__studyServicesPromisesCached[studyId]; }); @@ -234,7 +243,7 @@ qx.Class.define("osparc.store.Services", { } serviceLatest = osparc.utils.Utils.deepCloneObject(olderNonRetired); // make service metadata latest model like - serviceLatest["release"] = osparc.service.Utils.extractVersionFromHistory(olderNonRetired); + serviceLatest["release"] = osparc.service.Utils.getHistoryEntry(olderNonRetired); break; } } @@ -341,6 +350,32 @@ qx.Class.define("osparc.store.Services", { return msg; }, + getPricingPlan: function(serviceKey, serviceVersion) { + const serviceUrl = osparc.data.Resources.getServiceUrl(serviceKey, serviceVersion); + const key = serviceUrl["key"]; + const version = serviceUrl["version"]; + // check if the service is already cached + if ( + key in this.__pricingPlansCached && + version in this.__pricingPlansCached[key] + ) { + return Promise.resolve(this.__pricingPlansCached[key][version]); + } + + const plansParams = { + url: serviceUrl, + }; + return osparc.data.Resources.fetch("services", "pricingPlans", plansParams) + .then(pricingPlansData => { + // store the fetched pricing plans in the cache + if (!(key in this.__pricingPlansCached)) { + this.__pricingPlansCached[key] = {}; + } + this.__pricingPlansCached[key][version] = pricingPlansData; + return pricingPlansData; + }); + }, + getFilePicker: function() { return this.getLatest("simcore/services/frontend/file-picker"); }, @@ -367,7 +402,42 @@ qx.Class.define("osparc.store.Services", { return this.getLatest("simcore/services/frontend/iterator-consumer/probe/"+type); }, + getUnknownServiceMetadata: function() { + const key = this.UNKNOWN_SERVICE_KEY; + const version = "0.0.0"; + const versionDisplay = "Unknown"; + const releaseInfo = { + version, + versionDisplay, + retired: null, + released: "2025-08-07T11:00:00.000000", + compatibility: null, + }; + return { + key, + version, + versionDisplay, + description: "Unknown App", + type: "frontend", + name: "Unknown", + inputs: {}, + outputs: {}, + accessRights: { + 1: { + execute: true, + write: false, + } + }, + release: releaseInfo, + history: [releaseInfo], + }; + }, + __addServiceToCache: function(service) { + this.__addHit(service); + this.__addTSRInfo(service); + this.__addXTypeInfo(service); + const key = service.key; const version = service.version; service["resourceType"] = "service"; @@ -375,6 +445,24 @@ qx.Class.define("osparc.store.Services", { }, __addToCache: function(key, version, value) { + // some services that go to the cache are not complete: /latest, /study/services + // if the one in the cache is the complete one, do not overwrite it + if ( + this.__isInCache(key, version) && + this.__servicesCached[key][version] && + "history" in this.__servicesCached[key][version] // the most complete service metadata is already in cache + ) { + return; + } + if ( + this.__isInCache(key, version) && + 
this.__servicesCached[key][version] && + "inputs" in this.__servicesCached[key][version] && // this is the second most complete service metadata (/latest) + value && !("inputs" in value) // the one to be added is not more complete + ) { + return; + } + if (!(key in this.__servicesCached)) { this.__servicesCached[key] = {}; } @@ -414,7 +502,7 @@ qx.Class.define("osparc.store.Services", { }); }, - __addExtraTypeInfo: function(service) { + __addXTypeInfo: function(service) { service["xType"] = service["type"]; if (["backend", "frontend"].includes(service["xType"])) { if (osparc.data.model.Node.isFilePicker(service)) { @@ -429,10 +517,10 @@ qx.Class.define("osparc.store.Services", { } }, - __addExtraTypeInfos: function(servicesObj) { + __addXTypeInfos: function(servicesObj) { Object.values(servicesObj).forEach(serviceWVersion => { Object.values(serviceWVersion).forEach(service => { - this.__addExtraTypeInfo(service); + this.__addXTypeInfo(service); }); }); }, diff --git a/services/static-webserver/client/source/class/osparc/store/StaticInfo.js b/services/static-webserver/client/source/class/osparc/store/StaticInfo.js index 92c4b42bbd04..0026d8b8779f 100644 --- a/services/static-webserver/client/source/class/osparc/store/StaticInfo.js +++ b/services/static-webserver/client/source/class/osparc/store/StaticInfo.js @@ -16,10 +16,9 @@ ************************************************************************ */ qx.Class.define("osparc.store.StaticInfo", { - extend: qx.core.Object, - type: "singleton", + type: "static", - members: { + statics: { getValue: function(key) { const statics = osparc.store.Store.getInstance().get("statics"); if (key in statics) { @@ -69,11 +68,6 @@ qx.Class.define("osparc.store.StaticInfo", { return null; }, - areLicensesEnabled: function() { - const isDisabled = osparc.utils.DisabledPlugins.isLicensesDisabled(); - return !isDisabled; - }, - getTrashRetentionDays: function() { const staticKey = "webserverTrash"; const wsStaticData = this.getValue(staticKey); @@ -104,6 +98,10 @@ qx.Class.define("osparc.store.StaticInfo", { return false; }, + isUpdatePhoneNumberEnabled: function() { + return osparc.store.StaticInfo.isDevFeaturesEnabled() && osparc.store.StaticInfo.is2FARequired(); + }, + getCountries: function() { const staticKey = "countries"; const metadata = this.getValue(staticKey); diff --git a/services/static-webserver/client/source/class/osparc/store/Store.js b/services/static-webserver/client/source/class/osparc/store/Store.js index ab5107209c7d..b421d301c1ec 100644 --- a/services/static-webserver/client/source/class/osparc/store/Store.js +++ b/services/static-webserver/client/source/class/osparc/store/Store.js @@ -77,12 +77,16 @@ qx.Class.define("osparc.store.Store", { }, studyBrowserContext: { check: [ - "studiesAndFolders", - "workspaces", - "search", - "templates", - "public", - "trash", + "studiesAndFolders", // osparc.dashboard.StudyBrowser.CONTEXT.PROJECTS, + "workspaces", // osparc.dashboard.StudyBrowser.CONTEXT.WORKSPACES, + "templates", // osparc.dashboard.StudyBrowser.CONTEXT.TEMPLATES, + "publicTemplates", // osparc.dashboard.StudyBrowser.CONTEXT.PUBLIC_TEMPLATES, + "functions", // osparc.dashboard.StudyBrowser.CONTEXT.FUNCTIONS, + "trash", // osparc.dashboard.StudyBrowser.CONTEXT.TRASH, + "searchProjects", // osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PROJECTS, + "searchTemplates", // osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_TEMPLATES, + "searchPublicTemplates", // osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_PUBLIC_TEMPLATES, + "searchFunctions", // 
osparc.dashboard.StudyBrowser.CONTEXT.SEARCH_FUNCTIONS, ], init: "studiesAndFolders", nullable: false, @@ -100,7 +104,7 @@ qx.Class.define("osparc.store.Store", { check: "Array", init: [] }, - conversations: { + conversationsStudies: { check: "Array", init: [] }, @@ -178,6 +182,10 @@ qx.Class.define("osparc.store.Store", { check: "Array", init: [] }, + functionPermissions: { + check: "Object", + init: {} + }, apiKeys: { check: "Array", init: [] @@ -241,11 +249,10 @@ qx.Class.define("osparc.store.Store", { check: "Array", init: null, }, - }, - - events: { - "studyStateChanged": "qx.event.type.Data", - "studyDebtChanged": "qx.event.type.Data", + conversationsSupport: { + check: "Array", + init: [] + }, }, members: { @@ -383,10 +390,18 @@ qx.Class.define("osparc.store.Store", { const preferenceWalletId = preferenceSettings.getPreferredWalletId(); if ( (preferenceWalletId === null || osparc.desktop.credits.Utils.getWallet(preferenceWalletId) === null) && - wallets.length === 1 + wallets.length ) { - // If there is only one wallet available, make it default - preferenceSettings.requestChangePreferredWalletId(wallets[0].getWalletId()); + // If there is no default wallet set in preferences or the default wallet is not available anymore: + const myGroupId = osparc.auth.Data.getInstance().getGroupId(); + const myWallet = wallets.find(wallet => wallet.getOwner() === myGroupId); + if (myWallet) { + // select the personal wallet if it exists + preferenceSettings.requestChangePreferredWalletId(myWallet.getWalletId()); + } else { + // otherwise select the first wallet available + preferenceSettings.requestChangePreferredWalletId(wallets[0].getWalletId()); + } } else if (preferenceWalletId) { const walletFound = wallets.find(wallet => wallet.getWalletId() === preferenceWalletId); if (walletFound) { @@ -420,108 +435,6 @@ qx.Class.define("osparc.store.Store", { return null; }, - getStudyState: function(studyId) { - osparc.data.Resources.fetch("studies", "state", { - url: { - "studyId": studyId - } - }) - .then(({state}) => { - this.setStudyState(studyId, state); - }); - }, - - setStudyState: function(studyId, state) { - const studiesWStateCache = this.getStudies(); - const idx = studiesWStateCache.findIndex(studyWStateCache => studyWStateCache["uuid"] === studyId); - if (idx !== -1) { - studiesWStateCache[idx]["state"] = state; - } - - const currentStudy = this.getCurrentStudy(); - if (currentStudy && currentStudy.getUuid() === studyId) { - currentStudy.setState(state); - } - - this.fireDataEvent("studyStateChanged", { - studyId, - state, - }); - }, - - setStudyDebt: function(studyId, debt) { - const studiesWStateCache = this.getStudies(); - const idx = studiesWStateCache.findIndex(studyWStateCache => studyWStateCache["uuid"] === studyId); - if (idx !== -1) { - if (debt) { - studiesWStateCache[idx]["debt"] = debt; - } else { - delete studiesWStateCache[idx]["debt"]; - } - } - - this.fireDataEvent("studyDebtChanged", { - studyId, - debt, - }); - }, - - trashStudy: function(studyId) { - const params = { - url: { - studyId - } - }; - return new Promise((resolve, reject) => { - osparc.data.Resources.fetch("studies", "trash", params) - .then(() => { - this.remove("studies", "uuid", studyId); - resolve(); - }) - .catch(err => { - console.error(err); - reject(err); - }); - }); - }, - - untrashStudy: function(studyId) { - const params = { - url: { - studyId - } - }; - return new Promise((resolve, reject) => { - osparc.data.Resources.fetch("studies", "untrash", params) - .then(() => { - resolve(); - }) - .catch(err 
=> { - console.error(err); - reject(err); - }); - }); - }, - - deleteStudy: function(studyId) { - const params = { - url: { - studyId - } - }; - return new Promise((resolve, reject) => { - osparc.data.Resources.fetch("studies", "delete", params) - .then(() => { - this.remove("studies", "uuid", studyId); - resolve(); - }) - .catch(err => { - console.error(err); - reject(err); - }); - }); - }, - reloadCreditPrice: function() { const store = osparc.store.Store.getInstance(); store.setCreditPrice(null); diff --git a/services/static-webserver/client/source/class/osparc/store/Study.js b/services/static-webserver/client/source/class/osparc/store/Study.js index 9abefa8cfa08..b20bc06d6174 100644 --- a/services/static-webserver/client/source/class/osparc/store/Study.js +++ b/services/static-webserver/client/source/class/osparc/store/Study.js @@ -16,9 +16,138 @@ ************************************************************************ */ qx.Class.define("osparc.store.Study", { - type: "static", + extend: qx.core.Object, + type: "singleton", + + events: { + "studyStateChanged": "qx.event.type.Data", + "studyDebtChanged": "qx.event.type.Data", + }, + + members: { + __nodeResources: null, + __nodePricingUnit: null, + __studiesInDebt: null, + + invalidateStudies: function() { + osparc.store.Store.getInstance().invalidate("studies"); + }, + + getPage: function(params, options) { + return osparc.data.Resources.fetch("studies", "getPage", params, options) + }, + + getPageTrashed: function(params, options) { + return osparc.data.Resources.fetch("studies", "getPageTrashed", params, options) + }, + + getPageSearch: function(params, options) { + return osparc.data.Resources.fetch("studies", "getPageSearch", params, options); + }, + + getActive: function(clientSessionID) { + const params = { + url: { + tabId: clientSessionID, + } + }; + return osparc.data.Resources.fetch("studies", "getActive", params) + }, + + getOne: function(studyId) { + const params = { + url: { + studyId + } + }; + return osparc.data.Resources.fetch("studies", "getOne", params) + }, + + openStudy: function(studyId, autoStart = true) { + const params = { + url: { + studyId, + }, + data: osparc.utils.Utils.getClientSessionID() + }; + if (autoStart) { + return osparc.data.Resources.fetch("studies", "open", params); + } + params["url"]["disableServiceAutoStart"] = true; + return osparc.data.Resources.fetch("studies", "openDisableAutoStart", params); + }, + + closeStudy: function(studyId) { + const params = { + url: { + studyId, + }, + data: osparc.utils.Utils.getClientSessionID() + }; + return osparc.data.Resources.fetch("studies", "close", params); + }, + + createStudy: function(studyData) { + const params = { + data: studyData + }; + const options = { + pollTask: true, + }; + return osparc.data.Resources.fetch("studies", "postNewStudy", params, options); + }, + + createStudyFromTemplate: function(templateId, studyData) { + const params = { + url: { + templateId, + }, + data: studyData + }; + const options = { + pollTask: true, + }; + return osparc.data.Resources.fetch("studies", "postNewStudyFromTemplate", params, options); + }, + + duplicateStudy: function(studyId) { + const params = { + url: { + studyId, + } + }; + const options = { + pollTask: true + }; + return osparc.data.Resources.fetch("studies", "duplicate", params, options); + }, + + deleteStudy: function(studyId) { + const params = { + url: { + studyId + } + }; + return osparc.data.Resources.fetch("studies", "delete", params) + .then(() => { + 
osparc.store.Store.getInstance().remove("studies", "uuid", studyId); + }) + .catch(err => { + console.error(err); + throw err; + }); + }, + + patchStudy: function(studyId, patchData) { + const params = { + url: { + studyId, + }, + data: patchData + }; + return osparc.data.Resources.fetch("studies", "patch", params); + }, - statics: { patchStudyData: function(studyData, fieldKey, value) { if (osparc.data.model.Study.OwnPatch.includes(fieldKey)) { console.error(fieldKey, "has it's own PATCH path"); @@ -27,13 +156,7 @@ qx.Class.define("osparc.store.Study", { const patchData = {}; patchData[fieldKey] = value; - const params = { - url: { - "studyId": studyData["uuid"] - }, - data: patchData - }; - return osparc.data.Resources.fetch("studies", "patch", params) + return this.patchStudy(studyData["uuid"], patchData) .then(() => { studyData[fieldKey] = value; // A bit hacky, but it's not sent back to the backend @@ -42,16 +165,146 @@ qx.Class.define("osparc.store.Study", { }, patchTemplateType: function(templateId, templateType) { + return this.patchStudyData(templateId, "templateType", templateType); + }, + + updateMetadata: function(studyId, metadata) { + const params = { + url: { + studyId, + }, + data: metadata + }; + return osparc.data.Resources.fetch("studies", "updateMetadata", params); + }, + + fetchStudyState: function(studyId) { + osparc.data.Resources.fetch("studies", "state", { + url: { + "studyId": studyId + } + }) + .then(({state}) => { + this.setStudyState(studyId, state); + }); + }, + + setStudyState: function(studyId, state) { + const studiesWStateCache = osparc.store.Store.getInstance().getStudies(); + const idx = studiesWStateCache.findIndex(studyWStateCache => studyWStateCache["uuid"] === studyId); + if (idx !== -1) { + studiesWStateCache[idx]["state"] = state; + } + + const currentStudy = osparc.store.Store.getInstance().getCurrentStudy(); + if (currentStudy && currentStudy.getUuid() === studyId) { + currentStudy.setState(state); + } + + this.fireDataEvent("studyStateChanged", { + studyId, + state, + }); + }, + + setStudyDebt: function(studyId, debt) { + // init object if it does not exist + if (this.__studiesInDebt === null) { + this.__studiesInDebt = {}; + } + if (debt) { + this.__studiesInDebt[studyId] = debt; + } else { + delete this.__studiesInDebt[studyId]; + } + + const studiesWStateCache = osparc.store.Store.getInstance().getStudies(); + const idx = studiesWStateCache.findIndex(studyWStateCache => studyWStateCache["uuid"] === studyId); + if (idx !== -1) { + if (debt) { + studiesWStateCache[idx]["debt"] = debt; + } else { + delete studiesWStateCache[idx]["debt"]; + } + } + + this.fireDataEvent("studyDebtChanged", { + studyId, + debt, + }); + }, + + getStudyDebt: function(studyId) { + if (this.__studiesInDebt && studyId in this.__studiesInDebt) { + return this.__studiesInDebt[studyId]; + } + return null; + }, + + isStudyInDebt: function(studyId) { + return Boolean(this.getStudyDebt(studyId)); + }, + + payDebt: function(studyId, walletId, amount) { const params = { url: { - "studyId": templateId + studyId, + walletId, }, data: { - "templateType": templateType, + amount, + } + }; + return osparc.data.Resources.fetch("studies", "payDebt", params); + }, + + trashStudy: function(studyId) { + const params = { + url: { + studyId + } + }; + return osparc.data.Resources.fetch("studies", "trash", params) + .then(() => { + osparc.store.Store.getInstance().remove("studies", "uuid", studyId); + }) + .catch(err => { + console.error(err); + throw err; + }); + }, + + untrashStudy: 
function(studyId) { + const params = { + url: { + studyId + } + }; + return osparc.data.Resources.fetch("studies", "untrash", params) + .catch(err => { + console.error(err); + throw err; + }); + }, + + moveStudyToWorkspace: function(studyId, destWorkspaceId) { + const params = { + url: { + studyId, + workspaceId: destWorkspaceId, + } + }; + return osparc.data.Resources.fetch("studies", "moveToWorkspace", params); + }, + + moveStudyToFolder: function(studyId, destFolderId) { + const params = { + url: { + studyId, + folderId: destFolderId, } }; - return osparc.data.Resources.fetch("studies", "patch", params) - .catch(err => osparc.FlashMessenger.logError(err)); + return osparc.data.Resources.fetch("studies", "moveToFolder", params); }, patchNodeData: function(studyData, nodeId, patchData) { @@ -72,6 +325,61 @@ qx.Class.define("osparc.store.Study", { }); }, + getWallet: function(studyId) { + const params = { + url: { + studyId + } + }; + return osparc.data.Resources.fetch("studies", "getWallet", params) + .catch(err => { + osparc.FlashMessenger.logError(err); + throw err; + }); + }, + + selectWallet: function(studyId, walletId) { + const params = { + url: { + studyId, + walletId, + } + }; + return osparc.data.Resources.fetch("studies", "selectWallet", params) + .catch(err => { + osparc.FlashMessenger.logError(err); + throw err; + }); + }, + + addTag: function(studyId, tagId) { + const params = { + url: { + tagId, + studyId, + } + }; + return osparc.data.Resources.fetch("studies", "addTag", params) + .catch(err => { + console.error(err); + throw err; + }); + }, + + removeTag: function(studyId, tagId) { + const params = { + url: { + tagId, + studyId, + } + }; + return osparc.data.Resources.fetch("studies", "removeTag", params) + .catch(err => { + console.error(err); + throw err; + }); + }, + addCollaborators: function(studyData, newCollaborators) { const promises = []; Object.keys(newCollaborators).forEach(gid => { @@ -91,7 +399,10 @@ qx.Class.define("osparc.store.Study", { }); studyData["lastChangeDate"] = new Date().toISOString(); }) - .catch(err => osparc.FlashMessenger.logError(err)); + .catch(err => { + osparc.FlashMessenger.logError(err); + throw err; + }); }, removeCollaborator: function(studyData, gid) { @@ -106,7 +417,10 @@ qx.Class.define("osparc.store.Study", { delete studyData["accessRights"][gid]; studyData["lastChangeDate"] = new Date().toISOString(); }) - .catch(err => osparc.FlashMessenger.logError(err)); + .catch(err => { + osparc.FlashMessenger.logError(err); + throw err; + }); }, updateCollaborator: function(studyData, gid, newPermissions) { @@ -122,7 +436,10 @@ qx.Class.define("osparc.store.Study", { studyData["accessRights"][gid] = newPermissions; studyData["lastChangeDate"] = new Date().toISOString(); }) - .catch(err => osparc.FlashMessenger.logError(err)); + .catch(err => { + osparc.FlashMessenger.logError(err); + throw err; + }); }, sendShareEmails: function(studyData, selectedEmails, newAccessRights, message) { @@ -143,5 +460,123 @@ qx.Class.define("osparc.store.Study", { }); return Promise.all(promises); }, + + getNodeResources: function(studyId, nodeId) { + // init nodeResources if it is null + if (this.__nodeResources === null) { + this.__nodeResources = {}; + } + + // check if the resources for this node are already fetched + if ( + studyId in this.__nodeResources && + nodeId in this.__nodeResources[studyId] + ) { + return Promise.resolve(this.__nodeResources[studyId][nodeId]); + } + + const params = { + url: { + studyId, + nodeId, + } + }; + return 
osparc.data.Resources.get("nodesInStudyResources", params) + .then(resources => { + // store the fetched resources in the cache + if (!(studyId in this.__nodeResources)) { + this.__nodeResources[studyId] = {}; + } + this.__nodeResources[studyId][nodeId] = resources; + return resources; + }) + .catch(err => { + console.error("Failed to fetch node resources:", err); + throw err; + }); + }, + + updateNodeResources: function(studyId, nodeId, updatedResources) { + const params = { + url: { + studyId, + nodeId, + }, + data: updatedResources + }; + return osparc.data.Resources.fetch("nodesInStudyResources", "put", params) + .then(() => { + // update the cache + if (!(studyId in this.__nodeResources)) { + this.__nodeResources[studyId] = {}; + } + this.__nodeResources[studyId][nodeId] = updatedResources; + }); + }, + + getSelectedPricingUnit: function(studyId, nodeId) { + // init nodePricingUnit if it is null + if (this.__nodePricingUnit === null) { + this.__nodePricingUnit = {}; + } + + // check if the pricing unit for this node is already fetched + if ( + studyId in this.__nodePricingUnit && + nodeId in this.__nodePricingUnit[studyId] + ) { + return Promise.resolve(this.__nodePricingUnit[studyId][nodeId]); + } + + const params = { + url: { + studyId, + nodeId + } + }; + return osparc.data.Resources.fetch("studies", "getPricingUnit", params) + .then(selectedPricingUnit => { + // store the fetched pricing unit in the cache + if (!(studyId in this.__nodePricingUnit)) { + this.__nodePricingUnit[studyId] = {}; + } + this.__nodePricingUnit[studyId][nodeId] = selectedPricingUnit; + return selectedPricingUnit; + }) + .catch(err => { + console.error("Failed to fetch pricing units:", err); + throw err; + }); + }, + + updateSelectedPricingUnit: function(studyId, nodeId, planId, selectedPricingUnit) { + let pricingUnit = null; + if (selectedPricingUnit instanceof osparc.data.model.PricingUnit) { + // convert to JSON if it's a model instance + pricingUnit = JSON.parse(qx.util.Serializer.toJson(selectedPricingUnit)); + } else { + pricingUnit = osparc.utils.Utils.deepCloneObject(selectedPricingUnit); + } + const params = { + url: { + studyId, + nodeId, + pricingPlanId: planId, + pricingUnitId: pricingUnit["pricingUnitId"], + } + }; + return osparc.data.Resources.fetch("studies", "putPricingUnit", params) + .then(() => { + // update the cache + if (!(studyId in this.__nodePricingUnit)) { + this.__nodePricingUnit[studyId] = {}; + } + this.__nodePricingUnit[studyId][nodeId] = pricingUnit; + }) + .catch(err => { + console.error("Failed to update selected pricing unit:", err); + throw err; + }); + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/store/Support.js b/services/static-webserver/client/source/class/osparc/store/Support.js index 281127d06ffb..5ebaa5deb7d9 100644 --- a/services/static-webserver/client/source/class/osparc/store/Support.js +++ b/services/static-webserver/client/source/class/osparc/store/Support.js @@ -4,7 +4,7 @@ qx.Class.define("osparc.store.Support", { statics: { getLicenseURL: function() { - const vendor = osparc.store.VendorInfo.getInstance().getVendor(); + const vendor = osparc.store.VendorInfo.getVendor(); if (vendor) { if ("license_url" in vendor) { return vendor["license_url"]; @@ -16,36 +16,65 @@ qx.Class.define("osparc.store.Support", { }, getManuals: function() { - return osparc.store.VendorInfo.getInstance().getManuals(); + return osparc.store.VendorInfo.getManuals(); }, - addQuickStartToMenu: function(menu) { + __getQuickStartInfo: function() { const 
quickStart = osparc.product.quickStart.Utils.getQuickStart(); if (quickStart) { - const qsButton = new qx.ui.menu.Button(qx.locale.Manager.tr("Quick Start"), "@FontAwesome5Solid/graduation-cap/14"); + return { + label: qx.locale.Manager.tr("Introduction"), + icon: "@FontAwesome5Solid/graduation-cap/14", + callback: () => { + const tutorialWindow = quickStart.tutorial(); + tutorialWindow.center(); + tutorialWindow.open(); + } + } + } + return null; + }, + + addQuickStartToMenu: function(menu) { + const quickStartInfo = this.__getQuickStartInfo(); + if (quickStartInfo) { + const qsButton = new qx.ui.menu.Button(quickStartInfo.label, quickStartInfo.icon); qsButton.getChildControl("label").set({ rich: true }); - qsButton.addListener("execute", () => { - const tutorialWindow = quickStart.tutorial(); - tutorialWindow.center(); - tutorialWindow.open(); - }); + qsButton.addListener("execute", () => quickStartInfo.callback()); menu.add(qsButton); } }, - addGuidedToursToMenu: function(menu) { - const guidedToursButton = new qx.ui.menu.Button(qx.locale.Manager.tr("Guided Tours"), "@FontAwesome5Solid/graduation-cap/14"); - guidedToursButton.exclude(); - menu.add(guidedToursButton); + getQuickStartButton: function() { + const quickStartInfo = this.__getQuickStartInfo(); + if (quickStartInfo) { + const qsButton = new qx.ui.form.Button(quickStartInfo.label, quickStartInfo.icon); + qsButton.getChildControl("label").set({ + rich: true + }); + qsButton.addListener("execute", () => quickStartInfo.callback()); + return qsButton; + } + return null; + }, + + __getGuidedToursInfo: function() { + return { + label: qx.locale.Manager.tr("Guided Tours"), + icon: "@FontAwesome5Solid/graduation-cap/14", + } + }, + + populateGuidedToursButton: function(button) { const fetchTours = osparc.product.tours.Tours.getTours(); if (fetchTours) { fetchTours .then(tours => { if (tours) { - guidedToursButton.show(); - guidedToursButton.addListener("execute", () => { + button.show(); + button.addListener("execute", () => { const toursManager = new osparc.tours.Manager(); toursManager.setTours(tours); toursManager.start(); @@ -55,91 +84,198 @@ qx.Class.define("osparc.store.Support", { } }, - addManualButtonsToMenu: function(menu, menuButton) { + addGuidedToursToMenu: function(menu) { + const guidedToursInfo = this.__getGuidedToursInfo(); + const guidedToursButton = new qx.ui.menu.Button(guidedToursInfo.label, guidedToursInfo.icon); + guidedToursButton.exclude(); + menu.add(guidedToursButton); + this.populateGuidedToursButton(guidedToursButton); + }, + + getGuidedToursButton: function() { + const guidedToursInfo = this.__getGuidedToursInfo(); + const guidedToursButton = new qx.ui.form.Button(guidedToursInfo.label, guidedToursInfo.icon); + guidedToursButton.exclude(); + this.populateGuidedToursButton(guidedToursButton); + return guidedToursButton; + }, + + addManualsToMenu: function(menu) { const manuals = osparc.store.Support.getManuals(); - if (menuButton) { - menuButton.setVisibility(manuals && manuals.length ? 
"visible" : "excluded"); + const addManuals = mn => { + manuals.forEach(manual => { + const manualBtn = new qx.ui.menu.Button(manual.label, "@FontAwesome5Solid/book/14"); + manualBtn.getChildControl("label").set({ + rich: true + }); + manualBtn.addListener("execute", () => window.open(manual.url), this); + mn.add(manualBtn); + }); + }; + if (manuals.length > 1) { + // if there are more than 1 manuals, add them in their own menu + const ownMenu = new qx.ui.menu.Menu().set({ + appearance: "menu-wider", + }); + const manualsBtn = new qx.ui.menu.Button(qx.locale.Manager.tr("Manuals"), "@FontAwesome5Solid/book/14"); + manualsBtn.setMenu(ownMenu); + menu.add(manualsBtn); + addManuals(ownMenu); + } else { + addManuals(menu); } + }, + + getManualButtons: function() { + const manuals = osparc.store.Support.getManuals(); + const manualButtons = []; manuals.forEach(manual => { - const manualBtn = new qx.ui.menu.Button(manual.label, "@FontAwesome5Solid/book/14"); + const manualBtn = new qx.ui.form.Button(manual.label, "@FontAwesome5Solid/book/14"); manualBtn.getChildControl("label").set({ rich: true }); manualBtn.addListener("execute", () => window.open(manual.url), this); - menu.add(manualBtn); + manualButtons.push(manualBtn); }); + return manualButtons; }, - addSupportButtonsToMenu: function(menu, menuButton) { - const issues = osparc.store.VendorInfo.getInstance().getIssues(); - const supports = osparc.store.VendorInfo.getInstance().getSupports(); - if (menuButton) { - menuButton.setVisibility(issues.length || supports.length ? "visible" : "excluded"); - } + __getIssueInfos: function() { + const issuesInfos = []; + const issues = osparc.store.VendorInfo.getIssues(); issues.forEach(issueInfo => { - const label = issueInfo["label"]; - const issueButton = new qx.ui.menu.Button(label, "@FontAwesome5Solid/comments/14"); - issueButton.getChildControl("label").set({ - rich: true + issuesInfos.push({ + label: issueInfo["label"], + icon: "@FontAwesome5Solid/comments/14", + callback: () => { + const issueConfirmationWindow = new osparc.ui.window.Dialog(issueInfo["label"] + " " + qx.locale.Manager.tr("Information"), null, + qx.locale.Manager.tr("To create an issue, you must have an account and be already logged-in.") + ); + const continueBtn = new qx.ui.form.Button(qx.locale.Manager.tr("Continue"), "@FontAwesome5Solid/external-link-alt/14"); + continueBtn.addListener("execute", () => { + window.open(issueInfo["new_url"]); + issueConfirmationWindow.close(); + }, this); + const loginBtn = new qx.ui.form.Button(qx.locale.Manager.tr("Log in in ") + issueInfo["label"], "@FontAwesome5Solid/external-link-alt/14"); + loginBtn.addListener("execute", () => window.open(issueInfo["login_url"]), this); + issueConfirmationWindow.addButton(continueBtn); + issueConfirmationWindow.addButton(loginBtn); + issueConfirmationWindow.addCancelButton(); + issueConfirmationWindow.open(); + }, }); - issueButton.addListener("execute", () => { - const issueConfirmationWindow = new osparc.ui.window.Dialog(label + " " + qx.locale.Manager.tr("Information"), null, - qx.locale.Manager.tr("To create an issue, you must have an account and be already logged-in.") - ); - const contBtn = new qx.ui.form.Button(qx.locale.Manager.tr("Continue"), "@FontAwesome5Solid/external-link-alt/14"); - contBtn.addListener("execute", () => { - window.open(issueInfo["new_url"]); - issueConfirmationWindow.close(); - }, this); - const loginBtn = new qx.ui.form.Button(qx.locale.Manager.tr("Log in in ") + label, "@FontAwesome5Solid/external-link-alt/14"); - 
loginBtn.addListener("execute", () => window.open(issueInfo["login_url"]), this); - issueConfirmationWindow.addButton(contBtn); - issueConfirmationWindow.addButton(loginBtn); - issueConfirmationWindow.addCancelButton(); - issueConfirmationWindow.open(); - }, this); - menu.add(issueButton); }); + return issuesInfos; + }, - if (issues.length && supports.length) { - menu.addSeparator(); - } - + __getSupportInfos: function() { + const supportInfos = []; + const supports = osparc.store.VendorInfo.getSupports(); supports.forEach(supportInfo => { - const supportBtn = new qx.ui.menu.Button(supportInfo["label"]); - supportBtn.getChildControl("label").set({ - rich: true - }); + const label = supportInfo["label"]; let icon = null; - let cb = null; + let callback = null; switch (supportInfo["kind"]) { case "web": icon = "@FontAwesome5Solid/link/14"; - cb = () => window.open(supportInfo["url"]); + callback = () => window.open(supportInfo["url"]); break; case "forum": icon = "@FontAwesome5Solid/comments/14"; - cb = () => window.open(supportInfo["url"]); + callback = () => window.open(supportInfo["url"]); break; case "email": + if (osparc.store.Groups.getInstance().isSupportEnabled()) { + // if support is enabled, ignore the email option + return; + } icon = "@FontAwesome5Solid/envelope/14"; - cb = () => this.__openSendEmailFeedbackDialog(supportInfo["email"]); + callback = () => this.__openSendEmailFeedbackDialog(supportInfo["email"]); break; } - supportBtn.setIcon(icon); - supportBtn.addListener("execute", () => cb(), this); + supportInfos.push({ + label, + icon, + callback, + }); + }); + return supportInfos; + }, + + addSupportButtonsToMenu: function(menu) { + const issuesInfos = this.__getIssueInfos(); + issuesInfos.forEach(issueInfo => { + const issueButton = new qx.ui.menu.Button(issueInfo.label, issueInfo.icon); + issueButton.getChildControl("label").set({ + rich: true + }); + issueButton.addListener("execute", issueInfo.callback, this); + menu.add(issueButton); + }); + + const supportInfos = this.__getSupportInfos(); + if (issuesInfos.length && supportInfos.length) { + menu.addSeparator(); + } + + supportInfos.forEach(supportInfo => { + const supportBtn = new qx.ui.menu.Button(supportInfo.label, supportInfo.icon); + supportBtn.getChildControl("label").set({ + rich: true + }); + supportBtn.addListener("execute", supportInfo.callback, this); menu.add(supportBtn); }); }, - addReleaseNotesToMenu: function(menu) { + getSupportButtons: function() { + const buttons = []; + const issuesInfos = this.__getIssueInfos(); + issuesInfos.forEach(issueInfo => { + const issueButton = new qx.ui.form.Button(issueInfo.label, issueInfo.icon); + issueButton.getChildControl("label").set({ + rich: true + }); + issueButton.addListener("execute", issueInfo.callback, this); + buttons.push(issueButton); + }); + + const supportInfos = this.__getSupportInfos(); + supportInfos.forEach(supportInfo => { + const supportBtn = new qx.ui.form.Button(supportInfo.label, supportInfo.icon); + supportBtn.getChildControl("label").set({ + rich: true + }); + supportBtn.addListener("execute", supportInfo.callback, this); + buttons.push(supportBtn); + }); + return buttons; + }, + + __getReleaseInfo: function() { const releaseTag = osparc.utils.Utils.getReleaseTag(); const releaseLink = osparc.utils.Utils.getReleaseLink(); - const releaseBtn = new qx.ui.menu.Button(qx.locale.Manager.tr("Release Notes") + " " + releaseTag, "@FontAwesome5Solid/book/14"); - releaseBtn.addListener("execute", () => window.open(releaseLink), this); + return { + 
label: qx.locale.Manager.tr("What's New in") + " " + releaseTag, + icon: "@FontAwesome5Solid/bullhorn/14", + callback: () => { window.open(releaseLink); }, + }; + }, + + addReleaseNotesToMenu: function(menu) { + const releaseInfo = this.__getReleaseInfo(); + const releaseBtn = new qx.ui.menu.Button(releaseInfo.label, releaseInfo.icon); + releaseBtn.addListener("execute", releaseInfo.callback, this); menu.add(releaseBtn); }, + getReleaseNotesButton: function() { + const releaseInfo = this.__getReleaseInfo(); + const releaseBtn = new qx.ui.form.Button(releaseInfo.label, releaseInfo.icon); + releaseBtn.addListener("execute", releaseInfo.callback, this); + return releaseBtn; + }, + mailToLink: function(email, subject, centered = true) { const color = qx.theme.manager.Color.getInstance().resolve("text"); let textLink = `${email}`; @@ -149,13 +285,26 @@ qx.Class.define("osparc.store.Support", { return textLink; }, + requestAccountLink: function(centered = true) { + const color = qx.theme.manager.Color.getInstance().resolve("text"); + const link = window.location.origin + "/#/request-account"; + let textLink = `Request Account`; + if (centered) { + textLink = `
${textLink}
` + } + return textLink; + }, + getMailToLabel: function(email, subject) { - const mailto = new qx.ui.basic.Label(this.mailToLink(email, subject, false)).set({ + const mailto = new qx.ui.basic.Label().set({ font: "text-14", allowGrowX: true, // let it grow to make it easier to select selectable: true, rich: true, }); + if (email) { + mailto.setValue(this.mailToLink(email, subject, false)); + } return mailto; }, @@ -173,9 +322,9 @@ qx.Class.define("osparc.store.Support", { maxWidth: 380 }); osparc.utils.Utils.setIdToWidget(createAccountWindow, "createAccountWindow"); - const vendor = osparc.store.VendorInfo.getInstance().getVendor(); + const vendor = osparc.store.VendorInfo.getVendor(); if ("invitation_url" in vendor) { - const displayName = osparc.store.StaticInfo.getInstance().getDisplayName(); + const displayName = osparc.store.StaticInfo.getDisplayName(); let message = qx.locale.Manager.tr("Registration is currently only available with an invitation."); message += "
"; message += qx.locale.Manager.tr("Please request access to ") + displayName + ":"; diff --git a/services/static-webserver/client/source/class/osparc/store/Templates.js b/services/static-webserver/client/source/class/osparc/store/Templates.js index 33b0f8b4b9e3..ef03e8625dfc 100644 --- a/services/static-webserver/client/source/class/osparc/store/Templates.js +++ b/services/static-webserver/client/source/class/osparc/store/Templates.js @@ -24,7 +24,21 @@ qx.Class.define("osparc.store.Templates", { __hypertools: null, __hypertoolsPromiseCached: null, - __fetchTemplatesPaginated: function(params, options) { + createTemplate: function(studyId, copyData = true, hidden = false) { + const params = { + url: { + "study_id": studyId, + "copy_data": copyData, + hidden, + }, + }; + const options = { + pollTask: true + }; + return osparc.data.Resources.fetch("templates", "postToTemplate", params, options); + }, + + fetchTemplatesPaginated: function(params, options) { params["url"]["templateType"] = osparc.data.model.StudyUI.TEMPLATE_TYPE; return osparc.data.Resources.fetch("templates", "getPageFilteredSorted", params, options) .then(response => { @@ -35,21 +49,19 @@ qx.Class.define("osparc.store.Templates", { .catch(err => osparc.FlashMessenger.logError(err)); }, - fetchTemplatesNonPublicPaginated: function(params, options) { - return this.__fetchTemplatesPaginated(params, options); - }, - - fetchTemplatesPublicPaginated: function(params, options) { - return this.__fetchTemplatesPaginated(params, options); + searchTemplatesPaginated: function(params, options) { + params["url"]["templateType"] = osparc.data.model.StudyUI.TEMPLATE_TYPE; + return osparc.data.Resources.fetch("templates", "getPageSearchFilteredSorted", params, options) + .then(response => { + const templates = response["data"]; + templates.forEach(template => template["resourceType"] = "template"); + return response; + }) + .catch(err => osparc.FlashMessenger.logError(err)); }, - fetchTemplate: function(templateId) { - const params = { - url: { - "studyId": templateId, - } - }; - return osparc.data.Resources.fetch("studies", "getOne", params) + fetchTemplate: function(studyId) { + return osparc.store.Study.getInstance().getOne(studyId) .catch(err => console.error(err)); }, diff --git a/services/static-webserver/client/source/class/osparc/store/Users.js b/services/static-webserver/client/source/class/osparc/store/Users.js index 693df0b59c7f..2b0ecfbe33c9 100644 --- a/services/static-webserver/client/source/class/osparc/store/Users.js +++ b/services/static-webserver/client/source/class/osparc/store/Users.js @@ -19,6 +19,12 @@ qx.Class.define("osparc.store.Users", { extend: qx.core.Object, type: "singleton", + construct: function() { + this.base(arguments); + + this.__unknowns = []; + }, + properties: { users: { check: "Array", @@ -28,6 +34,8 @@ qx.Class.define("osparc.store.Users", { }, members: { + __unknowns: null, + __fetchUser: function(groupId) { const params = { url: { @@ -38,10 +46,17 @@ qx.Class.define("osparc.store.Users", { .then(userData => { const user = this.addUser(userData[0]); return user; + }) + .catch(() => { + this.__unknowns.push(groupId); + return null; }); }, getUser: async function(groupId, fetchIfNotFound = true) { + if (this.__unknowns.includes(groupId)) { + return null; + } const userFound = this.getUsers().find(user => user.getGroupId() === groupId); if (userFound) { return userFound; diff --git a/services/static-webserver/client/source/class/osparc/store/VendorInfo.js 
b/services/static-webserver/client/source/class/osparc/store/VendorInfo.js index bcea60318205..e754c86651f0 100644 --- a/services/static-webserver/client/source/class/osparc/store/VendorInfo.js +++ b/services/static-webserver/client/source/class/osparc/store/VendorInfo.js @@ -16,12 +16,11 @@ ************************************************************************ */ qx.Class.define("osparc.store.VendorInfo", { - extend: qx.core.Object, - type: "singleton", + type: "static", - members: { + statics: { __getFromStaticInfo: function(key, defaultValue) { - const staticValue = osparc.store.StaticInfo.getInstance().getValue(key); + const staticValue = osparc.store.StaticInfo.getValue(key); if (staticValue) { return staticValue; } diff --git a/services/static-webserver/client/source/class/osparc/study/BillingSettings.js b/services/static-webserver/client/source/class/osparc/study/BillingSettings.js index 85a4b83cdcb4..4a8ae8fe604b 100644 --- a/services/static-webserver/client/source/class/osparc/study/BillingSettings.js +++ b/services/static-webserver/client/source/class/osparc/study/BillingSettings.js @@ -33,6 +33,7 @@ qx.Class.define("osparc.study.BillingSettings", { events: { "debtPayed": "qx.event.type.Event", + "closeWindow": "qx.event.type.Event", }, members: { @@ -118,12 +119,7 @@ qx.Class.define("osparc.study.BillingSettings", { const walletSelector = this.getChildControl("wallet-selector"); - const paramsGet = { - url: { - studyId: this.__studyData["uuid"] - } - }; - osparc.data.Resources.fetch("studies", "getWallet", paramsGet) + osparc.store.Study.getInstance().getWallet(this.__studyData["uuid"]) .then(wallet => { if (wallet) { this.__studyWalletId = wallet["walletId"]; @@ -242,16 +238,7 @@ qx.Class.define("osparc.study.BillingSettings", { __doTransferCredits: function() { const wallet = this.__getSelectedWallet(); - const params = { - url: { - studyId: this.__studyData["uuid"], - walletId: wallet.getWalletId(), - }, - data: { - amount: this.__studyData["debt"], - } - }; - osparc.data.Resources.fetch("studies", "payDebt", params) + osparc.store.Study.getInstance().payDebt(this.__studyData["uuid"], wallet.getWalletId(), this.__studyData["debt"]) .then(() => { // at this point we can assume that the study got unblocked this.__debtPayed(); @@ -263,7 +250,7 @@ qx.Class.define("osparc.study.BillingSettings", { __debtPayed: function() { delete this.__studyData["debt"]; - osparc.store.Store.getInstance().setStudyDebt(this.__studyData["uuid"], 0); + osparc.store.Study.getInstance().setStudyDebt(this.__studyData["uuid"], 0); this.fireEvent("debtPayed"); if (this.__debtMessage) { this._remove(this.__debtMessage); @@ -274,19 +261,18 @@ qx.Class.define("osparc.study.BillingSettings", { __switchWallet: function(walletId) { const creditAccountBox = this.getChildControl("credit-account-box"); creditAccountBox.setEnabled(false); - const paramsPut = { - url: { - studyId: this.__studyData["uuid"], - walletId - } - }; - osparc.data.Resources.fetch("studies", "selectWallet", paramsPut) + osparc.store.Study.getInstance().selectWallet(this.__studyData["uuid"], walletId) .then(() => { this.__studyWalletId = walletId; const msg = this.tr("Credit Account saved"); osparc.FlashMessenger.logAs(msg, "INFO"); }) - .catch(err => osparc.FlashMessenger.logError(err)) + .catch(err => { + if ("status" in err && err["status"] == 402) { + osparc.study.Utils.extractDebtFromError(this.__studyData["uuid"], err); + } + this.fireEvent("closeWindow"); + }) .finally(() => { creditAccountBox.setEnabled(true); }); diff --git 
a/services/static-webserver/client/source/class/osparc/study/Conversation.js b/services/static-webserver/client/source/class/osparc/study/Conversation.js new file mode 100644 index 000000000000..1535202eca84 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/study/Conversation.js @@ -0,0 +1,103 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2023 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.study.Conversation", { + extend: osparc.conversation.Conversation, + + /** + * @param studyData {String} Study Data + * @param conversation {osparc.data.model.Conversation} Conversation + */ + construct: function(studyData, conversation) { + this.__studyData = studyData; + this.base(arguments, conversation); + }, + + members: { + __studyData: null, + + _buildLayout: function() { + this.base(arguments); + + const addMessage = this.getChildControl("add-message").set({ + studyData: this.__studyData, + enabled: osparc.data.model.Study.canIWrite(this.__studyData["accessRights"]), + }); + addMessage.addListener("addMessage", e => { + const content = e.getData(); + const conversation = this.getConversation(); + if (conversation) { + this.__postMessage(content); + } else { + // create new conversation first + osparc.store.ConversationsProject.getInstance().postConversation(this.__studyData["uuid"]) + .then(data => { + const newConversation = new osparc.data.model.Conversation(data, this.__studyData["uuid"]); + this.setConversation(newConversation); + this.__postMessage(content); + }); + } + }); + addMessage.addListener("notifyUser", e => { + const userGid = e.getData(); + const conversation = this.getConversation(); + if (conversation) { + this.__postNotify(userGid); + } else { + // create new conversation first + osparc.store.ConversationsProject.getInstance().postConversation(this.__studyData["uuid"]) + .then(data => { + const newConversation = new osparc.data.model.Conversation(data, this.__studyData["uuid"]); + this.setConversation(newConversation); + this.__postNotify(userGid); + }); + } + }); + }, + + _createMessageUI: function(message) { + const messageUI = new osparc.conversation.MessageUI(message, this.__studyData); + messageUI.getChildControl("message-content").set({ + measurerMaxWidth: 400, + }); + return messageUI; + }, + + __postMessage: function(content) { + const conversationId = this.getConversation().getConversationId(); + osparc.store.ConversationsProject.getInstance().postMessage(this.__studyData["uuid"], conversationId, content); + }, + + __postNotify: function(userGid) { + const conversationId = this.getConversation().getConversationId(); + osparc.store.ConversationsProject.getInstance().notifyUser(this.__studyData["uuid"], conversationId, userGid) + .then(data => { + this.fireDataEvent("messageAdded", data); + const potentialCollaborators = osparc.store.Groups.getInstance().getPotentialCollaborators(); + if (userGid in potentialCollaborators) { + if ("getUserId" in potentialCollaborators[userGid]) { + const uid = potentialCollaborators[userGid].getUserId(); + osparc.notification.Notifications.pushConversationNotification(uid, this.__studyData["uuid"]); + } + const msg = "getLabel" in potentialCollaborators[userGid] ? 
potentialCollaborators[userGid].getLabel() + this.tr(" was notified") : this.tr("Notification sent"); + osparc.FlashMessenger.logAs(msg, "INFO"); + } + }); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/study/ConversationPage.js b/services/static-webserver/client/source/class/osparc/study/ConversationPage.js new file mode 100644 index 000000000000..582d0190d79b --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/study/ConversationPage.js @@ -0,0 +1,193 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2023 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.study.ConversationPage", { + extend: qx.ui.tabview.Page, + + /** + * @param studyData {String} Study Data + * @param conversationData {Object} Conversation Data + */ + construct: function(studyData, conversationData) { + this.base(arguments); + + this.__studyData = studyData; + this.__messages = []; + + this._setLayout(new qx.ui.layout.VBox(5)); + + this.set({ + padding: 10, + showCloseButton: false, + }); + + + this.bind("conversation", this.getChildControl("button"), "label", { + converter: conversation => conversation ? conversation.getName() : this.tr("new") + }); + this.getChildControl("button").set({ + font: "text-13", + }); + this.__addConversationButtons(); + + this.__buildLayout(); + + if (conversationData) { + const conversation = new osparc.data.model.Conversation(conversationData, this.__studyData["uuid"]); + this.setConversation(conversation); + } + + }, + + properties: { + conversation: { + check: "osparc.data.model.Conversation", + init: null, + nullable: true, + event: "changeConversation", + }, + }, + + members: { + __studyData: null, + + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "n-messages": + control = new qx.ui.basic.Label(); + this._add(control); + break; + case "conversation": + control = new osparc.study.Conversation(this.__studyData); + this.bind("conversation", control, "conversation"); + control.addListener("messagesChanged", () => this.__updateMessagesNumber()); + this._add(control, { + flex: 1, + }); + break; + } + return control || this.base(arguments, id); + }, + + __buildLayout: function() { + this.getChildControl("n-messages"); + this.getChildControl("conversation"); + }, + + getConversationId: function() { + if (this.getConversation()) { + return this.getConversation().getConversationId(); + } + return null; + }, + + __addConversationButtons: function() { + const tabButton = this.getChildControl("button"); + + const buttonsAesthetics = { + focusable: false, + keepActive: true, + padding: 0, + backgroundColor: "transparent", + }; + const renameButton = new qx.ui.form.Button(null, "@FontAwesome5Solid/pencil-alt/10").set({ + ...buttonsAesthetics, + visibility: osparc.data.model.Study.canIWrite(this.__studyData["accessRights"]) ? 
"visible" : "excluded", + }); + renameButton.addListener("execute", () => { + const titleEditor = new osparc.widget.Renamer(tabButton.getLabel()).set({ + maxChars: osparc.data.model.Conversation.MAX_TITLE_LENGTH, + }); + titleEditor.addListener("labelChanged", e => { + titleEditor.close(); + const newLabel = e.getData()["newLabel"]; + if (this.getConversationId()) { + osparc.store.ConversationsProject.getInstance().renameConversation(this.__studyData["uuid"], this.getConversationId(), newLabel) + .then(() => this.renameConversation(newLabel)); + } else { + // create new conversation first + osparc.store.ConversationsProject.getInstance().postConversation(this.__studyData["uuid"], newLabel) + .then(data => { + const conversation = new osparc.data.model.Conversation(data, this.__studyData["uuid"]); + this.setConversation(conversation); + this.getChildControl("button").setLabel(newLabel); + }); + } + }, this); + titleEditor.center(); + titleEditor.open(); + }); + // eslint-disable-next-line no-underscore-dangle + tabButton._add(renameButton, { + row: 0, + column: 3 + }); + + const closeButton = new qx.ui.form.Button(null, "@FontAwesome5Solid/times/12").set({ + ...buttonsAesthetics, + paddingLeft: 4, // adds spacing between buttons + visibility: osparc.data.model.Study.canIWrite(this.__studyData["accessRights"]) ? "visible" : "excluded", + }); + closeButton.addListener("execute", () => { + const messages = this.getChildControl("conversation").getMessages(); + if (messages.length === 0) { + osparc.store.ConversationsProject.getInstance().deleteConversation(this.__studyData["uuid"], this.getConversationId()); + } else { + const msg = this.tr("Are you sure you want to delete the conversation?"); + const confirmationWin = new osparc.ui.window.Confirmation(msg).set({ + caption: this.tr("Delete Conversation"), + confirmText: this.tr("Delete"), + confirmAction: "delete" + }); + confirmationWin.open(); + confirmationWin.addListener("close", () => { + if (confirmationWin.getConfirmed()) { + osparc.store.ConversationsProject.getInstance().deleteConversation(this.__studyData["uuid"], this.getConversationId()); + } + }, this); + } + }); + // eslint-disable-next-line no-underscore-dangle + tabButton._add(closeButton, { + row: 0, + column: 4 + }); + this.bind("conversation", closeButton, "visibility", { + converter: value => value ? 
"visible" : "excluded" + }); + }, + + renameConversation: function(newName) { + this.getChildControl("button").setLabel(newName); + }, + + __updateMessagesNumber: function() { + const nMessagesLabel = this.getChildControl("n-messages"); + const messages = this.getChildControl("conversation").getMessages(); + const nMessages = messages.filter(msg => msg["type"] === "MESSAGE").length; + if (nMessages === 0) { + nMessagesLabel.setValue(this.tr("No Messages yet")); + } else if (nMessages === 1) { + nMessagesLabel.setValue(this.tr("1 Message")); + } else if (nMessages > 1) { + nMessagesLabel.setValue(nMessages + this.tr(" Messages")); + } + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/study/Conversations.js b/services/static-webserver/client/source/class/osparc/study/Conversations.js index d43a945cf0cd..bf0d7c0e33a6 100644 --- a/services/static-webserver/client/source/class/osparc/study/Conversations.js +++ b/services/static-webserver/client/source/class/osparc/study/Conversations.js @@ -20,82 +20,65 @@ qx.Class.define("osparc.study.Conversations", { extend: qx.ui.core.Widget, /** - * @param studyData {String} Study Data + * @param studyData {Object} Study Data */ - construct: function(studyData) { + construct: function(studyData, openConversationId = null) { this.base(arguments); this._setLayout(new qx.ui.layout.VBox()); - this.fetchConversations(studyData); + this.__conversationsPages = []; + this.__openConversationId = openConversationId; + + this.set({ + studyData, + }); + + this.__listenToConversationWS(); + }, + + properties: { + studyData: { + check: "Object", + init: null, + nullable: false, + apply: "__applyStudyData", + }, }, statics: { - popUpInWindow: function(studyData) { - const conversations = new osparc.study.Conversations(studyData); + popUpInWindow: function(studyData, openConversationId = null) { + const conversations = new osparc.study.Conversations(studyData, openConversationId); const title = qx.locale.Manager.tr("Conversations"); const viewWidth = 600; const viewHeight = 700; - const win = osparc.ui.window.Window.popUpInWindow(conversations, title, viewWidth, viewHeight); + const win = osparc.ui.window.Window.popUpInWindow(conversations, title, viewWidth, viewHeight).set({ + maxHeight: viewHeight, + }); + win.addListener("close", () => { + conversations.destroy(); + }, this); return win; }, - addConversation: function(studyId, name = "new 1") { - const params = { - url: { - studyId, - }, - data: { - name, - "type": "PROJECT_STATIC", - } - }; - return osparc.data.Resources.fetch("conversations", "addConversation", params) - .catch(err => osparc.FlashMessenger.logError(err)); - }, - - deleteConversation: function(studyId, conversationId) { - const params = { - url: { - studyId, - conversationId, - }, - }; - return osparc.data.Resources.fetch("conversations", "deleteConversation", params) - .catch(err => osparc.FlashMessenger.logError(err)); - }, - - renameConversation: function(studyId, conversationId, name) { - const params = { - url: { - studyId, - conversationId, - }, - data: { - name, - } - }; - return osparc.data.Resources.fetch("conversations", "renameConversation", params) - .catch(err => osparc.FlashMessenger.logError(err)); - }, - - addMessage: function(studyId, conversationId, message) { - const params = { - url: { - studyId, - conversationId, - }, - data: { - "content": message, - "type": "MESSAGE", - } - }; - return osparc.data.Resources.fetch("conversations", "addMessage", params) - .catch(err => 
osparc.FlashMessenger.logError(err)); + makeButtonBlink: function(button) { + const socket = osparc.wrapper.WebSocket.getInstance(); + Object.values(osparc.data.model.Conversation.CHANNELS).forEach(eventName => { + socket.on(eventName, () => { + if (button) { + osparc.utils.Utils.makeButtonBlink(button); + } + }); + }); }, }, members: { + __openConversationId: null, + __conversations: null, + __newConversationButton: null, + __wsHandlers: null, + _createChildControlImpl: function(id) { let control; switch (id) { @@ -114,67 +97,189 @@ qx.Class.define("osparc.study.Conversations", { return control || this.base(arguments, id); }, - fetchConversations: function(studyData) { + __listenToConversationWS: function() { + this.__wsHandlers = []; + + const socket = osparc.wrapper.WebSocket.getInstance(); + + [ + osparc.data.model.Conversation.CHANNELS.CONVERSATION_CREATED, + osparc.data.model.Conversation.CHANNELS.CONVERSATION_UPDATED, + osparc.data.model.Conversation.CHANNELS.CONVERSATION_DELETED, + ].forEach(eventName => { + const eventHandler = conversation => { + if (conversation) { + switch (eventName) { + case osparc.data.model.Conversation.CHANNELS.CONVERSATION_CREATED: + if (conversation["projectId"] === this.getStudyData()["uuid"]) { + this.__addConversationPage(conversation); + } + break; + case osparc.data.model.Conversation.CHANNELS.CONVERSATION_UPDATED: + this.__updateConversationName(conversation); + break; + case osparc.data.model.Conversation.CHANNELS.CONVERSATION_DELETED: + this.__removeConversationPage(conversation["conversationId"]); + break; + } + } + }; + socket.on(eventName, eventHandler, this); + this.__wsHandlers.push({ eventName, handler: eventHandler }); + }); + }, + + __getConversationPage: function(conversationId) { + return this.__conversationsPages.find(conversationPage => conversationPage.getConversationId() === conversationId); + }, + + __applyStudyData: function(studyData) { const loadMoreButton = this.getChildControl("loading-button"); loadMoreButton.setFetching(true); - const params = { - url: { - studyId: studyData["uuid"], - offset: 0, - limit: 42, - } - }; - osparc.data.Resources.fetch("conversations", "getConversationsPage", params) - .then(conversations => this.__addConversations(conversations, studyData)) + osparc.store.ConversationsProject.getInstance().getConversations(studyData["uuid"]) + .then(conversations => { + if (conversations.length) { + conversations.forEach(conversation => this.__addConversationPage(conversation)); + if (this.__openConversationId) { + const conversationsLayout = this.getChildControl("conversations-layout"); + const conversation = conversationsLayout.getSelectables().find(c => c.getConversationId() === this.__openConversationId); + if (conversation) { + conversationsLayout.setSelection([conversation]); + } + this.__openConversationId = null; // reset it so it does not open again + } + } else { + this.__addTempConversationPage(); + } + }) .finally(() => { loadMoreButton.setFetching(false); loadMoreButton.exclude(); }); }, - __addConversations: function(conversations, studyData) { - const conversationPages = []; - const conversationsLayout = this.getChildControl("conversations-layout"); + __createConversationPage: function(conversationData) { + const studyData = this.getStudyData(); + let conversationPage = null; + if (conversationData) { + conversationPage = new osparc.study.ConversationPage(studyData, conversationData); + const conversationId = conversationData["conversationId"]; + 
osparc.store.ConversationsProject.getInstance().addListener("conversationDeleted", e => { + const data = e.getData(); + if (conversationId === data["conversationId"]) { + this.__removeConversationPage(conversationId, true); + } + }); + } else { + // create a temporary conversation page + conversationPage = new osparc.study.ConversationPage(studyData); + } + return conversationPage; + }, - const newConversationButton = new qx.ui.form.Button().set({ - icon: "@FontAwesome5Solid/plus/12", - toolTipText: this.tr("Add new conversation"), - allowGrowX: false, - backgroundColor: "transparent", - }); + __addTempConversationPage: function() { + const temporaryConversationPage = this.__createConversationPage(); + this.__addToPages(temporaryConversationPage); + }, - const reloadConversations = () => { - conversationPages.forEach(conversationPage => conversationPage.fireDataEvent("close", conversationPage)); - conversationsLayout.getChildControl("bar").remove(newConversationButton); - this.fetchConversations(studyData); - }; - - if (conversations.length === 0) { - const noConversationTab = new osparc.info.Conversation(studyData); - conversationPages.push(noConversationTab); - noConversationTab.setLabel(this.tr("new")); - noConversationTab.addListener("conversationDeleted", () => reloadConversations()); - conversationsLayout.add(noConversationTab); - } else { - conversations.forEach(conversation => { - const conversationId = conversation["conversationId"]; - const conversationTab = new osparc.info.Conversation(studyData, conversationId); - conversationPages.push(conversationTab); - conversationTab.setLabel(conversation["name"]); - conversationTab.addListener("conversationDeleted", () => reloadConversations()); - conversationsLayout.add(conversationTab); + __addConversationPage: function(conversationData) { + // ignore it if it was already there + const conversationId = conversationData["conversationId"]; + const conversationPageFound = this.__getConversationPage(conversationId); + if (conversationPageFound) { + return null; + } + + const conversationPage = this.__createConversationPage(conversationData); + this.__addToPages(conversationPage); + + this.__conversationsPages.push(conversationPage); + + return conversationPage; + }, + + __addToPages: function(conversationPage) { + const conversationsLayout = this.getChildControl("conversations-layout"); + if (conversationsLayout.getChildren().length === 1) { + // remove the temporary conversation page + if (conversationsLayout.getChildren()[0].getConversationId() === null) { + conversationsLayout.remove(conversationsLayout.getChildren()[0]); + } + } + conversationsLayout.add(conversationPage); + + if (this.__newConversationButton === null) { + const studyData = this.getStudyData(); + // initialize the new button only once + const newConversationButton = this.__newConversationButton = new qx.ui.form.Button().set({ + icon: "@FontAwesome5Solid/plus/12", + toolTipText: this.tr("Add new conversation"), + allowGrowX: false, + backgroundColor: "transparent", + enabled: osparc.data.model.Study.canIWrite(studyData["accessRights"]), + }); + newConversationButton.addListener("execute", () => { + osparc.store.ConversationsProject.getInstance().postConversation(studyData["uuid"], "new " + (this.__conversationsPages.length + 1)) + .then(conversationDt => { + this.__addConversationPage(conversationDt); + const newConversationPage = this.__getConversationPage(conversationDt["conversationId"]); + if (newConversationPage) { + 
conversationsLayout.setSelection([newConversationPage]); + } + }); }); + conversationsLayout.getChildControl("bar").add(newConversationButton); } + // remove and add to move to last position + const bar = conversationsLayout.getChildControl("bar"); + if (bar.indexOf(this.__newConversationButton) > -1) { + bar.remove(this.__newConversationButton); + } + bar.add(this.__newConversationButton); + }, - newConversationButton.addListener("execute", () => { - osparc.study.Conversations.addConversation(studyData["uuid"], "new " + (conversations.length + 1)) - .then(() => { - reloadConversations(); - }); - }); + __removeConversationPage: function(conversationId, changeSelection = false) { + const conversationPage = this.__getConversationPage(conversationId); + if (conversationPage) { + const conversationsLayout = this.getChildControl("conversations-layout"); + if (conversationsLayout.indexOf(conversationPage) > -1) { + conversationsLayout.remove(conversationPage); + } + this.__conversationsPages = this.__conversationsPages.filter(c => c !== conversationPage); + const conversationPages = conversationsLayout.getSelectables(); + if (conversationPages.length) { + if (changeSelection) { + // change selection to the first conversation + conversationsLayout.setSelection([conversationPages[0]]); + } + } else { + // no conversations left, add a temporary one + this.__addTempConversationPage(); + } + } + }, - conversationsLayout.getChildControl("bar").add(newConversationButton); + // it can only be renamed, not updated + __updateConversationName: function(conversationData) { + const conversationId = conversationData["conversationId"]; + const conversationPage = this.__getConversationPage(conversationId); + if (conversationPage) { + conversationPage.renameConversation(conversationData["name"]); + } }, - } + + // overridden + destroy: function() { + const socket = osparc.wrapper.WebSocket.getInstance(); + if (this.__wsHandlers) { + this.__wsHandlers.forEach(({ eventName }) => { + socket.removeSlot(eventName); + }); + this.__wsHandlers = null; + } + + this.base(arguments); + }, + }, }); diff --git a/services/static-webserver/client/source/class/osparc/study/CreateFunction.js b/services/static-webserver/client/source/class/osparc/study/CreateFunction.js index 4c771101bdba..9b71ea943fd9 100644 --- a/services/static-webserver/client/source/class/osparc/study/CreateFunction.js +++ b/services/static-webserver/client/source/class/osparc/study/CreateFunction.js @@ -33,65 +33,34 @@ qx.Class.define("osparc.study.CreateFunction", { }, statics: { - createFunctionData: function(projectData, name, description, exposedInputs, exposedOutputs) { - const functionData = { - "projectId": projectData["uuid"], - "title": name, - "description": description, - "function_class": "PROJECT", - "inputSchema": { - "schema_class": "application/schema+json", - "schema_content": { - "type": "object", - "properties": {}, - "required": [] - } - }, - "outputSchema": { - "schema_class": "application/schema+json", - "schema_content": { - "type": "object", - "properties": {}, - "required": [] - } - }, - "defaultInputs": {}, - }; - - const parameters = osparc.study.Utils.extractFunctionableParameters(projectData["workbench"]); - parameters.forEach(parameter => { - const parameterLabel = parameter["label"]; - if (exposedInputs[parameterLabel]) { - const parameterMetadata = osparc.store.Services.getMetadata(parameter["key"], parameter["version"]); - if (parameterMetadata) { - const type = osparc.service.Utils.getParameterType(parameterMetadata); - 
functionData["inputSchema"]["schema_content"]["properties"][parameterLabel] = { - "type": type, - }; - functionData["inputSchema"]["schema_content"]["required"].push(parameterLabel); - } - } else { - functionData["defaultInputs"][parameterLabel] = osparc.service.Utils.getParameterValue(parameter); - } - }); - - const probes = osparc.study.Utils.extractFunctionableProbes(projectData["workbench"]); - probes.forEach(probe => { - const probeLabel = probe["label"]; - if (exposedOutputs[probeLabel]) { - const probeMetadata = osparc.store.Services.getMetadata(probe["key"], probe["version"]); - if (probeMetadata) { - const type = osparc.service.Utils.getProbeType(probeMetadata); - functionData["outputSchema"]["schema_content"]["properties"][probeLabel] = { - "type": type, - }; - functionData["outputSchema"]["schema_content"]["required"].push(probeLabel); - } - } - }); + CREATE_FUNCTION_TEXT: qx.locale.Manager.tr(` + In order to Create a Function, the pipeline needs: + - at least one parameter and one probe (numbers) + - at least one computational app + - no dynamic apps + `), + + isPotentialFunction: function(workbench) { + // const filePickers = osparc.study.Utils.extractFilePickers(workbench); + // const parameters = osparc.study.Utils.extractParameters(workbench); + // const probes = osparc.study.Utils.extractProbes(workbench); + // return (filePickers.length + parameters.length) && probes.length; + + const parameters = osparc.study.Utils.extractFunctionableParameters(workbench); + const probes = osparc.study.Utils.extractFunctionableProbes(workbench); + const computationals = osparc.study.Utils.extractComputationalServices(workbench); + const dynamics = osparc.study.Utils.extractDynamicServices(workbench); + + return Boolean( + (parameters.length && probes.length) && + computationals.length > 0 && + dynamics.length === 0 + ); + }, - return functionData; - } + checkExposedInputsOutputs: function(exposedInputs, exposedOutputs) { + return Boolean(Object.values(exposedInputs).some(exposedInputValue => exposedInputValue) && Object.values(exposedOutputs).some(exposedOutputValue => exposedOutputValue)); + }, }, members: { @@ -120,11 +89,12 @@ qx.Class.define("osparc.study.CreateFunction", { form.add(description, this.tr("Description"), null, "description"); + const defaultInputs = {}; const exposedInputs = {}; const exposedOutputs = {}; // INPUTS - const inGrid = new qx.ui.layout.Grid(10, 6); + const inGrid = new qx.ui.layout.Grid(12, 6); const inputsLayout = new qx.ui.container.Composite(inGrid).set({ allowGrowX: false, alignX: "left", @@ -163,7 +133,8 @@ qx.Class.define("osparc.study.CreateFunction", { const parameters = osparc.study.Utils.extractFunctionableParameters(this.__studyData["workbench"]); parameters.forEach(parameter => { - const parameterLabel = new qx.ui.basic.Label(parameter["label"]); + const parameterKey = parameter["label"]; + const parameterLabel = new qx.ui.basic.Label(parameterKey); inputsLayout.add(parameterLabel, { row, column, @@ -185,11 +156,28 @@ qx.Class.define("osparc.study.CreateFunction", { row, column, }); - exposedInputs[parameter["label"]] = true; - parameterExposed.addListener("changeValue", e => exposedInputs[parameter["label"]] = e.getData()); + exposedInputs[parameterKey] = true; + parameterExposed.addListener("changeValue", e => exposedInputs[parameterKey] = e.getData()); column++; - const parameterDefaultValue = new qx.ui.basic.Label(String(osparc.service.Utils.getParameterValue(parameter))); + const paramValue = 
osparc.service.Utils.getParameterValue(parameter); + defaultInputs[parameterKey] = paramValue; + let parameterDefaultValue = null; + if (parameterMetadata && osparc.service.Utils.getParameterType(parameterMetadata) === "number") { + parameterDefaultValue = new qx.ui.form.TextField(String(paramValue)); + parameterDefaultValue.addListener("changeValue", e => { + const newValue = e.getData(); + const oldValue = e.getOldData(); + if (newValue === oldValue) { + return; + } + const curatedValue = (!isNaN(parseFloat(newValue))) ? parseFloat(newValue) : parseFloat(oldValue); + defaultInputs[parameterKey] = curatedValue; + parameterDefaultValue.setValue(String(curatedValue)); + }); + } else { + parameterDefaultValue = new qx.ui.basic.Label(String(paramValue)); + } inputsLayout.add(parameterDefaultValue, { row, column, @@ -272,33 +260,34 @@ qx.Class.define("osparc.study.CreateFunction", { allowGrowX: false, alignX: "right" }); + osparc.utils.Utils.setIdToWidget(createFunctionBtn, "create_function_btn"); createFunctionBtn.addListener("execute", () => { if (this.__form.validate()) { - this.__createFunction(exposedInputs, exposedOutputs); + this.__createFunction(defaultInputs, exposedInputs, exposedOutputs); } }, this); }, - __createFunction: function(exposedInputs, exposedOutputs) { + __createFunction: function(defaultInputs, exposedInputs, exposedOutputs) { + if (!osparc.study.CreateFunction.checkExposedInputsOutputs(exposedInputs, exposedOutputs)) { + const msg = this.tr("Expose at least one input and one output"); + osparc.FlashMessenger.logAs(msg, "ERROR"); + return; + } + this.__createFunctionBtn.setFetching(true); - // first publish it as a template - const params = { - url: { - "study_id": this.__studyData["uuid"], - "copy_data": true, - }, - }; - const options = { - pollTask: true - }; - const fetchPromise = osparc.data.Resources.fetch("studies", "postToTemplate", params, options); + // first publish it as a hidden template + const copyData = true; + const hidden = true; + const pollPromise = osparc.store.Templates.createTemplate(this.__studyData["uuid"], copyData, hidden); const pollTasks = osparc.store.PollTasks.getInstance(); - pollTasks.createPollingTask(fetchPromise) + pollTasks.createPollingTask(pollPromise) .then(task => { task.addListener("resultReceived", e => { const templateData = e.getData(); - this.__doCreateFunction(templateData, exposedInputs, exposedOutputs); + this.__updateTemplateMetadata(templateData); + this.__registerFunction(templateData, defaultInputs, exposedInputs, exposedOutputs); }); }) .catch(err => { @@ -307,17 +296,21 @@ qx.Class.define("osparc.study.CreateFunction", { }); }, - __doCreateFunction: function(templateData, exposedInputs, exposedOutputs) { + __updateTemplateMetadata: function(templateData) { + const metadata = { + "custom" : { + "hidden": "Base template for function", + } + }; + osparc.store.Study.getInstance().updateMetadata(templateData["uuid"], metadata) + .catch(err => console.error(err)); + }, + + __registerFunction: function(templateData, defaultInputs, exposedInputs, exposedOutputs) { const nameField = this.__form.getItem("name"); const descriptionField = this.__form.getItem("description"); - const functionData = this.self().createFunctionData(templateData, nameField.getValue(), descriptionField.getValue(), exposedInputs, exposedOutputs); - console.log("functionData", functionData); - - const params = { - data: functionData, - }; - osparc.data.Resources.fetch("functions", "create", params) + osparc.store.Functions.registerFunction(templateData, 
nameField.getValue(), descriptionField.getValue(), defaultInputs, exposedInputs, exposedOutputs) .then(() => osparc.FlashMessenger.logAs(this.tr("Function created"), "INFO")) .catch(err => osparc.FlashMessenger.logError(err)) .finally(() => this.__createFunctionBtn.setFetching(false)); diff --git a/services/static-webserver/client/source/class/osparc/study/NodePricingUnits.js b/services/static-webserver/client/source/class/osparc/study/NodePricingUnits.js index dae4d7370c45..d10d3916c6c0 100644 --- a/services/static-webserver/client/source/class/osparc/study/NodePricingUnits.js +++ b/services/static-webserver/client/source/class/osparc/study/NodePricingUnits.js @@ -72,20 +72,6 @@ qx.Class.define("osparc.study.NodePricingUnits", { }, }, - statics: { - patchPricingUnitSelection: function(studyId, nodeId, planId, selectedUnitId) { - const params = { - url: { - studyId, - nodeId, - pricingPlanId: planId, - pricingUnitId: selectedUnitId - } - }; - return osparc.data.Resources.fetch("studies", "putPricingUnit", params); - } - }, - members: { __nodeKey: null, __nodeVersion: null, @@ -100,23 +86,17 @@ qx.Class.define("osparc.study.NodePricingUnits", { const studyId = this.getStudyId(); const nodeId = this.getNodeId(); - osparc.store.Pricing.getInstance().fetchPricingPlansService(nodeKey, nodeVersion) + osparc.store.Services.getPricingPlan(nodeKey, nodeVersion) .then(pricingPlanData => { if (pricingPlanData) { - const unitParams = { - url: { - studyId, - nodeId - } - }; this.set({ pricingPlanId: pricingPlanData["pricingPlanId"] }); - osparc.data.Resources.fetch("studies", "getPricingUnit", unitParams) - .then(preselectedPricingUnit => { + osparc.store.Study.getInstance().getSelectedPricingUnit(studyId, nodeId) + .then(selectedPricingUnit => { if (pricingPlanData && "pricingUnits" in pricingPlanData && pricingPlanData["pricingUnits"].length) { const pricingUnitsData = pricingPlanData["pricingUnits"]; - const pricingUnitTiers = this.__pricingUnits = new osparc.study.PricingUnitTiers(pricingUnitsData, preselectedPricingUnit); + const pricingUnitTiers = this.__pricingUnits = new osparc.study.PricingUnitTiers(pricingUnitsData, selectedPricingUnit); if (inGroupBox) { const pricingUnitsLayout = osparc.study.StudyOptions.createGroupBox(nodeLabel); pricingUnitsLayout.add(pricingUnitTiers); @@ -125,14 +105,17 @@ qx.Class.define("osparc.study.NodePricingUnits", { this._add(pricingUnitTiers); } pricingUnitTiers.addListener("selectPricingUnitRequested", e => { - const selectedPricingUnitId = e.getData(); + const newSelectedPricingUnit = e.getData(); if (this.isPatchNode()) { pricingUnitTiers.setEnabled(false); const pricingPlanId = this.getPricingPlanId(); - this.self().patchPricingUnitSelection(studyId, nodeId, pricingPlanId, selectedPricingUnitId) - .then(() => pricingUnitTiers.setSelectedUnitId(selectedPricingUnitId)) + osparc.store.Study.getInstance().updateSelectedPricingUnit(studyId, nodeId, pricingPlanId, newSelectedPricingUnit) + .then(() => pricingUnitTiers.setSelectedUnitId(newSelectedPricingUnit.getPricingUnitId())) .catch(err => osparc.FlashMessenger.logError(err, this.tr("Cannot change Tier"))) .finally(() => pricingUnitTiers.setEnabled(true)); + } else { + // do not patch node, just update the selected unit (the parent widget will handle the patching) + pricingUnitTiers.setSelectedUnitId(newSelectedPricingUnit.getPricingUnitId()); } }); } diff --git a/services/static-webserver/client/source/class/osparc/study/PricingUnitTiers.js 
b/services/static-webserver/client/source/class/osparc/study/PricingUnitTiers.js index 028ff5740ffb..b682dd742a41 100644 --- a/services/static-webserver/client/source/class/osparc/study/PricingUnitTiers.js +++ b/services/static-webserver/client/source/class/osparc/study/PricingUnitTiers.js @@ -74,7 +74,7 @@ qx.Class.define("osparc.study.PricingUnitTiers", { pricingUnitTiers.forEach(pricingUnitTier => { pricingUnitTier.addListener("selectPricingUnit", () => { if (changeSelectionAllowed) { - this.fireDataEvent("selectPricingUnitRequested", pricingUnitTier.getUnitData().getPricingUnitId()); + this.fireDataEvent("selectPricingUnitRequested", pricingUnitTier.getUnitData()); } }); }); @@ -84,5 +84,13 @@ qx.Class.define("osparc.study.PricingUnitTiers", { // select and unselect the rest this.__pricingUnitTiers.forEach(puTIer => puTIer.setSelected(puTIer.getUnitData().getPricingUnitId() === selectedUnitId)); }, + + getSelectedUnit: function() { + const selectedUnitTier = this.__pricingUnitTiers.find(puTier => puTier.isSelected()); + if (selectedUnitTier) { + return selectedUnitTier.getUnitData(); + } + return null; + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/study/SaveAsTemplate.js b/services/static-webserver/client/source/class/osparc/study/SaveAsTemplate.js index 77ae4db00f0a..1d2f28f84199 100644 --- a/services/static-webserver/client/source/class/osparc/study/SaveAsTemplate.js +++ b/services/static-webserver/client/source/class/osparc/study/SaveAsTemplate.js @@ -49,16 +49,10 @@ qx.Class.define("osparc.study.SaveAsTemplate", { __createTemplateBtn: null, __buildLayout: function() { - let introText = ""; - if (this.__makeItPublic) { - introText += this.tr("This project will be published and accessible to everyone."); - introText += "
"; - introText += this.tr("All users will see it and can copy it."); - } else { - introText += this.tr("This project will be saved as a template."); - introText += "
"; - introText += this.tr("The users you select will be able to see it and copy it."); - } + const introText = this.__makeItPublic ? + this.tr("Share your project with the entire community as a Public Project. This makes it discoverable and reusable by others, encouraging collaboration and inspiration.") + : + this.tr("Save the current state of your project as a reusable Template. Templates let you branch off easily and start new workflows. You can later decide with whom to share it."); this._add(new qx.ui.basic.Label(introText).set({ font: "text-14", rich: true, @@ -73,7 +67,7 @@ qx.Class.define("osparc.study.SaveAsTemplate", { }); form.add(publishWithData, this.tr("Publish with data"), null, "publishWithData"); - if (osparc.data.Permissions.getInstance().isTester()) { + if (osparc.data.Permissions.getInstance().isProductOwner()) { const templateTypeSB = osparc.study.Utils.createTemplateTypeSB(); form.add(templateTypeSB, this.tr("Template Type"), null, "templateType"); } diff --git a/services/static-webserver/client/source/class/osparc/study/StudyOptions.js b/services/static-webserver/client/source/class/osparc/study/StudyOptions.js index 37cd905c33c5..0569a23295d1 100644 --- a/services/static-webserver/client/source/class/osparc/study/StudyOptions.js +++ b/services/static-webserver/client/source/class/osparc/study/StudyOptions.js @@ -91,19 +91,12 @@ qx.Class.define("osparc.study.StudyOptions", { }, updateName: function(studyData, name) { - return osparc.store.Study.patchStudyData(studyData, "name", name) + return osparc.store.Study.getInstance().patchStudyData(studyData, "name", name) .catch(err => osparc.FlashMessenger.logError(err, qx.locale.Manager.tr("Something went wrong while renaming"))); }, updateWallet: function(studyId, walletId) { - const params = { - url: { - studyId, - walletId, - } - }; - return osparc.data.Resources.fetch("studies", "selectWallet", params) - .catch(err => osparc.FlashMessenger.logError(err, qx.locale.Manager.tr("An issue occurred while selecting Credit Account"))); + return osparc.store.Study.getInstance().selectWallet(studyId, walletId); }, }, @@ -231,14 +224,9 @@ qx.Class.define("osparc.study.StudyOptions", { }, __fetchStudy: function(studyId) { - const params = { - url: { - studyId - } - }; Promise.all([ - osparc.data.Resources.fetch("studies", "getOne", params), - osparc.data.Resources.fetch("studies", "getWallet", params) + osparc.store.Study.getInstance().getOne(studyId), + osparc.store.Study.getInstance().getWallet(studyId), ]) .then(values => { const studyData = values[0]; diff --git a/services/static-webserver/client/source/class/osparc/study/StudyPreview.js b/services/static-webserver/client/source/class/osparc/study/StudyPreview.js index 7d2a101ba53f..209e35d13f6b 100644 --- a/services/static-webserver/client/source/class/osparc/study/StudyPreview.js +++ b/services/static-webserver/client/source/class/osparc/study/StudyPreview.js @@ -26,23 +26,30 @@ qx.Class.define("osparc.study.StudyPreview", { this._setLayout(new qx.ui.layout.VBox(5)); - this.__study = study; - - this.__buildPreview(); + const uiMode = study.getUi().getMode(); + if (["workbench", "pipeline"].includes(uiMode)) { + this.__buildPreview(study); + } }, members: { - __study: null, - - __buildPreview: function() { - const study = this.__study; - const uiMode = study.getUi().getMode(); - if (["workbench", "pipeline"].includes(uiMode) && !study.isPipelineEmpty()) { + __buildPreview: function(study) { + const workbenchReady = () => { const workbenchUIPreview = new 
osparc.workbench.WorkbenchUIPreview(); workbenchUIPreview.setStudy(study); workbenchUIPreview.loadModel(study.getWorkbench()); workbenchUIPreview.setMaxHeight(550); this._add(workbenchUIPreview); + }; + + if (study.getWorkbench().isDeserialized()) { + workbenchReady(); + } else { + study.getWorkbench().addListenerOnce("changeDeserialized", e => { + if (e.getData()) { + workbenchReady(); + } + }, this); } } } diff --git a/services/static-webserver/client/source/class/osparc/study/Utils.js b/services/static-webserver/client/source/class/osparc/study/Utils.js index f657359f6130..57bff3573829 100644 --- a/services/static-webserver/client/source/class/osparc/study/Utils.js +++ b/services/static-webserver/client/source/class/osparc/study/Utils.js @@ -60,9 +60,8 @@ qx.Class.define("osparc.study.Utils", { "y": 100 } }; - // maybe check it's dynamic if (!("mode" in minStudyData["ui"])) { - minStudyData["ui"]["mode"] = "standalone"; + minStudyData["ui"]["mode"] = metadata["type"] && metadata["type"] === "dynamic" ? "standalone" : "pipeline"; } const inaccessibleServices = osparc.store.Services.getInaccessibleServices(minStudyData["workbench"]) if (inaccessibleServices.length) { @@ -72,10 +71,7 @@ qx.Class.define("osparc.study.Utils", { }); return; } - const params = { - data: minStudyData - }; - osparc.study.Utils.createStudyAndPoll(params) + osparc.study.Utils.createStudyAndPoll(minStudyData) .then(studyData => resolve(studyData["uuid"])) .catch(err => reject(err)); }) @@ -83,15 +79,12 @@ qx.Class.define("osparc.study.Utils", { }); }, - createStudyAndPoll: function(params) { + createStudyAndPoll: function(studyData) { return new Promise((resolve, reject) => { - const options = { - pollTask: true - }; - const fetchPromise = osparc.data.Resources.fetch("studies", "postNewStudy", params, options); + const pollPromise = osparc.store.Study.getInstance().createStudy(studyData); const pollTasks = osparc.store.PollTasks.getInstance(); const interval = 1000; - pollTasks.createPollingTask(fetchPromise, interval) + pollTasks.createPollingTask(pollPromise, interval) .then(task => { task.addListener("resultReceived", e => { const resultData = e.getData(); @@ -123,19 +116,10 @@ qx.Class.define("osparc.study.Utils", { minStudyData["name"] = templateData["name"]; minStudyData["description"] = templateData["description"]; minStudyData["thumbnail"] = templateData["thumbnail"]; - const params = { - url: { - templateId: templateData["uuid"] - }, - data: minStudyData - }; - const options = { - pollTask: true - }; - const fetchPromise = osparc.data.Resources.fetch("studies", "postNewStudyFromTemplate", params, options); + const pollPromise = osparc.store.Study.getInstance().createStudyFromTemplate(templateData["uuid"], minStudyData); const pollTasks = osparc.store.PollTasks.getInstance(); const interval = 1000; - pollTasks.createPollingTask(fetchPromise, interval) + pollTasks.createPollingTask(pollPromise, interval) .then(task => { const title = qx.locale.Manager.tr("CREATING ") + osparc.product.Utils.getStudyAlias({allUpperCase: true}) + " ..."; const progressSequence = new osparc.widget.ProgressSequence(title).set({ @@ -192,17 +176,9 @@ qx.Class.define("osparc.study.Utils", { const text = qx.locale.Manager.tr("Duplicate process started and added to the background tasks"); osparc.FlashMessenger.logAs(text, "INFO"); - const params = { - url: { - "studyId": studyData["uuid"] - } - }; - const options = { - pollTask: true - }; - const fetchPromise = osparc.data.Resources.fetch("studies", "duplicate", params, options); + 
const pollPromise = osparc.store.Study.getInstance().duplicateStudy(studyData["uuid"]); const pollTasks = osparc.store.PollTasks.getInstance(); - return pollTasks.createPollingTask(fetchPromise) + return pollTasks.createPollingTask(pollPromise) }, createTemplateTypeSB: function() { @@ -251,6 +227,22 @@ qx.Class.define("osparc.study.Utils", { return Array.from(services); }, + extractComputationalServices: function(workbench) { + const computationals = Object.values(workbench).filter(node => { + const metadata = osparc.store.Services.getMetadata(node["key"], node["version"]); + return metadata && osparc.data.model.Node.isComputational(metadata); + }); + return computationals; + }, + + extractDynamicServices: function(workbench) { + const dynamics = Object.values(workbench).filter(node => { + const metadata = osparc.store.Services.getMetadata(node["key"], node["version"]); + return metadata && osparc.data.model.Node.isDynamic(metadata); + }); + return dynamics; + }, + extractFilePickers: function(workbench) { const parameters = Object.values(workbench).filter(srv => srv["key"].includes("simcore/services/frontend/file-picker")); return parameters; @@ -278,23 +270,7 @@ qx.Class.define("osparc.study.Utils", { return parameters; }, - canCreateFunction: function(workbench) { - // in order to create a function, the pipeline needs: - // - at least one parameter (or file-picker (file type parameter)) - // - at least one probe - - // const filePickers = osparc.study.Utils.extractFilePickers(workbench); - // const parameters = osparc.study.Utils.extractParameters(workbench); - // const probes = osparc.study.Utils.extractProbes(workbench); - // return (filePickers.length + parameters.length) && probes.length; - - // - for now, only float types are allowed - const parameters = osparc.study.Utils.extractFunctionableParameters(workbench); - const probes = osparc.study.Utils.extractFunctionableProbes(workbench); - return parameters.length && probes.length; - }, - - getCantExecuteServices: function(studyServices = []) { + getCantReadServices: function(studyServices = []) { return studyServices.filter(studyService => studyService["myAccessRights"]["execute"] === false); }, @@ -345,7 +321,26 @@ qx.Class.define("osparc.study.Utils", { }, isInDebt: function(studyData) { - return Boolean("debt" in studyData && studyData["debt"] < 0); + return osparc.store.Study.getInstance().isStudyInDebt(studyData["uuid"]); + }, + + extractDebtFromError: function(studyId, err) { + const msg = err["message"]; + // The backend might have thrown a 402 because the wallet was negative + const match = msg.match(/last transaction of\s([-]?\d+(\.\d+)?)\sresulted/); + let debt = null; + if ("debtAmount" in err) { + // the study has some debt that needs to be paid + debt = err["debtAmount"]; + } else if (match) { + // the study has some debt that needs to be paid + debt = parseFloat(match[1]); // Convert the captured string to a number + } + if (debt) { + // if get here, it means that the 402 was thrown due to the debt + osparc.store.Study.getInstance().setStudyDebt(studyId, debt); + } + return debt; }, getUiMode: function(studyData) { @@ -355,15 +350,79 @@ qx.Class.define("osparc.study.Utils", { return null; }, + state: { + __getShareState: function(state) { + if (state && "shareState" in state) { + return state["shareState"]; + } + return null; + }, + + getProjectStatus: function(state) { + const shareState = this.__getShareState(state); + if (shareState && "status" in shareState) { + return shareState["status"]; + } + return null; + }, 
+ + isProjectLocked: function(state) { + const shareState = this.__getShareState(state); + if (shareState && "locked" in shareState) { + return shareState["locked"]; + } + return false; + }, + + getCurrentGroupIds: function(state) { + const shareState = this.__getShareState(state); + if (shareState && "currentUserGroupids" in shareState) { + return shareState["currentUserGroupids"]; + } + return []; + }, + + getPipelineState: function(state) { + if ( + state && + "state" in state && + "value" in state["state"] + ) { + return state["state"]["value"]; + } + return undefined; + }, + + PIPELINE_RUNNING_STATES: [ + "PUBLISHED", + "PENDING", + "WAITING_FOR_RESOURCES", + "WAITING_FOR_CLUSTER", + "STARTED", + "STOPPING", + "RETRY", + ], + + isPipelineRunning: function(state) { + const pipelineState = this.getPipelineState(state); + if (pipelineState) { + return this.PIPELINE_RUNNING_STATES.includes(pipelineState); + } + return false; + }, + }, + __getBlockedState: function(studyData) { - if (studyData["services"]) { - const cantReadServices = osparc.study.Utils.getCantExecuteServices(studyData["services"]); + if (studyData["services"] === null) { + return "UNKNOWN_SERVICES"; + } else if (studyData["services"]) { + const cantReadServices = osparc.study.Utils.getCantReadServices(studyData["services"]); const inaccessibleServices = osparc.store.Services.getInaccessibleServices(studyData["workbench"]); if (cantReadServices.length || inaccessibleServices.length) { return "UNKNOWN_SERVICES"; } } - if (studyData["state"] && studyData["state"]["locked"] && studyData["state"]["locked"]["value"]) { + if (this.state.isProjectLocked(studyData["state"])) { return "IN_USE"; } if (this.isInDebt(studyData)) { @@ -374,6 +433,9 @@ qx.Class.define("osparc.study.Utils", { canBeOpened: function(studyData) { const blocked = this.__getBlockedState(studyData); + if (osparc.utils.DisabledPlugins.isRTCEnabled()) { + return ["IN_USE", false].includes(blocked); + } return [false].includes(blocked); }, @@ -399,6 +461,9 @@ qx.Class.define("osparc.study.Utils", { canShowPreview: function(studyData) { const blocked = this.__getBlockedState(studyData); + if (osparc.utils.DisabledPlugins.isRTCEnabled()) { + return ["IN_USE", false].includes(blocked); + } return [false].includes(blocked); }, @@ -441,9 +506,8 @@ qx.Class.define("osparc.study.Utils", { if (studyData["ui"]["mode"] === "pipeline") { resolve(osparc.data.model.StudyUI.PIPELINE_ICON); } else { - const defaultIcon = osparc.dashboard.CardBase.PRODUCT_ICON; - // the was to guess the TI type is to check the boot mode of the ti-postpro in the pipeline - const wbServices = this.self().getNonFrontendNodes(studyData); + const productIcon = osparc.dashboard.CardBase.PRODUCT_ICON; + const wbServices = this.getNonFrontendNodes(studyData); if (wbServices.length === 1) { const wbService = wbServices[0]; osparc.store.Services.getService(wbService.key, wbService.version) @@ -451,8 +515,9 @@ qx.Class.define("osparc.study.Utils", { if (serviceMetadata && serviceMetadata["icon"]) { resolve(serviceMetadata["icon"]); } - resolve(defaultIcon); - }); + resolve(productIcon); + }) + .catch(() => resolve(productIcon)); } else { resolve(osparc.data.model.StudyUI.PIPELINE_ICON); } diff --git a/services/static-webserver/client/source/class/osparc/support/Conversation.js b/services/static-webserver/client/source/class/osparc/support/Conversation.js new file mode 100644 index 000000000000..23588a4c1e26 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/support/Conversation.js @@ 
-0,0 +1,221 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.support.Conversation", { + extend: osparc.conversation.Conversation, + + /** + * @param conversation {osparc.data.model.Conversation} Conversation + */ + construct: function(conversation) { + this.base(arguments, conversation); + }, + + statics: { + SYSTEM_MESSAGE_TYPE: { + ASK_A_QUESTION: "askAQuestion", + BOOK_A_CALL: "bookACall", + BOOK_A_CALL_3RD: "bookACall3rd", + ESCALATE_TO_SUPPORT: "escalateToSupport", + REPORT_OEC: "reportOEC", + FOLLOW_UP: "followUp", + }, + }, + + members: { + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "share-project-layout": + control = new qx.ui.container.Composite(new qx.ui.layout.HBox()).set({ + backgroundColor: "strong-main", + decorator: "rounded", + }); + this._addAt(control, 5); + break; + case "share-project-checkbox": + control = new qx.ui.form.CheckBox().set({ + value: false, + label: this.tr("Share Project with Support"), + textColor: "white", + padding: 3, + }); + this.getChildControl("share-project-layout").add(new qx.ui.core.Spacer(), { flex: 1 }); + this.getChildControl("share-project-layout").add(control); + this.getChildControl("share-project-layout").add(new qx.ui.core.Spacer(), { flex: 1 }); + break; + } + return control || this.base(arguments, id); + }, + + _buildLayout: function() { + this.base(arguments); + + const addMessages = this.getChildControl("add-message"); + addMessages.addListener("addMessage", e => { + const content = e.getData(); + const conversation = this.getConversation(); + if (conversation) { + this.__postMessage(content); + } else { + // create new conversation first + const extraContext = {}; + const currentStudy = osparc.store.Store.getInstance().getCurrentStudy() + if (currentStudy) { + extraContext["projectId"] = currentStudy.getUuid(); + } + osparc.store.ConversationsSupport.getInstance().postConversation(extraContext) + .then(data => { + let prePostMessagePromise = new Promise((resolve) => resolve()); + let isBookACall = false; + // make these checks first, setConversation will reload messages + if ( + this._messages.length === 1 && + this._messages[0]["systemMessageType"] && + this._messages[0]["systemMessageType"] === osparc.support.Conversation.SYSTEM_MESSAGE_TYPE.BOOK_A_CALL + ) { + isBookACall = true; + } + const newConversation = new osparc.data.model.Conversation(data); + this.setConversation(newConversation); + if (isBookACall) { + // add a first message + prePostMessagePromise = this.__postMessage("Book a Call"); + // rename the conversation + newConversation.renameConversation("Book a Call"); + } + prePostMessagePromise + .then(() => { + // add the actual message + return this.__postMessage(content); + }) + .then(() => { + setTimeout(() => this.addSystemMessage("followUp"), 1000); + }); + }); + } + }); + }, + + _applyConversation: function(conversation) { + this.base(arguments, conversation); + + this.__populateShareProjectCheckbox(); + }, + + __postMessage: function(content) { + const conversationId = this.getConversation().getConversationId(); + return osparc.store.ConversationsSupport.getInstance().postMessage(conversationId, content); + }, + + 
__populateShareProjectCheckbox: function() { + const conversation = this.getConversation(); + + const shareProjectCB = this.getChildControl("share-project-checkbox"); + const shareProjectLayout = this.getChildControl("share-project-layout"); + const currentStudy = osparc.store.Store.getInstance().getCurrentStudy(); + let showCB = false; + let enabledCB = false; + if (conversation === null && currentStudy) { + // initiating conversation + showCB = true; + enabledCB = true; + } else if (conversation) { + // it was already set + showCB = conversation.getContextProjectId(); + enabledCB = conversation.amIOwner(); + } + shareProjectLayout.set({ + visibility: showCB ? "visible" : "excluded", + enabled: enabledCB, + }); + + if (conversation && conversation.getContextProjectId()) { + const projectId = conversation.getContextProjectId(); + osparc.store.Study.getInstance().getOne(projectId) + .then(studyData => { + let isAlreadyShared = false; + const accessRights = studyData["accessRights"]; + const supportGroupId = osparc.store.Groups.getInstance().getSupportGroup().getGroupId(); + if (supportGroupId && supportGroupId in accessRights) { + isAlreadyShared = true; + } else { + isAlreadyShared = false; + } + shareProjectCB.setValue(isAlreadyShared); + shareProjectCB.removeListener("changeValue", this.__shareProjectWithSupport, this); + if (showCB) { + shareProjectCB.addListener("changeValue", this.__shareProjectWithSupport, this); + } + }); + } + }, + + __shareProjectWithSupport: function(e) { + const share = e.getData(); + const supportGroupId = osparc.store.Groups.getInstance().getSupportGroup().getGroupId(); + const projectId = this.getConversation().getContextProjectId(); + osparc.store.Study.getInstance().getOne(projectId) + .then(studyData => { + if (share) { + const newCollaborators = { + [supportGroupId]: osparc.data.Roles.STUDY["write"].accessRights + }; + osparc.store.Study.getInstance().addCollaborators(studyData, newCollaborators) + } else { + osparc.store.Study.getInstance().removeCollaborator(studyData, supportGroupId); + } + }); + }, + + addSystemMessage: function(type) { + type = type || osparc.support.Conversation.SYSTEM_MESSAGE_TYPE.ASK_A_QUESTION; + + const now = new Date(); + const systemMessage = { + "conversationId": null, + "created": now.toISOString(), + "messageId": `system-${now.getTime()}`, + "modified": now.toISOString(), + "type": "MESSAGE", + "userGroupId": "system", + }; + let msg = null; + const greet = "Hi " + osparc.auth.Data.getInstance().getUserName() + ",\n"; + switch (type) { + case osparc.support.Conversation.SYSTEM_MESSAGE_TYPE.ASK_A_QUESTION: + msg = greet + "Have a question or feedback?\nWe are happy to assist!"; + break; + case osparc.support.Conversation.SYSTEM_MESSAGE_TYPE.BOOK_A_CALL: + msg = greet + "Let us know what your availability is and we will get back to you shortly to schedule a meeting."; + break; + case osparc.support.Conversation.SYSTEM_MESSAGE_TYPE.ESCALATE_TO_SUPPORT: + msg = greet + "Our support team will take it from here — please confirm or edit your question below to get started."; + break; + case osparc.support.Conversation.SYSTEM_MESSAGE_TYPE.FOLLOW_UP: + msg = "A support ticket has been created.\nOur team will review your request and contact you soon."; + break; + } + if (msg) { + systemMessage["content"] = msg; + systemMessage["systemMessageType"] = type; + this.addMessage(systemMessage); + } + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/support/ConversationListItem.js 
b/services/static-webserver/client/source/class/osparc/support/ConversationListItem.js new file mode 100644 index 000000000000..ab88649bfad7 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/support/ConversationListItem.js @@ -0,0 +1,73 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.support.ConversationListItem", { + extend: osparc.ui.list.ListItem, + + construct: function() { + this.base(arguments); + + const layout = this._getLayout(); + layout.setSpacingX(10); + layout.setSpacingY(0); + + // decorate + this.getChildControl("thumbnail").setDecorator("circled"); + this.getChildControl("subtitle").set({ + textColor: "text-disabled", + }); + }, + + properties: { + conversation: { + check: "osparc.data.model.Conversation", + init: null, + nullable: false, + event: "changeConversation", + apply: "__applyConversation", + }, + }, + + members: { + __applyConversation: function(conversation) { + conversation.bind("nameAlias", this, "title"); + + this.__populateWithLastMessage(); + conversation.addListener("changeLastMessage", this.__populateWithLastMessage, this); + }, + + __populateWithLastMessage: function() { + const lastMessage = this.getConversation().getLastMessage(); + if (lastMessage) { + const date = osparc.utils.Utils.formatDateAndTime(new Date(lastMessage.created)); + this.set({ + subtitle: date, + }); + const userGroupId = lastMessage.userGroupId; + osparc.store.Users.getInstance().getUser(userGroupId) + .then(user => { + if (user) { + this.set({ + thumbnail: user.getThumbnail(), + subtitle: user.getLabel() + " - " + date, + }); + } + }); + } + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/support/ConversationPage.js b/services/static-webserver/client/source/class/osparc/support/ConversationPage.js new file mode 100644 index 000000000000..d7c1b87036c6 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/support/ConversationPage.js @@ -0,0 +1,330 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.support.ConversationPage", { + extend: qx.ui.core.Widget, + + construct: function() { + this.base(arguments); + + this.__messages = []; + + this._setLayout(new qx.ui.layout.VBox(5)); + + this.getChildControl("back-button"); + + const conversation = this.getChildControl("conversation-content"); + this.bind("conversation", conversation, "conversation"); + conversation.bind("conversation", this, "conversation"); + }, + + properties: { + conversation: { + check: "osparc.data.model.Conversation", + init: null, + nullable: true, + event: "changeConversation", + apply: "__applyConversation", + }, + }, + + events: { + "showConversations": "qx.event.type.Event", + }, + + members: { + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "conversation-header-layout": { + const headerLayout = new qx.ui.layout.HBox(5).set({ + 
alignY: "middle", + }) + control = new qx.ui.container.Composite(headerLayout).set({ + padding: 5, + }); + this._add(control); + break; + } + case "back-button": + control = new qx.ui.form.Button().set({ + toolTipText: this.tr("Return to Messages"), + icon: "@FontAwesome5Solid/arrow-left/16", + backgroundColor: "transparent" + }); + control.addListener("execute", () => this.fireEvent("showConversations")); + this.getChildControl("conversation-header-layout").addAt(control, 0); + break; + case "conversation-header-center-layout": + control = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); + this.getChildControl("conversation-header-layout").addAt(control, 1, { + flex: 1, + }); + break; + case "conversation-title": + control = new qx.ui.basic.Label().set({ + font: "text-14", + alignY: "middle", + allowGrowX: true, + }); + this.getChildControl("conversation-header-center-layout").addAt(control, 0); + break; + case "conversation-extra-layout": + control = new qx.ui.container.Composite(new qx.ui.layout.VBox(2)); + this.getChildControl("conversation-header-center-layout").addAt(control, 1); + break; + case "buttons-layout": + control = new qx.ui.container.Composite(new qx.ui.layout.HBox(5).set({ + alignY: "middle", + })).set({ + maxHeight: 24, + }); + this.getChildControl("conversation-header-layout").addAt(control, 2); + break; + case "rename-conversation-button": { + control = new qx.ui.form.Button().set({ + icon: "@FontAwesome5Solid/i-cursor/12", + toolTipText: this.tr("Rename"), + alignX: "center", + alignY: "middle", + }); + control.addListener("execute", () => this.__renameConversation()); + this.getChildControl("buttons-layout").addAt(control, 0); + break; + } + case "open-project-button": + control = new qx.ui.form.Button().set({ + icon: "@FontAwesome5Solid/external-link-alt/12", + alignX: "center", + alignY: "middle", + }); + control.addListener("execute", () => this.__openProjectDetails()); + this.getChildControl("buttons-layout").addAt(control, 1); + break; + case "copy-ticket-id-button": { + control = new qx.ui.form.Button().set({ + icon: "@FontAwesome5Solid/copy/12", + toolTipText: this.tr("Copy Ticket ID"), + alignX: "center", + alignY: "middle", + }); + control.addListener("execute", () => this.__copyTicketId()); + this.getChildControl("buttons-layout").addAt(control, 2); + break; + } + case "open-ticket-link-button": { + control = new qx.ui.form.Button().set({ + icon: "@FontAwesome5Solid/link/12", + toolTipText: this.tr("Open Ticket"), + alignX: "center", + alignY: "middle", + }); + this.getChildControl("buttons-layout").addAt(control, 3); + break; + } + case "set-appointment-button": { + control = new qx.ui.form.Button().set({ + icon: "@FontAwesome5Solid/clock/12", + toolTipText: this.tr("Set Appointment"), + alignX: "center", + alignY: "middle", + }); + control.addListener("execute", () => this.__openAppointmentDetails()); + this.getChildControl("buttons-layout").addAt(control, 4); + break; + } + case "main-stack": + control = new qx.ui.container.Stack(); + this._add(control, { + flex: 1 + }); + break; + case "conversation-container": + control = new qx.ui.container.Scroll(); + this.getChildControl("main-stack").add(control); + break; + case "conversation-content": + control = new osparc.support.Conversation(); + this.getChildControl("conversation-container").add(control); + break; + case "book-a-call-iframe": + control = new osparc.wrapper.BookACallIframe(); + this.getChildControl("main-stack").add(control); + break; + } + return control || this.base(arguments, id); + 
}, + + proposeConversation: function(type, prefillText) { + type = type || osparc.support.Conversation.SYSTEM_MESSAGE_TYPE.ASK_A_QUESTION; + this.setConversation(null); + + const title = this.getChildControl("conversation-title"); + const conversationContent = this.getChildControl("conversation-content"); + conversationContent.clearAllMessages(); + const conversationContainer = this.getChildControl("conversation-container"); + this.getChildControl("main-stack").setSelection([conversationContainer]); + switch (type) { + case osparc.support.Conversation.SYSTEM_MESSAGE_TYPE.ASK_A_QUESTION: + title.setValue(this.tr("Ask a Question")); + break; + case osparc.support.Conversation.SYSTEM_MESSAGE_TYPE.BOOK_A_CALL: + title.setValue(this.tr("Book a Call")); + break; + case osparc.support.Conversation.SYSTEM_MESSAGE_TYPE.BOOK_A_CALL_3RD: { + title.setValue(this.tr("Book a Call 3rd")); + const bookACallIframe = this.getChildControl("book-a-call-iframe"); + this.getChildControl("main-stack").setSelection([bookACallIframe]); + break; + } + case osparc.support.Conversation.SYSTEM_MESSAGE_TYPE.ESCALATE_TO_SUPPORT: + title.setValue(this.tr("Ask a Question")); + break; + case osparc.support.Conversation.SYSTEM_MESSAGE_TYPE.REPORT_OEC: + title.setValue(this.tr("Report an Error")); + break; + } + conversationContent.addSystemMessage(type); + + if (prefillText) { + this.getChildControl("conversation-content").getChildControl("add-message").getChildControl("comment-field").setText(prefillText); + } + }, + + __applyConversation: function(conversation) { + const title = this.getChildControl("conversation-title"); + if (conversation) { + conversation.bind("nameAlias", title, "value"); + } + + const extraContextLayout = this.getChildControl("conversation-extra-layout"); + extraContextLayout.removeAll(); + if (conversation) { + const amISupporter = osparc.store.Groups.getInstance().amIASupportUser(); + + const createExtraContextLabel = text => { + return new qx.ui.basic.Label(text).set({ + font: "text-12", + textColor: "text-disabled", + allowGrowX: true, + selectable: true, + }); + }; + const updateExtraContext = () => { + extraContextLayout.removeAll(); + const extraContext = conversation.getExtraContext(); + if (extraContext && Object.keys(extraContext).length) { + const ticketIdLabel = createExtraContextLabel(`Ticket ID: ${osparc.utils.Utils.uuidToShort(conversation.getConversationId())}`); + extraContextLayout.add(ticketIdLabel); + if (amISupporter) { + const fogbugzLink = conversation.getFogbugzLink(); + if (fogbugzLink) { + const text = "Fogbugz Case: " + fogbugzLink.split("/").pop(); + const fogbugzLabel = new osparc.ui.basic.LinkLabel(text, fogbugzLink).set({ + font: "link-label-12", + textColor: "text-disabled", + allowGrowX: true, + }); + extraContextLayout.add(fogbugzLabel); + } + const contextProjectId = conversation.getContextProjectId(); + if (contextProjectId) { + const projectIdLabel = createExtraContextLabel(`Project ID: ${osparc.utils.Utils.uuidToShort(contextProjectId)}`); + extraContextLayout.add(projectIdLabel); + } + + } + } + }; + updateExtraContext(); + conversation.addListener("changeExtraContext", () => updateExtraContext(), this); + } + + this.getChildControl("buttons-layout").setVisibility(conversation ? "visible" : "excluded"); + + this.getChildControl("rename-conversation-button"); + const openProjectButton = this.getChildControl("open-project-button"); + openProjectButton.setVisibility(conversation && conversation.getContextProjectId() ? 
"visible" : "excluded"); + this.getChildControl("copy-ticket-id-button"); + }, + + __openProjectDetails: function() { + const projectId = this.getConversation().getContextProjectId(); + if (projectId) { + osparc.store.Study.getInstance().getOne(projectId) + .then(studyData => { + if (studyData) { + const studyDataCopy = osparc.data.model.Study.deepCloneStudyObject(studyData); + studyDataCopy["resourceType"] = "study"; + osparc.dashboard.ResourceDetails.popUpInWindow(studyDataCopy); + } + }) + .catch(err => console.warn(err)); + } + }, + + __copyTicketId: function() { + if (this.getConversation()) { + const conversationId = this.getConversation().getConversationId(); + osparc.utils.Utils.copyTextToClipboard(conversationId); + } + }, + + __openAppointmentDetails: function() { + const win = new osparc.widget.DateTimeChooser(); + win.addListener("dateChanged", e => { + const newValue = e.getData()["newValue"]; + this.getConversation().setAppointment(newValue) + .catch(err => console.error(err)); + win.close(); + }, this); + win.open(); + }, + + __renameConversation: function() { + let oldName = this.getConversation().getName(); + if (oldName === "null") { + oldName = ""; + } + const renamer = new osparc.widget.Renamer(oldName).set({ + maxChars: osparc.data.model.Conversation.MAX_TITLE_LENGTH, + }); + renamer.addListener("labelChanged", e => { + renamer.close(); + const newLabel = e.getData()["newLabel"]; + this.getConversation().renameConversation(newLabel); + }, this); + renamer.center(); + renamer.open(); + }, + + __getAddMessageField: function() { + return this.getChildControl("conversation-content") && + this.getChildControl("conversation-content").getChildControl("add-message"); + }, + + postMessage: function(message) { + const addMessage = this.__getAddMessageField(); + if (addMessage && addMessage.getChildControl("comment-field")) { + addMessage.getChildControl("comment-field").setText(message); + return addMessage.addComment(); + } + return Promise.reject(); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/support/Conversations.js b/services/static-webserver/client/source/class/osparc/support/Conversations.js new file mode 100644 index 000000000000..95dc2ff46366 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/support/Conversations.js @@ -0,0 +1,103 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.support.Conversations", { + extend: qx.ui.core.Widget, + + construct: function() { + this.base(arguments); + + this._setLayout(new qx.ui.layout.VBox(10)); + this.__conversationListItems = []; + + this.__fetchConversations(); + + this.__listenToNewConversations(); + }, + + events: { + "openConversation": "qx.event.type.Data", + }, + + members: { + __conversationListItems: null, + + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "loading-button": + control = new osparc.ui.form.FetchButton(); + this._add(control); + break; + case "conversations-layout": + control = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); + this._add(control, { + flex: 1 + }); + break; + } + + return control || this.base(arguments, id); + }, + + __getConversationItem: 
function(conversationId) { + return this.__conversationListItems.find(conversation => conversation.getConversation().getConversationId() === conversationId); + }, + + __fetchConversations: function() { + const loadMoreButton = this.getChildControl("loading-button"); + loadMoreButton.setFetching(true); + + osparc.store.ConversationsSupport.getInstance().fetchConversations() + .then(conversations => { + if (conversations.length) { + conversations.forEach(conversation => this.__addConversation(conversation)); + } + }) + .finally(() => { + loadMoreButton.setFetching(false); + loadMoreButton.exclude(); + }); + }, + + __listenToNewConversations: function() { + osparc.store.ConversationsSupport.getInstance().addListener("conversationCreated", e => { + const conversation = e.getData(); + this.__addConversation(conversation); + }); + }, + + __addConversation: function(conversation) { + // ignore it if it was already there + const conversationId = conversation.getConversationId(); + const conversationItemFound = this.__getConversationItem(conversationId); + if (conversationItemFound) { + return null; + } + + const conversationListItem = new osparc.support.ConversationListItem(); + conversationListItem.setConversation(conversation); + conversationListItem.addListener("tap", () => this.fireDataEvent("openConversation", conversationId, this)); + const conversationsLayout = this.getChildControl("conversations-layout"); + conversationsLayout.add(conversationListItem); + this.__conversationListItems.push(conversationListItem); + + return conversationListItem; + }, + }, +}); diff --git a/services/static-webserver/client/source/class/osparc/support/ConversationsPage.js b/services/static-webserver/client/source/class/osparc/support/ConversationsPage.js new file mode 100644 index 000000000000..bbb45dd159b7 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/support/ConversationsPage.js @@ -0,0 +1,94 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.support.ConversationsPage", { + extend: qx.ui.core.Widget, + + construct: function() { + this.base(arguments); + + this._setLayout(new qx.ui.layout.VBox(15)); + + this.getChildControl("conversations-list"); + this.getChildControl("ask-a-question-button"); + this.getChildControl("book-a-call-button"); + if (osparc.utils.Utils.isDevelopmentPlatform()) { + this.getChildControl("book-a-call-button-3rd"); + } + }, + + events: { + "openConversation": "qx.event.type.Data", + "createConversation": "qx.event.type.Data", + }, + + members: { + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "conversations-list": { + control = new osparc.support.Conversations(); + control.addListener("openConversation", e => { + const conversationId = e.getData(); + this.fireDataEvent("openConversation", conversationId); + }, this); + const scroll = new qx.ui.container.Scroll(); + scroll.add(control); + this._add(scroll, { + flex: 1, + }); + break; + } + case "buttons-layout": + control = new qx.ui.container.Composite(new qx.ui.layout.HBox(15).set({ + alignX: "center", + })); + this._add(control); + break; + case "ask-a-question-button": + control = new qx.ui.form.Button(this.tr("Ask a Question"), 
"@FontAwesome5Solid/comments/14").set({ + appearance: "strong-button", + allowGrowX: false, + center: true, + }); + control.addListener("execute", () => this.fireDataEvent("createConversation", osparc.support.Conversation.SYSTEM_MESSAGE_TYPE.ASK_A_QUESTION), this); + this.getChildControl("buttons-layout").add(control); + break; + case "book-a-call-button": + control = new qx.ui.form.Button(this.tr("Book a Call"), "@FontAwesome5Solid/phone/14").set({ + appearance: "strong-button", + allowGrowX: false, + center: true, + }); + control.addListener("execute", () => this.fireDataEvent("createConversation", osparc.support.Conversation.SYSTEM_MESSAGE_TYPE.BOOK_A_CALL), this); + this.getChildControl("buttons-layout").add(control); + break; + case "book-a-call-button-3rd": + control = new qx.ui.form.Button(this.tr("Book a Call"), "@FontAwesome5Solid/flask/14").set({ + appearance: "strong-button", + allowGrowX: false, + center: true, + }); + control.addListener("execute", () => this.fireDataEvent("createConversation", osparc.support.Conversation.SYSTEM_MESSAGE_TYPE.BOOK_A_CALL_3RD), this); + this.getChildControl("buttons-layout").add(control); + break; + } + return control || this.base(arguments, id); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/support/HomePage.js b/services/static-webserver/client/source/class/osparc/support/HomePage.js new file mode 100644 index 000000000000..ac65bf740c43 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/support/HomePage.js @@ -0,0 +1,176 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.support.HomePage", { + extend: qx.ui.core.Widget, + + construct: function() { + this.base(arguments); + + this._setLayout(new qx.ui.layout.VBox(20)); + + this.set({ + padding: 5, + }); + + if (osparc.store.Groups.getInstance().isSupportEnabled()) { + this.getChildControl("ask-a-question-button"); + this.getChildControl("book-a-call-button"); + if (osparc.utils.Utils.isDevelopmentPlatform()) { + this.getChildControl("book-a-call-button-3rd"); + } + } + this.__populateButtons(); + }, + + events: { + "createConversation": "qx.event.type.Data", + }, + + statics: { + decorateButton: function(button) { + button.set({ + appearance: "help-list-button", + icon: null, + gap: 8, + paddingLeft: 12, + paddingRight: 12, + }); + button.getChildControl("label").set({ + rich: true + }); + }, + + addExternalLinkIcon: function(button) { + const icon = new qx.ui.basic.Image("@FontAwesome5Solid/external-link-alt/14").set({ + alignY: "middle", + marginLeft: 5 + }); + button._add(icon); + }, + }, + + members: { + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "conversation-buttons-layout": { + control = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)).set({ + // align it with the rest of the buttons in section boxes + marginLeft: 11, + marginRight: 11, + }); + this._add(control); + break; + } + case "ask-a-question-button": + control = new qx.ui.form.Button(this.tr("Ask a Question"), "@FontAwesome5Solid/comments/16").set({ + gap: 8, + appearance: "strong-button", + center: true, + width: 183, + }); + control.addListener("execute", () => 
this.fireDataEvent("createConversation", osparc.support.Conversation.SYSTEM_MESSAGE_TYPE.ASK_A_QUESTION)); + this.getChildControl("conversation-buttons-layout").add(control, { flex: 1 }); + break; + case "book-a-call-button": + control = new qx.ui.form.Button(this.tr("Book a Call"), "@FontAwesome5Solid/phone/16").set({ + gap: 8, + appearance: "strong-button", + center: true, + width: 183, + }); + control.addListener("execute", () => this.fireDataEvent("createConversation", osparc.support.Conversation.SYSTEM_MESSAGE_TYPE.BOOK_A_CALL)); + this.getChildControl("conversation-buttons-layout").add(control, { flex: 1 }); + break; + case "book-a-call-button-3rd": + control = new qx.ui.form.Button(this.tr("Book a Call"), "@FontAwesome5Solid/flask/16").set({ + gap: 8, + appearance: "strong-button", + center: true, + width: 183, + }); + control.addListener("execute", () => this.fireDataEvent("createConversation", osparc.support.Conversation.SYSTEM_MESSAGE_TYPE.BOOK_A_CALL_3RD)); + this.getChildControl("conversation-buttons-layout").add(control, { flex: 1 }); + break; + case "learning-box": + control = new osparc.widget.SectionBox(this.tr("Learning"), "@FontAwesome5Solid/graduation-cap/14"); + control.getChildControl("legend").set({ + gap: 8 + }); + this._add(control); + break; + case "references-box": + control = new osparc.widget.SectionBox(this.tr("References"), "@FontAwesome5Solid/book/14"); + control.getChildControl("legend").set({ + gap: 8 + }); + this._add(control); + break; + } + return control || this.base(arguments, id); + }, + + __populateButtons: function() { + const learningBox = this.getChildControl("learning-box"); + const quickStartButton = osparc.store.Support.getQuickStartButton(); + if (quickStartButton) { + learningBox.add(quickStartButton); + this.self().decorateButton(quickStartButton); + } + + const permissions = osparc.data.Permissions.getInstance(); + if (permissions.canDo("dashboard.templates.read")) { + const tutorialsBtn = new qx.ui.form.Button(this.tr("Explore Tutorials"), "@FontAwesome5Solid/graduation-cap/14"); + tutorialsBtn.addListener("execute", () => qx.event.message.Bus.getInstance().dispatchByName("showTab", "tutorialsTab"), this); + learningBox.add(tutorialsBtn); + this.self().decorateButton(tutorialsBtn); + } + + const guidedToursButton = osparc.store.Support.getGuidedToursButton(); + learningBox.add(guidedToursButton); + this.self().decorateButton(guidedToursButton); + + const referencesBox = this.getChildControl("references-box"); + const manualButtons = osparc.store.Support.getManualButtons(); + manualButtons.forEach(manualButton => { + referencesBox.add(manualButton); + this.self().decorateButton(manualButton); + this.self().addExternalLinkIcon(manualButton); + }); + + const supportButtons = osparc.store.Support.getSupportButtons(); + supportButtons.forEach(supportButton => { + referencesBox.add(supportButton); + this.self().decorateButton(supportButton); + this.self().addExternalLinkIcon(supportButton); + }); + + const releaseNotesButton = osparc.store.Support.getReleaseNotesButton(); + this._add(releaseNotesButton); + this.self().decorateButton(releaseNotesButton); + this.self().addExternalLinkIcon(releaseNotesButton); + releaseNotesButton.set({ + icon: "@FontAwesome5Solid/bullhorn/14", + // align it with the rest of the buttons in section boxes + marginLeft: 11, + marginRight: 11, + }); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/support/SuggestedQuestion.js 
b/services/static-webserver/client/source/class/osparc/support/SuggestedQuestion.js new file mode 100644 index 000000000000..0ae7b8dbd526 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/support/SuggestedQuestion.js @@ -0,0 +1,84 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.support.SuggestedQuestion", { + extend: qx.ui.core.Widget, + + construct: function() { + this.base(arguments); + + const layout = new qx.ui.layout.Grid(12, 4); + layout.setColumnFlex(1, 1); // content + this._setLayout(layout); + this.setPadding(5); + }, + + events: { + "questionAnswered": "qx.event.type.Data", + }, + + members: { + __addProductThumbnail: function() { + const thumbnail = osparc.utils.Utils.createThumbnail(32).set({ + source: osparc.product.Utils.getIconUrl(), + }); + this._add(thumbnail, { + row: 0, + column: 0, + }); + }, + + __addQuestionLabel: function(text) { + const question = new qx.ui.basic.Label(text); + this._add(question, { + row: 0, + column: 1, + }); + }, + + __addAnswers: function(answers) { + const answersContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox(5)); + answers.forEach(answer => { + const button = new qx.ui.form.Button(answer.label).set({ + appearance: "strong-button", + allowGrowX: false, + }); + button.addListener("execute", () => this.fireDataEvent("questionAnswered", answer.key)); + answersContainer.add(button); + }); + this._add(answersContainer, { + row: 1, + column: 1, + }); + }, + + isProjectRelated: function(answers) { + this._removeAll(); + this.__addProductThumbnail(); + this.__addQuestionLabel(this.tr("Is your question related to the current project?")); + this.__addAnswers(answers); + }, + + shareProject: function(answers) { + this._removeAll(); + this.__addProductThumbnail(); + this.__addQuestionLabel(this.tr("Do you want to share this project with Support?")); + this.__addAnswers(answers); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/support/SupportCenter.js b/services/static-webserver/client/source/class/osparc/support/SupportCenter.js new file mode 100644 index 000000000000..c13997267df9 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/support/SupportCenter.js @@ -0,0 +1,205 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.support.SupportCenter", { + extend: osparc.ui.window.SingletonWindow, + + construct: function() { + this.base(arguments, "support-center"); + + this.getChildControl("title").set({ + textAlign: "center", + }); + + this.set({ + layout: new qx.ui.layout.VBox(10), + width: osparc.support.SupportCenter.WINDOW_WIDTH, + height: osparc.support.SupportCenter.getMaxHeight(), + modal: false, + showMaximize: false, + showMinimize: false, + showClose: true, + }); + + this.getLayout().set({ + separator: "separator-vertical" + }); + + this.getChildControl("home-page"); + if
(osparc.store.Groups.getInstance().isSupportEnabled()) { + this.getChildControl("conversations-page"); + this.getChildControl("conversation-page"); + this.getChildControl("home-button"); + this.getChildControl("conversations-button"); + } + + this.__selectHomeStackPage(); + }, + + statics: { + WINDOW_WIDTH: 430, + WINDOW_HEIGHT: 700, + REQUEST_CALL_MESSAGE: "Dear Support,\nI would like to make an appointment for a support call.", + + getMaxHeight: function() { + // height: max 80% of screen, or WINDOW_HEIGHTpx + const clientHeight = document.documentElement.clientHeight; + return Math.min(osparc.support.SupportCenter.WINDOW_HEIGHT, parseInt(clientHeight * 0.8)); + }, + + openWindow: function(stackPage) { + const supportCenterWindow = new osparc.support.SupportCenter(); + + if (stackPage === "conversations") { + supportCenterWindow.showConversations(); + } + + const positionWindow = () => { + supportCenterWindow.set({ + height: osparc.support.SupportCenter.getMaxHeight(), + }); + // bottom right + const clientWidth = document.documentElement.clientWidth; + const clientHeight = document.documentElement.clientHeight; + const posX = clientWidth - osparc.support.SupportCenter.WINDOW_WIDTH - 4; + const posY = clientHeight - supportCenterWindow.getHeight() - 4; + supportCenterWindow.moveTo(posX, posY); + }; + supportCenterWindow.open(); + positionWindow(); + window.addEventListener("resize", positionWindow); + + return supportCenterWindow; + } + }, + + members: { + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "main-stack": + control = new qx.ui.container.Stack(); + this.add(control, { + flex: 1 + }); + break; + case "buttons-layout": + control = new qx.ui.container.Composite(new qx.ui.layout.HBox().set({ + alignX: "center", + })).set({ + visibility: osparc.store.Groups.getInstance().isSupportEnabled() ? 
"visible" : "excluded", + }); + this.add(control); + break; + case "home-button": + control = new qx.ui.form.Button().set({ + label: this.tr("Help & Support"), + icon: "@FontAwesome5Solid/question-circle/18", + backgroundColor: "transparent", + iconPosition: "top", + allowGrowX: true, + center: true, + }); + control.addListener("execute", () => this.__selectHomeStackPage(), this); + this.getChildControl("buttons-layout").add(control, { flex: 1 }); + break; + case "conversations-button": + control = new qx.ui.form.Button().set({ + label: this.tr("Conversations"), + icon: "@FontAwesome5Solid/comments/18", + backgroundColor: "transparent", + iconPosition: "top", + allowGrowX: true, + center: true, + }); + control.addListener("execute", () => this.showConversations(), this); + this.getChildControl("buttons-layout").add(control, { flex: 1 }); + break; + case "home-page": + control = new osparc.support.HomePage(); + control.addListener("createConversation", e => this.createConversation(e.getData()), this); + this.getChildControl("main-stack").add(control); + break; + case "conversations-stack": + control = new qx.ui.container.Stack(); + this.getChildControl("main-stack").add(control); + break; + case "conversations-page": + control = new osparc.support.ConversationsPage(); + control.addListener("openConversation", e => this.openConversation(e.getData()), this); + control.addListener("createConversation", e => this.createConversation(e.getData()), this); + this.getChildControl("conversations-stack").add(control); + break; + case "conversation-page": + control = new osparc.support.ConversationPage(); + control.addListener("showConversations", () => this.showConversations(), this); + this.getChildControl("conversations-stack").add(control); + break; + } + return control || this.base(arguments, id); + }, + + __selectHomeStackPage: function() { + this.setCaption(this.tr("Help & Support")); + this.getChildControl("main-stack").setSelection([this.getChildControl("home-page")]); + this.getChildControl("home-button").getChildControl("icon").set({ + textColor: "strong-main", + }); + this.getChildControl("conversations-button").getChildControl("icon").set({ + textColor: "text", + }); + }, + + __selectConversationsStackPage: function() { + this.setCaption(this.tr("Conversations")); + this.getChildControl("main-stack").setSelection([this.getChildControl("conversations-stack")]); + this.getChildControl("home-button").getChildControl("icon").set({ + textColor: "text", + }); + this.getChildControl("conversations-button").getChildControl("icon").set({ + textColor: "strong-main", + }); + }, + + showConversations: function() { + this.__selectConversationsStackPage(); + this.getChildControl("conversations-stack").setSelection([this.getChildControl("conversations-page")]); + }, + + __showConversation: function() { + this.__selectConversationsStackPage(); + this.getChildControl("conversations-stack").setSelection([this.getChildControl("conversation-page")]); + }, + + openConversation: function(conversationId) { + const conversationPage = this.getChildControl("conversation-page"); + if (conversationId) { + osparc.store.ConversationsSupport.getInstance().getConversation(conversationId) + .then(conversation => { + conversationPage.setConversation(conversation); + this.__showConversation(); + }); + } + }, + + createConversation: function(type, prefillText) { + const conversationPage = this.getChildControl("conversation-page"); + conversationPage.proposeConversation(type, prefillText); + this.__showConversation(); + }, + 
} +}); diff --git a/services/static-webserver/client/source/class/osparc/task/TasksButton.js b/services/static-webserver/client/source/class/osparc/task/TasksButton.js index 3631cabf1a27..64393192e9d7 100644 --- a/services/static-webserver/client/source/class/osparc/task/TasksButton.js +++ b/services/static-webserver/client/source/class/osparc/task/TasksButton.js @@ -24,10 +24,6 @@ qx.Class.define("osparc.task.TasksButton", { this._setLayout(new qx.ui.layout.Canvas()); this.set({ - width: 30, - alignX: "center", - cursor: "pointer", - visibility: "excluded", toolTipText: this.tr("Tasks"), }); @@ -56,7 +52,7 @@ qx.Class.define("osparc.task.TasksButton", { } case "number": control = new qx.ui.basic.Label().set({ - backgroundColor: "background-main-1", + backgroundColor: osparc.navigation.NavigationBar.BG_COLOR, paddingLeft: 4, font: "text-12" }); @@ -95,16 +91,7 @@ qx.Class.define("osparc.task.TasksButton", { document.removeEventListener("mousedown", tapListener); }; - const bounds = this.getBounds(); - const cel = this.getContentElement(); - if (cel) { - const domeEle = cel.getDomElement(); - if (domeEle) { - const rect = domeEle.getBoundingClientRect(); - bounds.left = parseInt(rect.x); - bounds.top = parseInt(rect.y); - } - } + const bounds = osparc.utils.Utils.getBounds(this); const tasks = osparc.task.TasksContainer.getInstance(); tasks.setTasksContainerPosition( bounds.left + bounds.width - osparc.task.TaskUI.MAX_WIDTH, diff --git a/services/static-webserver/client/source/class/osparc/tester/Statics.js b/services/static-webserver/client/source/class/osparc/tester/Statics.js index b6655075e9f1..3dacfdbd5fa0 100644 --- a/services/static-webserver/client/source/class/osparc/tester/Statics.js +++ b/services/static-webserver/client/source/class/osparc/tester/Statics.js @@ -23,7 +23,7 @@ qx.Class.define("osparc.tester.Statics", { let control; switch (id) { case "statics-container": - control = osparc.ui.window.TabbedView.createSectionBox(this.tr("Statics")); + control = new osparc.widget.SectionBox(this.tr("Statics")); this._add(control, { flex: 1 }); @@ -44,7 +44,7 @@ qx.Class.define("osparc.tester.Statics", { break; } case "local-storage-container": - control = osparc.ui.window.TabbedView.createSectionBox(this.tr("Local Storage")); + control = new osparc.widget.SectionBox(this.tr("Local Storage")); this._add(control); break; case "local-storage-content": { diff --git a/services/static-webserver/client/source/class/osparc/tester/WebSocketMessages.js b/services/static-webserver/client/source/class/osparc/tester/WebSocketMessages.js index 0872301459f0..dd07c2f4373d 100644 --- a/services/static-webserver/client/source/class/osparc/tester/WebSocketMessages.js +++ b/services/static-webserver/client/source/class/osparc/tester/WebSocketMessages.js @@ -78,7 +78,7 @@ qx.Class.define("osparc.tester.WebSocketMessages", { }); break; } - case "json-viewer": + case "json-tree-widget": control = new osparc.ui.basic.JsonTreeWidget(); this._add(control); break; @@ -89,7 +89,7 @@ qx.Class.define("osparc.tester.WebSocketMessages", { _buildLayout: function() { const filterMessage = this.getChildControl("filter-message"); const table = this.getChildControl("messages-table"); - const jsonViewer = this.getChildControl("json-viewer"); + const jsonTreeWidget = this.getChildControl("json-tree-widget"); const model = table.getTableModel(); filterMessage.addListener("changeValue", e => { @@ -101,7 +101,7 @@ qx.Class.define("osparc.tester.WebSocketMessages", { table.addListener("cellTap", e => { const selectedRow = 
e.getRow(); const rowData = table.getTableModel().getRowData(selectedRow); - jsonViewer.setJson(JSON.parse(rowData[2])); + jsonTreeWidget.setJson(JSON.parse(rowData[2])); }, this); this.__populateTable(); diff --git a/services/static-webserver/client/source/class/osparc/theme/Appearance.js b/services/static-webserver/client/source/class/osparc/theme/Appearance.js index c0a1a3da81ce..e89ba3e03d38 100644 --- a/services/static-webserver/client/source/class/osparc/theme/Appearance.js +++ b/services/static-webserver/client/source/class/osparc/theme/Appearance.js @@ -19,6 +19,15 @@ qx.Theme.define("osparc.theme.Appearance", { extend: osparc.theme.common.Appearance, appearances: { + "iframe-no-border": { + style: () => { + return { + backgroundColor: "transparent", + decorator: "no-border-0" + }; + } + }, + "strong-ui": { style: () => { return { @@ -28,6 +37,26 @@ qx.Theme.define("osparc.theme.Appearance", { } }, + "help-list-button": { + include: "button", + style() { + return { + font: "text-14", + allowGrowX: true, + minHeight: 29, + center: false + }; + } + }, + "help-list-button/label": { + style() { + return { + textAlign: "left", + allowGrowX: true + }; + } + }, + "dragdrop-no-cursor": { style: () => { return { @@ -129,6 +158,10 @@ qx.Theme.define("osparc.theme.Appearance", { } }, + "pb-function": { + include: "pb-template", + }, + "pb-hypertool": { include: "pb-template", }, @@ -236,6 +269,20 @@ qx.Theme.define("osparc.theme.Appearance", { } }, + /* + --------------------------------------------------------------------------- + TABLE + --------------------------------------------------------------------------- + */ + + "table-header-cell": { + style: function() { + return { + font: "text-13", // override the default theme's bold font + } + } + }, + /* --------------------------------------------------------------------------- WINDOW-SMALL-CAP CHOOSER @@ -432,6 +479,21 @@ qx.Theme.define("osparc.theme.Appearance", { }) }, + "selectbox/arrow": { + style: () => ({ + // keep the original source + source: osparc.theme.common.Image.URLS["arrow-down"], + // keep the original paddings + paddingRight: 0, + paddingLeft: 2, + paddingTop: -3, + // ensure the arrow has explicit size + width: 16, + height: 16, + scale: true, + }) + }, + /* --------------------------------------------------------------------------- PROGRESSBAR @@ -850,14 +912,12 @@ qx.Theme.define("osparc.theme.Appearance", { backgroundColor = "default-button-focus-background"; } if (states.selected || states.checked) { - textColor = "default-button-disabled"; + textColor = "white"; cursor = "default"; decorator = "form-button-checked"; - backgroundColor = "default-button-disabled-background"; + backgroundColor = "product-color"; } - decorator; - return { textColor: textColor, cursor: cursor, @@ -1006,8 +1066,8 @@ qx.Theme.define("osparc.theme.Appearance", { include: "form-button", style: state => ({ decorator: state.hovered || state.focused ? "form-button-danger-hover" : "form-button-danger", - backgroundColor: state.hovered || state.focused ? "default-button-hover-background" : "error", - textColor: "black", + backgroundColor: state.hovered || state.focused || state.disabled ? "default-button-hover-background" : "error", + textColor: state.disabled ? 
"text": "black", }) }, @@ -1192,8 +1252,8 @@ qx.Theme.define("osparc.theme.Appearance", { padding: [5, 10], // showTimeout is themeable so it can be tuned // it was defaulted to 700 which was too short - showTimeout: 2000, - hideTimeout: 6000, + showTimeout: 1400, + hideTimeout: 5000, }) }, diff --git a/services/static-webserver/client/source/class/osparc/theme/ColorDark.js b/services/static-webserver/client/source/class/osparc/theme/ColorDark.js index 47b42050a368..a7fb3673b67b 100644 --- a/services/static-webserver/client/source/class/osparc/theme/ColorDark.js +++ b/services/static-webserver/client/source/class/osparc/theme/ColorDark.js @@ -35,7 +35,7 @@ qx.Theme.define("osparc.theme.ColorDark", { "navigation_bar_background_color": "rgba(1, 18, 26, 0.8)", "fab_text": "contrasted-text-dark", "fab-background": "rgba(255, 255, 255, 0.2)", - "input_background": "#213248", + "input_background": "rgb(33, 50, 72)", "input_background_disable": "rgba(113, 157, 181, 0.25)", "hint-background": "rgba(82, 82, 82, 1)", "transparent_overlay": "rgba(1, 18, 26, 0.1)", @@ -50,6 +50,7 @@ qx.Theme.define("osparc.theme.ColorDark", { "text-darker": "rgba(255, 255, 255, 1)", "contrasted-text-dark": "rgba(216, 216, 216, 1)", "contrasted-text-light": "rgba(255, 255, 255, 1)", + "text-complementary": "rgba(40, 40, 40, 1)", "link": "rgba(10, 182, 255, 1)", // shadows @@ -58,7 +59,7 @@ qx.Theme.define("osparc.theme.ColorDark", { "shadow": qx.core.Environment.get("css.rgba") ? "a-bit-transparent" : "bg-shadow", // window - "window-popup-background": "rgba(66, 66, 66, 1)", + "window-popup-background": "background-main-1", "window-caption-background": "background-main", "window-caption-background-active": "background-main-3", "window-caption-text": "text", @@ -135,8 +136,6 @@ qx.Theme.define("osparc.theme.ColorDark", { // OSPARC - "workbench-edge-comp-active": "#777777", - "workbench-edge-api-active": "#BBBBBB", "workbench-start-hint": "#505050", "workbench-view-navbar": "c00", "workbench-view-splitter": "#000000", diff --git a/services/static-webserver/client/source/class/osparc/theme/ColorLight.js b/services/static-webserver/client/source/class/osparc/theme/ColorLight.js index 629e75ccef5a..e6b8583074ad 100644 --- a/services/static-webserver/client/source/class/osparc/theme/ColorLight.js +++ b/services/static-webserver/client/source/class/osparc/theme/ColorLight.js @@ -34,7 +34,7 @@ qx.Theme.define("osparc.theme.ColorLight", { "navigation_bar_background_color": "rgba(229, 229, 229, 0.8)", "fab_text": "contrasted-text-dark", - "fab-background": "rgba(255, 255, 255, 0.2)", + "fab-background": "rgba(0, 0, 0, 0.2)", "input_background": "rgba(209, 214, 218, 1)", "input_background_disable": "rgba(113, 157, 181, 0.04)", "hint-background": "rgba(201, 201, 201, 1)", @@ -50,6 +50,7 @@ qx.Theme.define("osparc.theme.ColorLight", { "text-darker": "rgba(20, 20, 20, 1)", "contrasted-text-dark": "rgba(20, 20, 20, 1)", "contrasted-text-light": "rgba(40, 40, 40, 1)", + "text-complementary": "rgba(216, 216, 216, 1)", "link": "rgba(10, 182, 255, 1)", // shadows @@ -58,8 +59,7 @@ qx.Theme.define("osparc.theme.ColorLight", { "shadow": qx.core.Environment.get("css.rgba") ? 
"a-bit-transparent" : "bg-shadow", // window - // OM here - "window-popup-background": "rgba(225, 225, 225, 1)", + "window-popup-background": "background-main-1", "window-caption-background": "background-main", "window-caption-background-active": "background-main-3", "window-caption-text": "text", @@ -136,8 +136,6 @@ qx.Theme.define("osparc.theme.ColorLight", { // OSPARC - "workbench-edge-comp-active": "#888888", - "workbench-edge-api-active": "#444444", "workbench-start-hint": "#AFAFAF", "workbench-view-navbar": "c02", "workbench-view-splitter": "background-main-3", diff --git a/services/static-webserver/client/source/class/osparc/theme/Decoration.js b/services/static-webserver/client/source/class/osparc/theme/Decoration.js index 2a32cae25598..f9474e3dcb06 100644 --- a/services/static-webserver/client/source/class/osparc/theme/Decoration.js +++ b/services/static-webserver/client/source/class/osparc/theme/Decoration.js @@ -26,6 +26,28 @@ qx.Theme.define("osparc.theme.Decoration", { } }, + "circled": { + style: { + radius: 16 + } + }, + + "chat-bubble": { + style: { + radius: 4, + // width: 1, + // color: "text-disabled", + backgroundColor: "background-main-2", + } + }, + + "separator-strong": { + style: { + widthTop: 1, + colorTop: "product-color", + } + }, + "border-simple": { include: "border", style: { @@ -258,9 +280,10 @@ qx.Theme.define("osparc.theme.Decoration", { } }, - "no-border-2": { + "no-border-0": { style: { - width: 0 + width: 0, + color: "transparent" } }, diff --git a/services/static-webserver/client/source/class/osparc/theme/mixin/Color.js b/services/static-webserver/client/source/class/osparc/theme/mixin/Color.js index aaa58363b215..75bc4def7a25 100644 --- a/services/static-webserver/client/source/class/osparc/theme/mixin/Color.js +++ b/services/static-webserver/client/source/class/osparc/theme/mixin/Color.js @@ -51,6 +51,7 @@ qx.Theme.define("osparc.theme.mixin.Color", { "logger-warning-message": "warning-yellow", "logger-error-message": "failed-red", + "workbench-edge": "#787878", "workbench-edge-selected": "busy-orange", diff --git a/services/static-webserver/client/source/class/osparc/ui/basic/AvatarGroup.js b/services/static-webserver/client/source/class/osparc/ui/basic/AvatarGroup.js new file mode 100644 index 000000000000..85cfa1416abf --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/ui/basic/AvatarGroup.js @@ -0,0 +1,204 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.ui.basic.AvatarGroup", { + extend: qx.ui.core.Widget, + + construct: function(size = 32, orientation = "right", maxWidth = 150) { + this.base(arguments); + + this.set({ + decorator: null, + padding: 0, + backgroundColor: null, + width: maxWidth, + maxWidth: maxWidth, + allowGrowY: false, + }); + this._setLayout(new qx.ui.layout.Canvas()); + + this.__avatarSize = size; + this.__orientation = orientation; + this.__maxVisible = Math.max(1, Math.floor(maxWidth/size) - 1); // Ensure at least 1 visible avatar + this.__userGroupIds = []; + this.__avatars = []; + + this.__isPointerInside = false; + this.__onGlobalPointerMove = this.__onGlobalPointerMove.bind(this); + document.addEventListener("pointermove", this.__onGlobalPointerMove); + }, + 
+ properties: { + hideMyself: { + check: "Boolean", + init: false, + } + }, + + members: { + __avatarSize: null, + __maxVisible: null, + __userGroupIds: null, + __avatars: null, + __collapseTimeout: null, + __isPointerInside: null, + __onGlobalPointerMove: null, + + setUserGroupIds: function(userGroupIds) { + if ( + userGroupIds.length && + JSON.stringify(userGroupIds) === JSON.stringify(this.__userGroupIds) + ) { + return; + } + this.__userGroupIds = userGroupIds || []; + + if (this.isHideMyself()) { + // remove myself from the list of users + userGroupIds = userGroupIds.filter(gid => gid !== osparc.store.Groups.getInstance().getMyGroupId()); + } + + const usersStore = osparc.store.Users.getInstance(); + const userPromises = userGroupIds.map(userGroupId => usersStore.getUser(userGroupId)); + const users = []; + Promise.all(userPromises) + .then(usersResult => { + usersResult.forEach(user => { + users.push({ + name: user.getUserName(), + avatar: user.getThumbnail(), + }); + }); + this.__buildAvatars(users); + }) + .catch(error => { + console.error("Failed to fetch user data for avatars:", error); + }); + }, + + __buildAvatars(users) { + this._removeAll(); + this.__avatars = []; + + const usersToShow = users.slice(0, this.__maxVisible); + const totalAvatars = [...usersToShow]; + if (users.length > this.__maxVisible) { + totalAvatars.push({ + name: `+${users.length - this.__maxVisible}`, + isExtra: true + }); + } + + totalAvatars.forEach(user => { + let avatar; + + if (user.isExtra) { + avatar = new qx.ui.basic.Label(user.name); + avatar.set({ + width: this.__avatarSize, + height: this.__avatarSize, + textAlign: "center", + backgroundColor: "text", + textColor: "text-complementary", + toolTipText: `${user.name.replace("+", "")} more` + }); + + avatar.getContentElement().setStyles({ + display: "flex", + justifyContent: "center", + alignItems: "center", + fontWeight: "bold", + fontSize: "0.8em" + }); + } else { + avatar = new qx.ui.basic.Image(user.avatar); + avatar.set({ + width: this.__avatarSize, + height: this.__avatarSize, + scale: true, + toolTipText: user.name + }); + } + + const haloColor = qx.theme.manager.Color.getInstance().resolve("text"); + avatar.getContentElement().setStyles({ + borderRadius: "50%", + border: "1px solid " + haloColor, + boxShadow: "0 0 0 1px rgba(0,0,0,0.1)", + transition: "left 0.1s ease, right 0.1s ease", + position: "absolute" + }); + + this.__avatars.push(avatar); + this._add(avatar); + }); + + this.__expand(false); + }, + + __expand: function(expand = true) { + const overlap = Math.floor(this.__avatarSize * (expand ? 
0.1 : 0.7)); + this.__avatars.forEach((avatar, index) => { + const shift = index * (this.__avatarSize - overlap); + avatar.setLayoutProperties({ + [this.__orientation]: shift + }); + avatar.setZIndex(index); + }); + }, + + __onGlobalPointerMove(e) { + const domEl = this.getContentElement().getDomElement(); + if (!domEl) { + return; + } + + const rect = domEl.getBoundingClientRect(); + const inside = + e.clientX >= rect.left && + e.clientX <= rect.right && + e.clientY >= rect.top && + e.clientY <= rect.bottom; + + if (inside) { + if (!this.__isPointerInside) { + this.__isPointerInside = true; + if (this.__collapseTimeout) { + clearTimeout(this.__collapseTimeout); + this.__collapseTimeout = null; + } + this.__expand(true); + } + } else { + if (this.__isPointerInside) { + this.__isPointerInside = false; + if (this.__collapseTimeout) { + clearTimeout(this.__collapseTimeout); + } + this.__collapseTimeout = setTimeout(() => { + this.__expand(false); + }, 200); + } + } + } + }, + + destruct: function() { + document.removeEventListener("pointermove", this.__onGlobalPointerMove); + }, +}); diff --git a/services/static-webserver/client/source/class/osparc/ui/basic/Chip.js b/services/static-webserver/client/source/class/osparc/ui/basic/Chip.js index eaa08c3812cb..a15571a02b03 100644 --- a/services/static-webserver/client/source/class/osparc/ui/basic/Chip.js +++ b/services/static-webserver/client/source/class/osparc/ui/basic/Chip.js @@ -10,12 +10,57 @@ qx.Class.define("osparc.ui.basic.Chip", { construct: function(label, icon) { this.base(arguments, label, icon); + + this.set({ + allowGrowX: false, + }); }, properties: { appearance: { init: "chip", refine: true - } - } + }, + + statusColor: { + check: ["success", "warning", "error"], + init: null, + apply: "__applyStatusColor", + }, + }, + + statics: { + STATUS: { + SUCCESS: "success", + WARNING: "warning", + ERROR: "error", + }, + }, + + members: { + __applyStatusColor: function(status) { + if (status) { + switch (status.toLowerCase()) { + case this.self().STATUS.SUCCESS: + this.set({ + textColor: "white", + backgroundColor: "product-color", + }); + break; + case this.self().STATUS.WARNING: + this.set({ + textColor: "black", + backgroundColor: "warning-yellow", + }); + break; + case this.self().STATUS.ERROR: + this.set({ + textColor: "black", + backgroundColor: "failed-red", + }); + break; + } + } + }, + }, }); diff --git a/services/static-webserver/client/source/class/osparc/ui/basic/JsonTreeWidget.js b/services/static-webserver/client/source/class/osparc/ui/basic/JsonTreeWidget.js index 60f5a55e5b16..179b632f22a9 100644 --- a/services/static-webserver/client/source/class/osparc/ui/basic/JsonTreeWidget.js +++ b/services/static-webserver/client/source/class/osparc/ui/basic/JsonTreeWidget.js @@ -48,7 +48,7 @@ qx.Class.define("osparc.ui.basic.JsonTreeWidget", { }, members: { - setJson(data) { + setJson: function(data) { const prettyJson = JSON.stringify(data, null, " ").replace(/\n/ig, "
"); this.setValue(prettyJson); } diff --git a/services/static-webserver/client/source/class/osparc/ui/basic/LogoWPlatform.js b/services/static-webserver/client/source/class/osparc/ui/basic/LogoWPlatform.js index 5670dddf9622..efbad5bfba74 100644 --- a/services/static-webserver/client/source/class/osparc/ui/basic/LogoWPlatform.js +++ b/services/static-webserver/client/source/class/osparc/ui/basic/LogoWPlatform.js @@ -57,7 +57,7 @@ qx.Class.define("osparc.ui.basic.LogoWPlatform", { font: "text-9" }); - let platformName = osparc.store.StaticInfo.getInstance().getPlatformName(); + let platformName = osparc.store.StaticInfo.getPlatformName(); platformName = platformName.toUpperCase(); if (osparc.utils.Utils.isInZ43()) { platformName = "Z43 " + platformName; diff --git a/services/static-webserver/client/source/class/osparc/ui/basic/SVGImage.js b/services/static-webserver/client/source/class/osparc/ui/basic/SVGImage.js index 878413af5e9d..719f132c4e28 100644 --- a/services/static-webserver/client/source/class/osparc/ui/basic/SVGImage.js +++ b/services/static-webserver/client/source/class/osparc/ui/basic/SVGImage.js @@ -68,7 +68,7 @@ qx.Class.define("osparc.ui.basic.SVGImage", { filter = "invert(66%) sepia(24%) saturate(5763%) hue-rotate(188deg) brightness(101%) contrast(101%)"; break; case "text": // light or dark - if (qx.theme.manager.Meta.getInstance().getTheme().basename === "ThemeLight") { + if (osparc.ui.switch.ThemeSwitcher.isLight()) { // ThemeLight #282828 filter = "invert(10%) sepia(4%) saturate(19%) hue-rotate(354deg) brightness(102%) contrast(86%)"; } else { diff --git a/services/static-webserver/client/source/class/osparc/ui/basic/Thumbnail.js b/services/static-webserver/client/source/class/osparc/ui/basic/Thumbnail.js index ff7c6f5d98d2..876abfbdb1de 100644 --- a/services/static-webserver/client/source/class/osparc/ui/basic/Thumbnail.js +++ b/services/static-webserver/client/source/class/osparc/ui/basic/Thumbnail.js @@ -83,7 +83,7 @@ qx.Class.define("osparc.ui.basic.Thumbnail", { __applySource: function(val) { const image = this.getChildControl("image"); if (val) { - if (osparc.utils.Utils.isValidHttpUrl(val)) { + if (!val.startsWith("osparc/") && osparc.utils.Utils.isValidHttpUrl(val)) { osparc.utils.Utils.setUrlSourceToImage(image, val); } else { image.setSource(val); diff --git a/services/static-webserver/client/source/class/osparc/ui/basic/UserThumbnail.js b/services/static-webserver/client/source/class/osparc/ui/basic/UserThumbnail.js new file mode 100644 index 000000000000..b323cb8a867d --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/ui/basic/UserThumbnail.js @@ -0,0 +1,56 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.ui.basic.UserThumbnail", { + extend: qx.ui.basic.Image, + + construct: function(size) { + this.base(arguments); + + this.set(osparc.utils.Utils.getThumbnailProps(size)); + + if (osparc.store.Groups.getInstance().amIASupportUser()) { + this.setCursor("pointer"); + this.addListener("tap", this.__openUserDetails, this); + } + }, + + properties: { + user: { + check: "osparc.data.model.User", + init: null, + nullable: true, + apply: "__applyUser", + } + }, + + members: { + __applyUser: function(user) 
{ + if (user) { + this.setSource(user.getThumbnail()); + } else { + this.setSource(osparc.utils.Avatar.emailToThumbnail()); + } + }, + + __openUserDetails: function() { + if (this.getUser()) { + osparc.user.UserAccountWindow.openWindow(this.getUser().getGroupId()); + } + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/ui/form/DateTimeField.js b/services/static-webserver/client/source/class/osparc/ui/form/DateTimeField.js new file mode 100644 index 000000000000..300a6d212b9e --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/ui/form/DateTimeField.js @@ -0,0 +1,134 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.ui.form.DateTimeField", { + extend: qx.ui.core.Widget, + include: [qx.ui.form.MForm], + implement: [qx.ui.form.IForm, qx.ui.form.IStringForm], + + construct: function() { + this.base(arguments); + + this._setLayout(new qx.ui.layout.HBox(5)); + + this.set({ + maxHeight: 26 + }); + + // Date selector + this.__dateField = new qx.ui.form.DateField(); + const dateFormat = new qx.util.format.DateFormat("dd/MM/yyyy"); + this.__dateField.setDateFormat(dateFormat); + this._add(this.__dateField); + + // Hour selector + this.__hourSpinner = new qx.ui.form.Spinner(0, 12, 23); + this._add(this.__hourSpinner); + + // Minute selector + this.__minuteSpinner = new qx.ui.form.Spinner(0, 0, 59); + this._add(this.__minuteSpinner); + + const now = new Date(); + this.setValue(now); + + // Sync changes back to value + this.__dateField.addListener("changeValue", this.__updateValue, this); + this.__hourSpinner.addListener("changeValue", this.__updateValue, this); + this.__minuteSpinner.addListener("changeValue", this.__updateValue, this); + }, + + properties: { + // The combined Date value + value: { + check: "Date", + nullable: true, + event: "changeValue", + apply: "_applyValue" + } + }, + + members: { + __dateField: null, + __hourSpinner: null, + __minuteSpinner: null, + + _applyValue: function(value, old) { + if (value) { + this.__dateField.setValue(value); + this.__hourSpinner.setValue(value.getHours()); + this.__minuteSpinner.setValue(value.getMinutes()); + } else { + this.__dateField.resetValue(); + this.__hourSpinner.resetValue(); + this.__minuteSpinner.resetValue(); + } + }, + + __updateValue: function() { + const date = this.__dateField.getValue(); + const now = new Date(); + + if (date) { + // Prevent past dates + if (date < now.setHours(0,0,0,0)) { + this.__dateField.setValue(new Date()); + return; + } + + const newDate = new Date(date.getTime()); + newDate.setHours(this.__hourSpinner.getValue()); + newDate.setMinutes(this.__minuteSpinner.getValue()); + + // If today, prevent past time + const isToday = + date.getFullYear() === now.getFullYear() && + date.getMonth() === now.getMonth() && + date.getDate() === now.getDate(); + + if (isToday && newDate < now) { + this.__hourSpinner.setValue(now.getHours()); + this.__minuteSpinner.setValue(now.getMinutes()); + this.setValue(now); + } else { + this.setValue(newDate); + } + } else { + this.resetValue(); + } + }, + + // Interface methods (IStringForm) + setValueAsString: function(str) { + const d = new Date(str); + if (!isNaN(d.getTime())) { + this.setValue(d); 
+ } + }, + + getValueAsString: function() { + const v = this.getValue(); + return v ? v.toISOString() : ""; + } + }, + + destruct: function() { + this.__dateField = null; + this.__hourSpinner = null; + this.__minuteSpinner = null; + } +}); diff --git a/services/static-webserver/client/source/class/osparc/ui/form/HoverMenuButton.js b/services/static-webserver/client/source/class/osparc/ui/form/HoverMenuButton.js deleted file mode 100644 index 837e185a89f4..000000000000 --- a/services/static-webserver/client/source/class/osparc/ui/form/HoverMenuButton.js +++ /dev/null @@ -1,67 +0,0 @@ -/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2023 IT'IS Foundation, https://itis.swiss - - License: - MIT: https://opensource.org/licenses/MIT - - Authors: - * Odei Maiz (odeimaiz) - -************************************************************************ */ - -qx.Class.define("osparc.ui.form.HoverMenuButton", { - extend: qx.ui.form.MenuButton, - - construct: function(label, icon, menu) { - this.base(arguments, label, icon, menu); - - this.set({ - backgroundColor: "transparent" - }); - }, - - members: { - // overriden - _onPointerOver: function() { - this.base(arguments); - - this.open(); - }, - - // overriden - _onPointerOut: function() { - this.base(arguments); - - /* - if (this.getMenu() && this.getMenu().isVisible()) { - const menu = this.getMenu(); - this.getMenu().addListener("pointerout", e => { - if (!qx.ui.core.Widget.contains(menu, e.getRelatedTarget())) { - this.getMenu().exclude(); - } - }); - } - */ - }, - - // overriden - _applyMenu: function(menu) { - this.base(arguments, menu); - - menu.set({ - padding: 10, - backgroundColor: "background-main-1" - }); - - menu.getContentElement().setStyles({ - "border-width": "0px" - }); - } - } -}); diff --git a/services/static-webserver/client/source/class/osparc/ui/form/IntlTelInput.js b/services/static-webserver/client/source/class/osparc/ui/form/IntlTelInput.js new file mode 100644 index 000000000000..d4519ff9e1be --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/ui/form/IntlTelInput.js @@ -0,0 +1,232 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +/* global intlTelInput */ + +/** + * @ignore(intlTelInput) + */ + +qx.Class.define("osparc.ui.form.IntlTelInput", { + extend: qx.ui.core.Widget, + implement: [qx.ui.form.IForm, qx.ui.form.IStringForm], + include: [qx.ui.form.MForm, qx.ui.form.MModelProperty], + + construct: function() { + this.base(arguments); + + this.setFocusable(true); + + this._setLayout(new qx.ui.layout.HBox()); + + this.getContentElement().setStyles({ + "overflow": "visible" // needed for countries dropdown menu + }); + + const randId = Math.floor(Math.random() * 100); + this.__htmlId = `phone-${randId}`; + const html = ``; + const phoneNumber = this.getChildControl("phone-input-field"); + phoneNumber.setHtml(html); + phoneNumber.addListenerOnce("appear", () => this.__convertInputToPhoneInput(), this); + + const themeManager = qx.theme.manager.Meta.getInstance(); + themeManager.addListener("changeTheme", () => this.__updateStyle()); + }, + + properties: { + // Form-compatible property + value: { + 
check: "String", + nullable: true, + event: "changeValue", + apply: "_applyValue" + }, + + compactField: { + check: "Boolean", + init: false, + nullable: false, + apply: "__updateStyle", + } + }, + + members: { + __htmlId: null, + __inputElement: null, + __phoneInput: null, + + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "phone-input-field": + control = new qx.ui.embed.Html(); + this._add(control, { flex: 1 }); + break; + } + return control || this.base(arguments, id); + }, + + // IStringForm interface implementation + getValue: function() { + return this.__phoneInput ? this.__phoneInput.getNumber() : null; + }, + + setValue: function(value) { + if (this.__phoneInput && value) { + // intlTelInput doesn't have a full setter for raw numbers + this.__phoneInput.setNumber(value); + } + this._applyValue(value); + }, + + resetValue: function() { + this.setValue(null); + }, + // IStringForm interface implementation + + // Make the widget tabbable/focusable + focus: function() { + if (this.__inputElement) { + this.__inputElement.focus(); + } else { + // fallback: let qooxdoo focus the content element + this.base(arguments); + } + }, + + tabFocus: function() { + this.focus(); + }, + + getFocusElement: function() { + const phoneNumber = this.getChildControl("phone-input-field"); + // phoneNumber is a qx.ui.embed.Html, it has a ContentElement (qx.html.Element) + return phoneNumber.getContentElement(); + }, + // Make the widget tabbable/focusable + + _applyValue: function(value) { + this.fireDataEvent("changeValue", value); + }, + + validate: function() { + return this.isValidNumber(); + }, + + isValidNumber: function() { + return this.__phoneInput ? this.__phoneInput.isValidNumber() : false; + }, + + verifyPhoneNumber: function() { + if (this.isValidNumber()) { + this.setValid(true); + } else { + this.setValid(false); + const validationError = this.__phoneInput.getValidationError(); + const errorMap = { + 0: this.tr("Invalid number"), + 1: this.tr("Invalid country code"), + 2: this.tr("Number too short"), + 3: this.tr("Number too long") + }; + const errorMsg = errorMap[validationError] || this.tr("Invalid number"); + this.setInvalidMessage(errorMsg); + } + this.__updateStyle(); + }, + + __updateStyle: function() { + const textColor = qx.theme.manager.Color.getInstance().resolve("text"); + const bgColor = qx.theme.manager.Color.getInstance().resolve("input_background"); + const productColor = qx.theme.manager.Color.getInstance().resolve("product-color"); + const defaultBottomBorder = qx.theme.manager.Color.getInstance().resolve("default-button-active"); + document.documentElement.style.setProperty('--country-list-dropdown-bg', bgColor); + document.documentElement.style.setProperty('--country-list-dropdown-text', textColor); + document.documentElement.style.setProperty('--tel-border-bottom-color', defaultBottomBorder); + document.documentElement.style.setProperty('--tel-border-bottom-color-focused', productColor); + + const isCompact = this.isCompactField(); + const phoneInputField = this.getChildControl("phone-input-field"); + const width = isCompact ? 152 : 223; + const height = isCompact ? 26 : 30; + + phoneInputField.set({ + maxWidth: width, + maxHeight: height, + margin: 0, + }); + + const phoneInput = this.__phoneInput; + if (phoneInput) { + phoneInput.a.style["width"] = width + "px"; + phoneInput.a.style["height"] = height + "px"; + phoneInput.a.style["borderWidth"] = "0px"; + phoneInput.a.style["backgroundColor"] = isCompact ? 
"transparent" : bgColor; + phoneInput.a.style["color"] = textColor; + + if (this.getValue() && !this.isValidNumber()) { + const errorColor = qx.theme.manager.Color.getInstance().resolve("failed-red"); + document.documentElement.style.setProperty('--tel-border-bottom-color', errorColor); + } + } + }, + + __convertInputToPhoneInput: function() { + const convertInputToPhoneInput = () => { + const domElement = document.querySelector(`#${this.__htmlId}`); + this.__inputElementToPhoneInput(domElement); + const phoneNumber = this.getChildControl("phone-input-field"); + phoneNumber.getContentElement().setStyles({ + "overflow": "visible" // needed for countries dropdown menu + }); + this.__updateStyle(); + }; + + const intlTelInputLib = osparc.wrapper.IntlTelInput.getInstance(); + if (intlTelInputLib.getLibReady()) { + convertInputToPhoneInput(); + } else { + intlTelInputLib.addListenerOnce("changeLibReady", e => { + if (e.getData()) { + convertInputToPhoneInput(); + } + }); + } + }, + + __inputElementToPhoneInput: function(domElement) { + this.__inputElement = domElement; // keep reference to raw + this.__phoneInput = intlTelInput(domElement, { + initialCountry: "auto", + geoIpLookup: callback => { + fetch("https://ipapi.co/json") + .then(res => res.json()) + .then(data => callback(data.country_code)) + .catch(() => callback("ch")); + }, + preferredCountries: [], + dropdownContainer: document.body, + }); + + // Trigger validation on blur + domElement.addEventListener("blur", () => this.verifyPhoneNumber()); + + this.__updateStyle(); + } + } +}); diff --git a/services/static-webserver/client/source/class/osparc/ui/form/renderer/LoginSinglePlaceholder.js b/services/static-webserver/client/source/class/osparc/ui/form/renderer/LoginSinglePlaceholder.js new file mode 100644 index 000000000000..8077a6b6c34b --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/ui/form/renderer/LoginSinglePlaceholder.js @@ -0,0 +1,38 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +/** + * Based on the single renderer {@link qx.ui.form.renderer.SinglePlaceholder}. + * Just a more relaxed version with more spacing and transparent backgrounds. 
+ */ +qx.Class.define("osparc.ui.form.renderer.LoginSinglePlaceholder", { + extend: qx.ui.form.renderer.SinglePlaceholder, + + construct: function(form) { + this.base(arguments, form); + + this._getLayout().setSpacing(10); + }, + + members: { + addItems : function(items, names, title) { + this.base(arguments, items, names, title); + + items.forEach(item => item.setBackgroundColor("transparent")); + } + } +}); diff --git a/services/static-webserver/client/source/class/osparc/ui/form/renderer/SingleWithIcon.js b/services/static-webserver/client/source/class/osparc/ui/form/renderer/SingleWithWidget.js similarity index 68% rename from services/static-webserver/client/source/class/osparc/ui/form/renderer/SingleWithIcon.js rename to services/static-webserver/client/source/class/osparc/ui/form/renderer/SingleWithWidget.js index a8252d6040c2..d886748c42ba 100644 --- a/services/static-webserver/client/source/class/osparc/ui/form/renderer/SingleWithIcon.js +++ b/services/static-webserver/client/source/class/osparc/ui/form/renderer/SingleWithWidget.js @@ -15,24 +15,24 @@ ************************************************************************ */ -qx.Class.define("osparc.ui.form.renderer.SingleWithIcon", { +qx.Class.define("osparc.ui.form.renderer.SingleWithWidget", { extend: qx.ui.form.renderer.Single, - construct: function(form, icons) { - if (icons) { - this.__icons = icons; + construct: function(form, widgets) { + if (widgets) { + this.__widgets = widgets; } else { - this.__icons = {}; + this.__widgets = {}; } this.base(arguments, form); }, members: { - __icons: null, + __widgets: null, - setIcons: function(icons) { - this.__icons = icons; + setWidgets: function(widgets) { + this.__widgets = widgets; this._onFormChange(); }, @@ -45,11 +45,9 @@ qx.Class.define("osparc.ui.form.renderer.SingleWithIcon", { let row = title === null ? 
0 : 1; for (let i = 0; i < items.length; i++) { - if (i in this.__icons) { - const image = new qx.ui.basic.Image(this.__icons[i]).set({ - alignY: "middle", - }); - this._add(image, { + if (i in this.__widgets) { + const widget = this.__widgets[i]; + this._add(widget, { row, column: 2, }); diff --git a/services/static-webserver/client/source/class/osparc/ui/list/CollaboratorListItem.js b/services/static-webserver/client/source/class/osparc/ui/list/CollaboratorListItem.js index 7ca2bff44c64..9ef3d0b03e6f 100644 --- a/services/static-webserver/client/source/class/osparc/ui/list/CollaboratorListItem.js +++ b/services/static-webserver/client/source/class/osparc/ui/list/CollaboratorListItem.js @@ -18,9 +18,20 @@ qx.Class.define("osparc.ui.list.CollaboratorListItem", { extend: osparc.ui.list.ListItem, + construct: function() { + this.base(arguments); + + this.setCursor("default"); + }, + properties: { collabType: { - check: [0, 1, 2], // 0:all, 1:org, 2:user + check: [ + "everyone", // osparc.store.Groups.COLLAB_TYPE.EVERYONE + "support", // osparc.store.Groups.COLLAB_TYPE.SUPPORT + "organization", // osparc.store.Groups.COLLAB_TYPE.ORGANIZATION + "user", // osparc.store.Groups.COLLAB_TYPE.USER + ], event: "changeCollabType", nullable: true }, @@ -75,17 +86,27 @@ qx.Class.define("osparc.ui.list.CollaboratorListItem", { members: { __getRoleInfo: function(id) { + let roleInfo = undefined; const resource = this.getResourceType(); - if (["study", "template", "tutorial", "hypertool"].includes(resource)) { - return osparc.data.Roles.STUDY[id]; - } else if (resource === "service") { - return osparc.data.Roles.SERVICES[id]; - } else if (resource === "workspace") { - return osparc.data.Roles.WORKSPACE[id]; - } else if (resource === "tag") { - return osparc.data.Roles.STUDY[id]; + switch (resource) { + case "study": + case "template": + case "tutorial": + case "hypertool": + case "tag": + roleInfo = osparc.data.Roles.STUDY[id]; + break; + case "function": + roleInfo = osparc.data.Roles.FUNCTION[id]; + break; + case "service": + roleInfo = osparc.data.Roles.SERVICES[id]; + break; + case "workspace": + roleInfo = osparc.data.Roles.WORKSPACE[id]; + break; } - return undefined; + return roleInfo; }, _createChildControlImpl: function(id) { @@ -119,10 +140,7 @@ qx.Class.define("osparc.ui.list.CollaboratorListItem", { return; } const groupsStore = osparc.store.Groups.getInstance(); - const everyoneGroupIds = [ - groupsStore.getEveryoneProductGroup().getGroupId(), - groupsStore.getEveryoneGroup().getGroupId(), - ]; + const everyoneGroupIds = groupsStore.getEveryoneGroupIds(); const label = this.getChildControl("title"); if (everyoneGroupIds.includes(this.getModel())) { label.setValue(this.tr("Public")); @@ -136,13 +154,13 @@ qx.Class.define("osparc.ui.list.CollaboratorListItem", { if (value === null) { const collabType = this.getCollabType(); switch (collabType) { - case 0: + case osparc.store.Groups.COLLAB_TYPE.EVERYONE: value = "@FontAwesome5Solid/globe/28"; break; - case 1: + case osparc.store.Groups.COLLAB_TYPE.ORGANIZATION: value = "@FontAwesome5Solid/users/28"; break; - case 2: + case osparc.store.Groups.COLLAB_TYPE.USER: value = "@FontAwesome5Solid/user/28"; break; } @@ -157,7 +175,9 @@ qx.Class.define("osparc.ui.list.CollaboratorListItem", { // highlight me const email = osparc.auth.Data.getInstance().getEmail(); if (value && value.includes(email)) { - this.addState("selected"); + this.setBackgroundColor("background-selected"); + } else { + this.setBackgroundColor("background-main-2"); } }, @@ -212,9 
+232,9 @@ qx.Class.define("osparc.ui.list.CollaboratorListItem", { break; } case "write": { - const resource = this.getResourceType(); - if (resource !== "service") { - // there is no owner role for services + // there might not be delete role + const deleteRole = this.__getRoleInfo("delete"); + if (deleteRole) { const promoteButton = new qx.ui.menu.Button(this.tr(`Promote to ${this.__getRoleInfo("delete").label}`)); promoteButton.addListener("execute", () => { this.fireDataEvent("promoteToOwner", { diff --git a/services/static-webserver/client/source/class/osparc/ui/list/ListItem.js b/services/static-webserver/client/source/class/osparc/ui/list/ListItem.js index 89f0d7c87b7c..296486eaf78b 100644 --- a/services/static-webserver/client/source/class/osparc/ui/list/ListItem.js +++ b/services/static-webserver/client/source/class/osparc/ui/list/ListItem.js @@ -60,6 +60,7 @@ qx.Class.define("osparc.ui.list.ListItem", { padding: 5, minHeight: 48, alignY: "middle", + decorator: "rounded", }); this.addListener("pointerover", this._onPointerOver, this); @@ -141,16 +142,7 @@ qx.Class.define("osparc.ui.list.ListItem", { let control; switch (id) { case "thumbnail": - control = new qx.ui.basic.Image().set({ - alignY: "middle", - scale: true, - allowGrowX: true, - allowGrowY: true, - allowShrinkX: true, - allowShrinkY: true, - maxWidth: 32, - maxHeight: 32 - }); + control = osparc.utils.Utils.createThumbnail(32); this._add(control, { row: 0, column: 0, @@ -159,7 +151,9 @@ qx.Class.define("osparc.ui.list.ListItem", { break; case "title": control = new qx.ui.basic.Label().set({ - font: "text-14" + font: "text-14", + selectable: true, + rich: true, }); this._add(control, { row: 0, @@ -169,7 +163,8 @@ qx.Class.define("osparc.ui.list.ListItem", { case "subtitle": control = new qx.ui.basic.Label().set({ font: "text-13", - rich: true + selectable: true, + rich: true, }); this._add(control, { row: 1, diff --git a/services/static-webserver/client/source/class/osparc/ui/markdown/Markdown.js b/services/static-webserver/client/source/class/osparc/ui/markdown/Markdown.js index 70fffb4c3ac6..765f6287c47e 100644 --- a/services/static-webserver/client/source/class/osparc/ui/markdown/Markdown.js +++ b/services/static-webserver/client/source/class/osparc/ui/markdown/Markdown.js @@ -92,7 +92,7 @@ qx.Class.define("osparc.ui.markdown.Markdown", { if (linkRepresentation.type === "text") { linkHtml += linkRepresentation.text; } else if (linkRepresentation.type === "image") { - linkHtml += `${linkRepresentation.text}`; + linkHtml += `${linkRepresentation.text}`; } } linkHtml += ``; @@ -100,6 +100,9 @@ qx.Class.define("osparc.ui.markdown.Markdown", { } }; marked.use({ renderer }); + // By default, Markdown requires two spaces at the end of a line or a blank line between paragraphs to produce a line break. + // With this, a single line break (Enter) in your Markdown input will render as a
in HTML. + marked.setOptions({ breaks: true }); // const html = marked.parse(value); diff --git a/services/static-webserver/client/source/class/osparc/ui/markdown/MarkdownChat.js b/services/static-webserver/client/source/class/osparc/ui/markdown/MarkdownChat.js new file mode 100644 index 000000000000..a27463a05d35 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/ui/markdown/MarkdownChat.js @@ -0,0 +1,208 @@ +/* + * oSPARC - The SIMCORE frontend - https://osparc.io + * Copyright: 2025 IT'IS Foundation - https://itis.swiss + * License: MIT - https://opensource.org/licenses/MIT + * Authors: Odei Maiz (odeimaiz) + */ + +/** + * @asset(marked/marked.min.js) + * @asset(marked/markdown.css) + * @ignore(marked) + */ + +/* global marked */ + +qx.Class.define("osparc.ui.markdown.MarkdownChat", { + extend: qx.ui.embed.Html, + + /** + * @param {String} markdown Plain text accepting markdown syntax + */ + construct: function(markdown) { + this.base(arguments); + + this.set({ + allowGrowX: false, + allowGrowY: true, + overflowX: "hidden", // hide scrollbars + overflowY: "hidden", // hide scrollbars + }); + + const markdownCssUri = qx.util.ResourceManager.getInstance().toUri("marked/markdown.css"); + qx.module.Css.includeStylesheet(markdownCssUri); + + this.__loadMarked = new Promise((resolve, reject) => { + if (typeof marked === "function") { + resolve(marked); + } else { + const loader = new qx.util.DynamicScriptLoader([ + "marked/marked.min.js" + ]); + loader.addListenerOnce("ready", () => resolve(marked), this); + loader.addListenerOnce("failed", e => + reject(Error(`Failed to load ${e.getData()}`)) + ); + loader.start(); + } + }); + + if (markdown) { + this.setValue(markdown); + } + + this.addListenerOnce("appear", () => { + this.getContentElement().addClass("osparc-markdown"); + this.__scheduleResize(); // first paint sizing + }); + }, + + properties: { + /** + * Holds the raw markdown text and updates the label's {@link #value} whenever new markdown arrives. + */ + value: { + check: "String", + apply: "__applyMarkdown" + }, + + measurerMaxWidth: { + check: "Integer", + init: 220, + nullable: true, + }, + }, + + events: { + "resized": "qx.event.type.Event", + }, + + statics: { + MD_ROOT: "osparc-md-root", + MD_MEASURE: "osparc-md-measure", + }, + + members: { + __loadMarked: null, + + /** + * Apply function for the markdown property. Compiles the markdown text to HTML and applies it to the value property of the label. + * @param {String} value Plain text accepting markdown syntax. + */ + __applyMarkdown: function(value = "") { + this.__loadMarked.then(() => { + const renderer = { + link(link) { + const linkColor = qx.theme.manager.Color.getInstance().resolve("link"); + let linkHtml = `` + if (link.tokens && link.tokens.length) { + const linkRepresentation = link.tokens[0]; + if (linkRepresentation.type === "text") { + linkHtml += linkRepresentation.text; + } else if (linkRepresentation.type === "image") { + linkHtml += `${linkRepresentation.text}`; + } + } + linkHtml += ``; + return linkHtml; + } + }; + marked.use({ renderer }); + // By default, Markdown requires two spaces at the end of a line or a blank line between paragraphs to produce a line break. + // With this, a single line break (Enter) in your Markdown input will render as a
<br> in HTML. + marked.setOptions({ breaks: true }); + + const html = marked.parse(value); + + const safeHtml = osparc.wrapper.DOMPurify.getInstance().sanitize(html); + + // flow-root prevents margin collapsing; inline style avoids extra stylesheet juggling + const max = this.getMeasurerMaxWidth() || 220; + const mdRoot = ` +
<div class="${this.self().MD_ROOT}" style="display: flow-root;">
+              <div class="${this.self().MD_MEASURE}" style="display: inline-block; max-width: ${max}px;">
+                ${safeHtml}
+              </div>
+            </div>
+ `; + this.setHtml(mdRoot); + + // resize once DOM is updated/painted + this.__scheduleResize(); + + // also resize once images load (they change height later) + const el = this.__getDomElement(); + if (el) { + el.querySelectorAll("img").forEach(img => { + if (!img.complete) { + img.addEventListener("load", () => this.__scheduleResize(), { once: true }); + img.addEventListener("error", () => this.__scheduleResize(), { once: true }); + } + }); + } + + // safety net; sometimes we miss an image load or so + setTimeout(() => this.__scheduleResize(), 500); + }).catch(error => console.error(error)); + }, + + __getDomElement: function() { + if (!this.getContentElement || this.getContentElement() === null) { + return null; + } + const domElement = this.getContentElement().getDomElement(); + if (domElement) { + return domElement; + } + return null; + }, + + __scheduleResize: function() { + const dom = this.__getDomElement(); + if (!dom) { + return; + } + + // collapse first so we don't re-measure an old minHeight + this.setHeight(null); + this.setMinHeight(0); + this.setWidth(null); + this.setMinWidth(0); + + window.requestAnimationFrame(() => { + // force reflow + void dom.offsetHeight; + + // measure the wrapper we injected (covers ALL children) + const root = dom.querySelector("."+this.self().MD_ROOT) || dom; + const meas = root.querySelector("."+this.self().MD_MEASURE) || root; + + const rect = meas.getBoundingClientRect(); + const rH = Math.ceil(rect.height || 0); + const rW = Math.ceil(rect.width || 0); + + // include widget insets (decorator/padding/border) + const insets = this.getInsets ? this.getInsets() : { top:0, right:0, bottom:0, left:0 }; + const totalH = Math.ceil((rH || 0) + (insets.top || 0) + (insets.bottom || 0)); + const totalW = Math.ceil((rW || 0) + (insets.left || 0) + (insets.right || 0)); + + this.setMinHeight(totalH); + this.setHeight(totalH); + + // width: shrink-to-fit, but cap at a max + this.setMaxWidth(null); // measurer already capped; we set exact width + this.setMinWidth(1); // avoid 0 when empty + this.setWidth(totalW); + + this.fireEvent("resized"); + }); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/ui/message/FlashMessageOEC.js b/services/static-webserver/client/source/class/osparc/ui/message/FlashMessageOEC.js new file mode 100644 index 000000000000..d6069ac75979 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/ui/message/FlashMessageOEC.js @@ -0,0 +1,155 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.ui.message.FlashMessageOEC", { + extend: osparc.ui.message.FlashMessage, + + /** + * Constructor for the FlashMessage. + * + * @param {String} message Message that the user will read. + * @param {Number} duration + * @param {String} supportId + */ + construct: function(message, duration, supportId) { + this.base(arguments, message, "ERROR", duration ? 
duration*2 : null); + + if (osparc.store.Groups.getInstance().isSupportEnabled() && false) { + this.getChildControl("contact-support"); + } else { + const oecAtom = this.getChildControl("oec-atom"); + this.bind("supportId", oecAtom, "label"); + } + if (supportId) { + this.setSupportId(supportId); + } + }, + + properties: { + supportId: { + check: "String", + init: "", + nullable: true, + event: "changeSupportId", + }, + }, + + members: { + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "oec-atom": + control = new qx.ui.basic.Atom().set({ + icon: "@FontAwesome5Solid/copy/10", + iconPosition: "right", + gap: 8, + cursor: "pointer", + alignX: "center", + allowGrowX: false, + }); + control.addListener("tap", () => this.__copyToClipboard()); + this.addWidget(control); + break; + case "contact-support": + control = new qx.ui.basic.Atom().set({ + label: this.tr("Contact Support"), + icon: "@FontAwesome5Solid/comments/10", + iconPosition: "left", + gap: 8, + cursor: "pointer", + alignX: "center", + allowGrowX: false, + }); + control.addListener("tap", () => this.__openSupportChat()); + this.addWidget(control); + break; + } + return control || this.base(arguments, id); + }, + + __getContext: function() { + const dataToClipboard = { + message: this.getMessage(), + supportId: this.getSupportId(), + timestamp: new Date().toString(), + url: window.location.href, + releaseTag: osparc.utils.Utils.getReleaseTag(), + } + if (osparc.store.Store.getInstance().getCurrentStudy()) { + dataToClipboard["projectId"] = osparc.store.Store.getInstance().getCurrentStudy().getUuid(); + } + return osparc.utils.Utils.prettifyJson(dataToClipboard); + }, + + __getSupportFriendlyContext: function() { + let curatedText = "Extra Context:"; + curatedText += "\nError: " + this.getMessage(); + curatedText += "\nSupportID: " + this.getSupportId(); + curatedText += "\nTimestamp: " + new Date().toISOString(); + curatedText += "\nURL: " + window.location.href; + curatedText += "\nRelease Tag: " + osparc.utils.Utils.getReleaseTag(); + if (osparc.store.Store.getInstance().getCurrentStudy()) { + curatedText += "\nProject ID: " + osparc.store.Store.getInstance().getCurrentStudy().getUuid(); + } + return curatedText; + }, + + __copyToClipboard: function() { + osparc.utils.Utils.copyTextToClipboard(this.__getContext()); + }, + + __openSupportChat: function() { + const supportCenter = osparc.support.SupportCenter.openWindow(); + supportCenter.createConversation("reportOEC"); + + const textToAddMessageField = msg => { + if (supportCenter.getChildControl("conversation-page")) { + supportCenter.getChildControl("conversation-page").postMessage(msg); + } + } + + const caption = this.tr("Something went wrong"); + const introText = this.tr("Please describe what you were doing before the error (optional).\nThis will help our support team understand the context and resolve the issue faster."); + const confirmationWindow = new osparc.ui.window.Confirmation(introText); + confirmationWindow.setCaption(caption); + confirmationWindow.getChildControl("message-label").setFont("text-13"); + const extraContextTA = new qx.ui.form.TextArea().set({ + font: "text-13", + autoSize: true, + minHeight: 70, + maxHeight: 140 + }); + confirmationWindow.addWidget(extraContextTA); + confirmationWindow.addCancelButton(); + confirmationWindow.setConfirmText(this.tr("Send Report")); + confirmationWindow.open(); + confirmationWindow.addListener("close", () => { + if (confirmationWindow.getConfirmed()) { + const extraContext = 
extraContextTA.getValue() + const friendlyContext = this.__getSupportFriendlyContext(); + const text = "Dear Support Team,\n" + extraContext + "\n" + friendlyContext; + textToAddMessageField(text); + // This should be an automatic response in the chat + const msg = this.tr("Thanks, your report has been sent.<br>
Our support team will get back to you."); + osparc.FlashMessenger.logAs(msg, "INFO"); + } else { + supportCenter.close(); + } + }); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/ui/message/Loading.js b/services/static-webserver/client/source/class/osparc/ui/message/Loading.js index e222ead4cbff..ffdf2f25908f 100644 --- a/services/static-webserver/client/source/class/osparc/ui/message/Loading.js +++ b/services/static-webserver/client/source/class/osparc/ui/message/Loading.js @@ -35,15 +35,7 @@ qx.Class.define("osparc.ui.message.Loading", { construct: function() { this.base(arguments); - const layout = new qx.ui.layout.Grid(20, 20); - layout.setRowFlex(this.self().GRID_POS.SPACER_TOP, 1); - layout.setRowFlex(this.self().GRID_POS.SPACER_BOTTOM, 1); - layout.setColumnFlex(0, 1); - layout.setColumnMaxWidth(1, 400); - layout.setColumnAlign(1, "center", "middle"); - layout.setColumnFlex(2, 1); - layout.setColumnAlign(2, "right", "middle"); - this._setLayout(layout); + this._setLayout(new qx.ui.layout.VBox(10)); this.__buildLayout(); }, @@ -88,104 +80,72 @@ qx.Class.define("osparc.ui.message.Loading", { LOGO_HEIGHT: 100, ICON_HEIGHT: 220, STATUS_ICON_SIZE: 20, - - GRID_POS: { - TOOLBAR: 0, - SPACER_TOP: 1, - LOGO: 2, - WAITING: 3, - MESSAGES: 4, - EXTRA_WIDGETS: 5, - SPACER_BOTTOM: 6, - } }, members: { - __thumbnail: null, - __header: null, - __messagesContainer: null, - __extraWidgets: null, __maxButton: null, - __buildLayout: function() { - this._add(new qx.ui.core.Widget(), { - column: 0, - row: 0 - }); - - const maxLayout = this.__createMaximizeToolbar(); - this._add(maxLayout, { - column: 2, - row: this.self().GRID_POS.TOOLBAR - }); - - this._add(new qx.ui.core.Spacer(), { - column: 1, - row: this.self().GRID_POS.SPACER_TOP - }); - - const productLogoPath = osparc.product.Utils.getLogoPath(); - const thumbnail = this.__thumbnail = new osparc.ui.basic.Thumbnail(productLogoPath, this.self().ICON_WIDTH, this.self().LOGO_HEIGHT).set({ - alignX: "center" - }); - let logoHeight = this.self().LOGO_HEIGHT; - if (qx.util.ResourceManager.getInstance().getImageFormat(productLogoPath) === "png") { - logoHeight = osparc.ui.basic.Logo.getHeightKeepingAspectRatio(productLogoPath, this.self().ICON_WIDTH); - thumbnail.getChildControl("image").set({ - width: this.self().ICON_WIDTH, - height: logoHeight - }); - } else { - thumbnail.getChildControl("image").set({ - width: this.self().ICON_WIDTH, - height: logoHeight - }); + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "max-toolbar": + control = this.__createMaximizeToolbar(); + this._add(control); + break; + case "spacer-top": + control = new qx.ui.core.Spacer(); + this._add(control, { + flex: 1, + }); + break; + case "thumbnail": + control = this.__createThumbnail(); + this._add(control); + break; + case "loading-title": + control = new qx.ui.basic.Atom().set({ + icon: "@FontAwesome5Solid/circle-notch/"+this.self().STATUS_ICON_SIZE, + font: "title-18", + alignX: "center", + rich: true, + gap: 15, + allowGrowX: false, + }); + osparc.service.StatusUI.updateCircleAnimation(control.getChildControl("icon")); + control.getChildControl("label").set({ + rich: true, + wrap: true, + alignX: "center", + }); + this._add(control); + break; + case "messages-container": + control = new qx.ui.container.Composite(new qx.ui.layout.VBox(10).set({ + alignX: "center" + })); + this._add(control); + break; + case "extra-widgets-container": + control = new qx.ui.container.Composite(new qx.ui.layout.VBox(10).set({ + 
alignX: "center" + })); + this._add(control); + break; } - this._add(thumbnail, { - column: 1, - row: this.self().GRID_POS.LOGO - }); - - const waitingHeader = this.__header = new qx.ui.basic.Atom().set({ - icon: "@FontAwesome5Solid/circle-notch/"+this.self().STATUS_ICON_SIZE, - font: "title-18", - alignX: "center", - rich: true, - gap: 15, - allowGrowX: false - }); - const icon = waitingHeader.getChildControl("icon"); - osparc.service.StatusUI.updateCircleAnimation(icon); - const label = waitingHeader.getChildControl("label"); - label.set({ - rich: true, - wrap: true, - alignX: "center", - }); - this._add(waitingHeader, { - column: 1, - row: this.self().GRID_POS.WAITING - }); - - const messages = this.__messagesContainer = new qx.ui.container.Composite(new qx.ui.layout.VBox(10).set({ - alignX: "center" - })); - this._add(messages, { - column: 1, - row: this.self().GRID_POS.MESSAGES - }); - - const extraWidgets = this.__extraWidgets = new qx.ui.container.Composite(new qx.ui.layout.VBox(10).set({ - alignX: "center" - })); - this._add(extraWidgets, { - column: 1, - row: this.self().GRID_POS.EXTRA_WIDGETS - }); + return control || this.base(arguments, id); + }, - this._add(new qx.ui.core.Spacer(), { - column: 1, - row: this.self().GRID_POS.SPACER_BOTTOM + __buildLayout: function() { + this.getChildControl("max-toolbar"); + this.getChildControl("spacer-top"); + this.getChildControl("thumbnail"); + this.getChildControl("loading-title"); + this.getChildControl("messages-container"); + this.getChildControl("extra-widgets-container"); + + const bottomSpacer = new qx.ui.core.Spacer(); + this._add(bottomSpacer, { + flex: 1, }); }, @@ -220,42 +180,79 @@ qx.Class.define("osparc.ui.message.Loading", { alignX: "right", })); this.bind("showToolbar", toolbarLayout, "visibility", { - converter: showToolbar => showToolbar ? "visible" : "hidden" + converter: showToolbar => showToolbar ? 
"visible" : "excluded" }); toolbarLayout.add(maxButton); return toolbarLayout; }, + __createThumbnail: function() { + const productLogoPath = osparc.product.Utils.getLogoPath(); + const thumbnail = new osparc.ui.basic.Thumbnail(productLogoPath, this.self().ICON_WIDTH, this.self().LOGO_HEIGHT).set({ + alignX: "center" + }); + let logoHeight = this.self().LOGO_HEIGHT; + if (qx.util.ResourceManager.getInstance().getImageFormat(productLogoPath) === "png") { + logoHeight = osparc.ui.basic.Logo.getHeightKeepingAspectRatio(productLogoPath, this.self().ICON_WIDTH); + thumbnail.getChildControl("image").set({ + width: this.self().ICON_WIDTH, + height: logoHeight + }); + } else { + thumbnail.getChildControl("image").set({ + width: this.self().ICON_WIDTH, + height: logoHeight + }); + } + return thumbnail; + }, + __applyLogo: function(newLogo) { const productLogoPath = osparc.product.Utils.getLogoPath(); + const thumbnail = this.getChildControl("thumbnail"); if (newLogo !== productLogoPath) { - this.__thumbnail.set({ + thumbnail.set({ maxHeight: this.self().ICON_HEIGHT, height: this.self().ICON_HEIGHT, }); - this.__thumbnail.getChildControl("image").set({ + thumbnail.getChildControl("image").set({ maxHeight: this.self().ICON_HEIGHT, height: this.self().ICON_HEIGHT, }); } - this.__thumbnail.setSource(newLogo); + thumbnail.setSource(newLogo); }, __applyHeader: function(value) { - this.__header.setLabel(value); + this._setHeaderTitle(value); + + // extract the state from the title const words = value.split(" "); if (words.length) { const state = words[0]; const iconSource = osparc.service.StatusUI.getIconSource(state.toLowerCase(), this.self().STATUS_ICON_SIZE); if (iconSource) { - this.__header.setIcon(iconSource); - osparc.service.StatusUI.updateCircleAnimation(this.__header.getChildControl("icon")); + this._setHeaderIcon(iconSource); } } }, + _setHeaderTitle: function(label) { + const loadingTitle = this.getChildControl("loading-title"); + loadingTitle.setLabel(label); + }, + + _setHeaderIcon: function(iconSource) { + const loadingTitle = this.getChildControl("loading-title"); + loadingTitle.setIcon(iconSource); + // this will stop the circle, if it's not a circle + osparc.service.StatusUI.updateCircleAnimation(loadingTitle.getChildControl("icon")); + }, + __applyMessages: function(msgs) { this.clearMessages(); + + const messagesContainer = this.getChildControl("messages-container"); if (msgs) { msgs.forEach(msg => { const text = new qx.ui.basic.Label(msg.toString()).set({ @@ -263,33 +260,36 @@ qx.Class.define("osparc.ui.message.Loading", { rich: true, wrap: true }); - this.__messagesContainer.add(text); + messagesContainer.add(text); }); - this.__messagesContainer.show(); + messagesContainer.show(); } else { - this.__messagesContainer.exclude(); + messagesContainer.exclude(); } }, clearMessages: function() { - this.__messagesContainer.removeAll(); + const messagesContainer = this.getChildControl("messages-container"); + messagesContainer.removeAll(); }, getMessageLabels: function() { - return this.__messagesContainer.getChildren(); + return this.getChildControl("messages-container").getChildren(); }, addWidgetToMessages: function(widget) { + const messagesContainer = this.getChildControl("messages-container"); if (widget) { - this.__messagesContainer.add(widget); - this.__messagesContainer.show(); + messagesContainer.add(widget); + messagesContainer.show(); } else { - this.__messagesContainer.exclude(); + messagesContainer.exclude(); } }, addExtraWidget: function(widget) { - 
this.__extraWidgets.add(widget); + const extraWidgetsContainer = this.getChildControl("extra-widgets-container"); + extraWidgetsContainer.add(widget); }, } }); diff --git a/services/static-webserver/client/source/class/osparc/ui/message/NodeLockedPage.js b/services/static-webserver/client/source/class/osparc/ui/message/NodeLockedPage.js new file mode 100644 index 000000000000..c12c9cdae807 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/ui/message/NodeLockedPage.js @@ -0,0 +1,111 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +/** + * The locked page + * + * ----------------------- + * | | + * | service logo | + * | locked/unlocked | + * | - who is in | + * | | + * ----------------------- + * + */ +qx.Class.define("osparc.ui.message.NodeLockedPage", { + extend: osparc.ui.message.Loading, + + + construct: function() { + this.base(arguments); + + this.__addActionsLayout(); + }, + + properties: { + node: { + check: "osparc.data.model.Node", + init: null, + nullable: false, + event: "changeNode", + apply: "__applyNode", + }, + }, + + members: { + __avatarGroup: null, + + __applyNode: function(node) { + const thumbnail = node.getMetadata()["thumbnail"]; + if (thumbnail) { + this.setLogo(thumbnail); + } + + const lockState = node.getStatus().getLockState(); + + lockState.addListener("changeLocked", this.__lockedChanged, this); + this.__lockedChanged(); + + lockState.addListener("changeCurrentUserGroupIds", this.__currentUserGroupIdsChanged, this); + this.__currentUserGroupIdsChanged(); + }, + + __addActionsLayout: function() { + const actionsLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(10).set({ + alignX: "center" + })); + + const conversationButton = new qx.ui.form.Button().set({ + appearance: "form-button-outlined", + toolTipText: this.tr("Conversations"), + icon: "@FontAwesome5Solid/comments/16", + }); + conversationButton.addListener("execute", () => { + if (this.getNode()) { + const study = this.getNode().getStudy(); + osparc.study.Conversations.popUpInWindow(study.serialize()); + } + }); + actionsLayout.add(conversationButton); + + const avatarGroup = this.__avatarGroup = new osparc.ui.basic.AvatarGroup(26, "left", 50).set({ + hideMyself: true, + alignX: "center", + }); + actionsLayout.add(avatarGroup); + this.addWidgetToMessages(actionsLayout); + }, + + __lockedChanged: function() { + const lockState = this.getNode().getStatus().getLockState(); + if (lockState.isLocked()) { + this._setHeaderIcon("@FontAwesome5Solid/lock/20"); + this._setHeaderTitle(this.tr("The application is being used")); + } else { + this._setHeaderIcon("@FontAwesome5Solid/lock-open/20"); + this._setHeaderTitle(this.tr("The application is not being used")); + } + }, + + __currentUserGroupIdsChanged: function() { + const lockState = this.getNode().getStatus().getLockState(); + const currentUserGroupIds = lockState.getCurrentUserGroupIds(); + this.__avatarGroup.setUserGroupIds(currentUserGroupIds); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/ui/switch/ThemeSwitcher.js b/services/static-webserver/client/source/class/osparc/ui/switch/ThemeSwitcher.js index 040e9022ad33..ff2903fe6084 100644 --- 
a/services/static-webserver/client/source/class/osparc/ui/switch/ThemeSwitcher.js +++ b/services/static-webserver/client/source/class/osparc/ui/switch/ThemeSwitcher.js @@ -13,6 +13,11 @@ qx.Class.define("osparc.ui.switch.ThemeSwitcher", { type: "static", statics: { + isLight: function() { + const currentTheme = qx.theme.manager.Meta.getInstance().getTheme(); + return currentTheme.basename === "ThemeLight"; + }, + getValidThemes: function() { return Object.values(qx.Theme.getAll()).filter(theme => theme.type === "meta"); }, diff --git a/services/static-webserver/client/source/class/osparc/ui/table/cellrenderer/ButtonRenderer.js b/services/static-webserver/client/source/class/osparc/ui/table/cellrenderer/ButtonRenderer.js index 445840c570d6..8fb29645d72c 100644 --- a/services/static-webserver/client/source/class/osparc/ui/table/cellrenderer/ButtonRenderer.js +++ b/services/static-webserver/client/source/class/osparc/ui/table/cellrenderer/ButtonRenderer.js @@ -38,6 +38,10 @@ qx.Class.define("osparc.ui.table.cellrenderer.ButtonRenderer", { } }, + statics: { + BUTTON_CLASS: "qx-material-button", + }, + members: { // Override _getContentHtml: function(cellInfo) { @@ -46,7 +50,7 @@ qx.Class.define("osparc.ui.table.cellrenderer.ButtonRenderer", { // Return the button with the image return ` -
diff --git a/services/static-webserver/client/source/class/osparc/ui/table/cellrenderer/ImageButtonRenderer.js b/services/static-webserver/client/source/class/osparc/ui/table/cellrenderer/ImageButtonRenderer.js index 8b9fd7896bd4..ac10f0017b92 100644 --- a/services/static-webserver/client/source/class/osparc/ui/table/cellrenderer/ImageButtonRenderer.js +++ b/services/static-webserver/client/source/class/osparc/ui/table/cellrenderer/ImageButtonRenderer.js @@ -18,9 +18,12 @@ qx.Class.define("osparc.ui.table.cellrenderer.ImageButtonRenderer", { extend: osparc.ui.table.cellrenderer.ButtonRenderer, - construct: function(clickAction, iconPath) { + construct: function(clickAction, iconPath, shouldShowFn = null) { this.base(arguments, clickAction); + this.__imageCache = {}; + this.__shouldShowFn = shouldShowFn; + this.setIconPath(iconPath); }, @@ -33,12 +36,64 @@ qx.Class.define("osparc.ui.table.cellrenderer.ImageButtonRenderer", { }, }, + statics: { + getClosestItems: function(target, className = osparc.ui.table.cellrenderer.ButtonRenderer.BUTTON_CLASS) { + return target.closest("."+className) + }, + }, + members: { + __imageCache: null, + __shouldShowFn: null, + + // overridden to play with it's visibility + createDataCellHtml: function(cellInfo, htmlArr) { + const shouldShow = this.__shouldShowFn + ? + this.__shouldShowFn(cellInfo) + : + true; + if (!shouldShow) { + return ""; // Hide button + } + return this.base(arguments, cellInfo, htmlArr); + }, + __applyIconPath: function(iconPath) { const resMgr = qx.util.ResourceManager.getInstance(); - const iconUrl = resMgr.toUri(iconPath); // Resolves to the correct URL of the asset + const iconUrl = resMgr.toUri(iconPath); + + // Create a data URI or use a more cache-friendly approach + // Use base64 encoding for small icons (best for caching) + this.__loadImageAsDataUri(iconUrl, iconPath); + }, + + __loadImageAsDataUri: function(iconUrl, iconPath) { + if (this.__imageCache[iconPath]) { + this.setButtonContent(this.__imageCache[iconPath]); + return; + } + + // Fetch and convert to data URI for permanent caching + fetch(iconUrl) + .then(response => response.blob()) + .then(blob => { + const reader = new FileReader(); + reader.onload = () => { + const dataUri = reader.result; + const content = `icon`; - this.setButtonContent(`icon`); + // Cache the data URI + this.__imageCache[iconPath] = content; + this.setButtonContent(content); + }; + reader.readAsDataURL(blob); + }) + .catch(err => { + console.warn("Failed to cache icon as data URI:", iconPath, err); + // Fallback to original method + this.setButtonContent(`icon`); + }); }, } }); diff --git a/services/static-webserver/client/source/class/osparc/ui/toolbar/ProgressBar.js b/services/static-webserver/client/source/class/osparc/ui/toolbar/ProgressBar.js deleted file mode 100644 index a558e3343e37..000000000000 --- a/services/static-webserver/client/source/class/osparc/ui/toolbar/ProgressBar.js +++ /dev/null @@ -1,46 +0,0 @@ -/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2019 IT'IS Foundation, https://itis.swiss - - License: - MIT: https://opensource.org/licenses/MIT - - Authors: - * Ignacio Pascual (ignapas) - -************************************************************************ */ - -/** - * ProgressBar with its padding and margins adapted to be show inside a qx.ui.toolbar.ToolBar. 
- */ -qx.Class.define("osparc.ui.toolbar.ProgressBar", { - extend: qx.ui.indicator.ProgressBar, - - construct: function() { - this.base(arguments); - }, - - properties: { - appearance: { - refine: true, - init: "toolbar-progressbar" - } - }, - - members : { - // overridden - _applyVisibility : function(value, old) { - this.base(arguments, value, old); - // trigger a appearance recalculation of the parent - const parent = this.getLayoutParent(); - if (parent && parent instanceof qx.ui.toolbar.PartContainer) { - qx.ui.core.queue.Appearance.add(parent); - } - } - } -}); diff --git a/services/static-webserver/client/source/class/osparc/ui/window/Confirmation.js b/services/static-webserver/client/source/class/osparc/ui/window/Confirmation.js index aafb1aac589a..05cc501562f7 100644 --- a/services/static-webserver/client/source/class/osparc/ui/window/Confirmation.js +++ b/services/static-webserver/client/source/class/osparc/ui/window/Confirmation.js @@ -40,7 +40,7 @@ qx.Class.define("osparc.ui.window.Confirmation", { check: [null, "create", "warning", "delete"], init: null, nullable: true, - apply: "__applyConfirmAppearance" + event: "changeConfirmAction", }, confirmed: { @@ -50,7 +50,6 @@ qx.Class.define("osparc.ui.window.Confirmation", { }, members: { - _createChildControlImpl: function(id) { let control; switch (id) { @@ -64,6 +63,20 @@ qx.Class.define("osparc.ui.window.Confirmation", { this.setConfirmed(true); this.close(1); }, this); + this.bind("confirmAction", control, "appearance", { + converter: value => { + switch (value) { + case "create": + return "strong-button"; + case "warning": + return "warning-button"; + case "delete": + return "danger-button"; + default: + return "strong-button"; + } + } + }); const command = new qx.ui.command.Command("Enter"); control.setCommand(command); const btnsLayout = this.getChildControl("buttons-layout"); @@ -81,23 +94,5 @@ qx.Class.define("osparc.ui.window.Confirmation", { getCancelButton: function() { return this.getChildControl("cancel-button"); }, - - __applyConfirmAppearance: function(confirmationAction) { - const confirmButton = this.getChildControl("confirm-button"); - switch (confirmationAction) { - case "create": - confirmButton.setAppearance("strong-button"); - break; - case "warning": - confirmButton.setAppearance("warning-button"); - break; - case "delete": - confirmButton.setAppearance("danger-button"); - break; - default: - confirmButton.resetAppearance(); - break; - } - } } }); diff --git a/services/static-webserver/client/source/class/osparc/ui/window/Dialog.js b/services/static-webserver/client/source/class/osparc/ui/window/Dialog.js index 0d1c61b20aaf..10593a0a605d 100644 --- a/services/static-webserver/client/source/class/osparc/ui/window/Dialog.js +++ b/services/static-webserver/client/source/class/osparc/ui/window/Dialog.js @@ -22,7 +22,7 @@ qx.Class.define("osparc.ui.window.Dialog", { this.set({ autoDestroy: true, - layout: new qx.ui.layout.VBox(15), + layout: new qx.ui.layout.VBox(10), showMinimize: false, showMaximize: false, contentPadding: 15, @@ -41,7 +41,8 @@ qx.Class.define("osparc.ui.window.Dialog", { properties: { message: { check: "String", - apply: "_applyMessage" + init: "", + event: "changeMessage", } }, @@ -52,11 +53,28 @@ qx.Class.define("osparc.ui.window.Dialog", { _createChildControlImpl: function(id) { let control; switch (id) { + case "message-label": + control = new qx.ui.basic.Label().set({ + font: "text-14", + selectable: true, + rich: true + }); + this.bind("message", control, "value"); + this.addAt(control, 
0); + break; + case "extra-widgets-layout": + control = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)).set({ + paddingTop: 10 + }); + this.addAt(control, 1, { + flex: 1 + }); + break; case "buttons-layout": control = new qx.ui.container.Composite(new qx.ui.layout.HBox(10).set({ alignX: "right" })); - this.add(control); + this.addAt(control, 2); break; case "cancel-button": { const btnsLayout = this.getChildControl("buttons-layout"); @@ -71,37 +89,16 @@ qx.Class.define("osparc.ui.window.Dialog", { }, __buildLayout: function() { - this.__messageLabel = new qx.ui.basic.Label().set({ - font: "text-14", - selectable: true, - rich: true - }); - this.add(this.__messageLabel, { - flex: 1 - }); - - this.__extraWidgetsLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(15)).set({ - paddingTop: 10 - }); - this.__extraWidgetsLayout.exclude(); - this.add(this.__extraWidgetsLayout, { - flex: 1 - }); - + this.getChildControl("message-label"); this.getChildControl("buttons-layout"); }, - _applyMessage: function(message) { - this.__messageLabel.setValue(message); - }, - addWidget: function(widget) { - this.__extraWidgetsLayout.show(); - this.__extraWidgetsLayout.add(widget); + this.getChildControl("extra-widgets-layout").add(widget); }, getExtraWidgetsLayout: function() { - return this.__extraWidgetsLayout; + return this.getChildControl("extra-widgets-layout"); }, /** diff --git a/services/static-webserver/client/source/class/osparc/ui/window/Progress.js b/services/static-webserver/client/source/class/osparc/ui/window/Progress.js index 45956468ece4..c41b4476a0a7 100644 --- a/services/static-webserver/client/source/class/osparc/ui/window/Progress.js +++ b/services/static-webserver/client/source/class/osparc/ui/window/Progress.js @@ -40,13 +40,11 @@ qx.Class.define("osparc.ui.window.Progress", { allowGrowY: false, allowGrowX: true, margin: 0, + decorator: "rounded", }); control.getChildControl("progress").set({ backgroundColor: "strong-main" }); - control.getContentElement().setStyles({ - "border-radius": "4px" - }); this.addAt(control, 1); break; } diff --git a/services/static-webserver/client/source/class/osparc/ui/window/TabbedView.js b/services/static-webserver/client/source/class/osparc/ui/window/TabbedView.js index 6706b152f92b..0395dc374d93 100644 --- a/services/static-webserver/client/source/class/osparc/ui/window/TabbedView.js +++ b/services/static-webserver/client/source/class/osparc/ui/window/TabbedView.js @@ -47,19 +47,6 @@ qx.Class.define("osparc.ui.window.TabbedView", { box.setLayout(new qx.ui.layout.VBox(10)); return box; }, - - /** - * Common layout for tooltip label - */ - createHelpLabel: function(message=null, font="text-13") { - const label = new qx.ui.basic.Label().set({ - value: message, - alignX: "left", - rich: true, - font: font - }); - return label; - } }, members: { diff --git a/services/static-webserver/client/source/class/osparc/ui/window/Window.js b/services/static-webserver/client/source/class/osparc/ui/window/Window.js index c8f8c304d308..22af331a5d04 100644 --- a/services/static-webserver/client/source/class/osparc/ui/window/Window.js +++ b/services/static-webserver/client/source/class/osparc/ui/window/Window.js @@ -36,8 +36,10 @@ qx.Class.define("osparc.ui.window.Window", { ); if (modalFrame) { modalFrame.addEventListener("click", () => { - if (this.isModal() && this.isClickAwayClose() && - parseInt(modalFrame.style.zIndex) === parseInt(thisDom.style.zIndex) - 1) { + if ( + this.isClickAwayClose() && + parseInt(modalFrame.style.zIndex) === 
parseInt(thisDom.style.zIndex) - 1 + ) { this.close(); } }); @@ -73,8 +75,8 @@ qx.Class.define("osparc.ui.window.Window", { showMinimize: false, showMaximize: false, resizable: true, - width: width, - minHeight: minHeight, + width, + minHeight, maxHeight: Math.max(minHeight, document.documentElement.clientHeight), modal: true, clickAwayClose: true @@ -116,6 +118,8 @@ qx.Class.define("osparc.ui.window.Window", { } } }, 1); + // keep it centered + window.addEventListener("resize", () => this.center()); } else { this.base(arguments); } diff --git a/services/static-webserver/client/source/class/osparc/user/UserAccount.js b/services/static-webserver/client/source/class/osparc/user/UserAccount.js new file mode 100644 index 000000000000..7b2bd228d4a8 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/user/UserAccount.js @@ -0,0 +1,127 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.user.UserAccount", { + extend: osparc.ui.window.TabbedView, + + construct: function(userGroupId) { + this.base(arguments); + + this.set({ + padding: 10, + }); + + this.getChildControl("thumbnail"); + const profilePage = this.getChildControl("profile-page"); + const extras = this.getChildControl("extras-page"); + this.bind("user", profilePage, "user"); + this.bind("extras", extras, "extras"); + + this.setUserGroupId(userGroupId); + }, + + properties: { + userGroupId: { + check: "Number", + init: null, + nullable: false, + apply: "__applyUserGroupId", + }, + + user: { + check: "osparc.data.model.User", + init: null, + nullable: false, + event: "changeUser", + }, + + extras: { + check: "Object", + init: null, + nullable: false, + event: "changeExtras", + }, + }, + + events: { + "updateCaption": "qx.event.type.Data", + "closeWindow": "qx.event.type.Event", + }, + + statics: { + THUMBNAIL_SIZE: 90, + }, + + members: { + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "thumbnail": + control = new osparc.ui.basic.Thumbnail(null, this.self().THUMBNAIL_SIZE, this.self().THUMBNAIL_SIZE).set({ + width: this.self().THUMBNAIL_SIZE, + height: this.self().THUMBNAIL_SIZE, + marginBottom: 10, + }); + control.getChildControl("image").set({ + anonymous: true, + decorator: "rounded", + }); + this.addWidgetToTabs(control); + break; + case "profile-page": + control = new osparc.user.UserProfile(); + this.addTab("Profile", "", control); + break; + case "extras-page": + control = new osparc.user.UserExtras(); + this.addTab("Extras", "", control); + break; + } + return control || this.base(arguments, id); + }, + + __applyUserGroupId: function(userGroupId) { + const params = { + url: { + gId: userGroupId + } + }; + osparc.data.Resources.fetch("poUsers", "searchByGroupId", params) + .then(usersData => { + if (usersData.length === 1) { + const userData = usersData[0]; + + const user = new osparc.data.model.User(userData); + user.setContactData(userData); + // remove the displayed properties from the contact info + Object.keys(qx.util.PropertyUtil.getProperties(osparc.data.model.User)).forEach(prop => delete userData[prop]); + const extras = osparc.utils.Utils.convertKeysToTitles(userData); + + this.fireDataEvent("updateCaption", user.getUserName()); + 
this.getChildControl("thumbnail").setSource(user.createThumbnail(this.self().THUMBNAIL_SIZE)); + this.setUser(user); + this.setExtras(extras); + } + }) + .catch(err => { + osparc.FlashMessenger.logError(err); + console.error(err); + this.fireEvent("closeWindow"); + }); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/user/UserAccountWindow.js b/services/static-webserver/client/source/class/osparc/user/UserAccountWindow.js new file mode 100644 index 000000000000..373286639135 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/user/UserAccountWindow.js @@ -0,0 +1,46 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.user.UserAccountWindow", { + extend: osparc.ui.window.TabbedWindow, + + construct: function(userGroupId) { + this.base(arguments, "user-account-"+userGroupId, this.tr("User Account")); + + this.set({ + width: osparc.user.UserAccountWindow.WIDTH, + height: osparc.user.UserAccountWindow.HEIGHT, + }); + + const userAccount = new osparc.user.UserAccount(userGroupId); + userAccount.addListener("updateCaption", e => this.setCaption(e.getData())); + userAccount.addListener("closeWindow", () => this.close(), this); + this._setTabbedView(userAccount); + }, + + statics: { + WIDTH: 500, + HEIGHT: 500, + + openWindow: function(userGroupId) { + const userAccountWindow = new osparc.user.UserAccountWindow(userGroupId); + userAccountWindow.center(); + userAccountWindow.open(); + return userAccountWindow; + }, + }, +}); diff --git a/services/static-webserver/client/source/class/osparc/user/UserExtras.js b/services/static-webserver/client/source/class/osparc/user/UserExtras.js new file mode 100644 index 000000000000..7b017a8b2373 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/user/UserExtras.js @@ -0,0 +1,58 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.user.UserExtras", { + extend: qx.ui.core.Widget, + + construct: function() { + this.base(arguments); + + this._setLayout(new qx.ui.layout.VBox(10)); + }, + + properties: { + extras: { + check: "Object", + init: null, + nullable: true, + event: "changeExtras", + apply: "__applyExtras", + } + }, + + members: { + __applyExtras: function(extras) { + if (!extras) { + return; + } + + for (const key in extras) { + const value = extras[key]; + if (osparc.utils.Utils.isDateLike(value)) { + extras[key] = osparc.utils.Utils.formatDateAndTime(new Date(value)); + } + } + + const jsonViewer = new osparc.widget.JsonFormatterWidget(extras); + const scroll = new qx.ui.container.Scroll(); + scroll.add(jsonViewer); + this._add(scroll, { + flex: 1 + }); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/user/UserProfile.js b/services/static-webserver/client/source/class/osparc/user/UserProfile.js new file mode 100644 index 000000000000..8d3edc459e10 --- /dev/null +++ 
b/services/static-webserver/client/source/class/osparc/user/UserProfile.js @@ -0,0 +1,249 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.user.UserProfile", { + extend: qx.ui.core.Widget, + + construct: function() { + this.base(arguments); + + this._setLayout(new qx.ui.layout.VBox(10)); + }, + + statics: { + TOP_GRID: { + USERNAME: 0, + FULLNAME: 1, + EMAIL: 2, + PHONE: 3, + USER_ID: 4, + GROUP_ID: 5, + }, + + MIDDLE_GRID: { + INSTITUTION: 0, + ADDRESS: 1, + CITY: 2, + STATE: 3, + COUNTRY: 4, + POSTAL_CODE: 5, + }, + + createLabel: function() { + return new qx.ui.basic.Label().set({ + selectable: true, + }); + }, + }, + + properties: { + user: { + check: "osparc.data.model.User", + init: null, + nullable: true, + event: "changeUser", + apply: "__applyUser", + } + }, + + members: { + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "top-info": { + const grid = new qx.ui.layout.Grid(10, 6); + grid.setColumnWidth(0, 80); + grid.setColumnFlex(1, 1); + grid.setColumnAlign(0, "right", "middle"); + control = new qx.ui.container.Composite(grid); + this._add(control); + break; + } + case "middle-info": { + const grid = new qx.ui.layout.Grid(10, 6); + grid.setColumnWidth(0, 80); + grid.setColumnFlex(1, 1); + grid.setColumnAlign(0, "right", "middle"); + control = new qx.ui.container.Composite(grid); + this._add(control); + break; + } + case "userName": { + this.getChildControl("top-info").add(new qx.ui.basic.Label("UserName"), { + row: this.self().TOP_GRID.USERNAME, + column: 0 + }); + control = this.self().createLabel(); + this.getChildControl("top-info").add(control, { + row: this.self().TOP_GRID.USERNAME, + column: 1 + }); + break; + } + case "fullname": { + this.getChildControl("top-info").add(new qx.ui.basic.Label("Full Name"), { + row: this.self().TOP_GRID.FULLNAME, + column: 0 + }); + control = this.self().createLabel(); + this.getChildControl("top-info").add(control, { + row: this.self().TOP_GRID.FULLNAME, + column: 1 + }); + break; + } + case "email": { + this.getChildControl("top-info").add(new qx.ui.basic.Label("Email"), { + row: this.self().TOP_GRID.EMAIL, + column: 0 + }); + control = this.self().createLabel(); + this.getChildControl("top-info").add(control, { + row: this.self().TOP_GRID.EMAIL, + column: 1 + }); + break; + } + case "phone": { + this.getChildControl("top-info").add(new qx.ui.basic.Label("Phone"), { + row: this.self().TOP_GRID.PHONE, + column: 0 + }); + control = this.self().createLabel(); + this.getChildControl("top-info").add(control, { + row: this.self().TOP_GRID.PHONE, + column: 1 + }); + break; + } + case "user-id": { + this.getChildControl("top-info").add(new qx.ui.basic.Label("User ID"), { + row: this.self().TOP_GRID.USER_ID, + column: 0 + }); + control = this.self().createLabel(); + this.getChildControl("top-info").add(control, { + row: this.self().TOP_GRID.USER_ID, + column: 1 + }); + break; + } + case "group-id": { + this.getChildControl("top-info").add(new qx.ui.basic.Label("Group ID"), { + row: this.self().TOP_GRID.GROUP_ID, + column: 0 + }); + control = this.self().createLabel(); + this.getChildControl("top-info").add(control, { + row: this.self().TOP_GRID.GROUP_ID, + column: 1 + }); + 
break; + } + case "institution": + this.getChildControl("middle-info").add(new qx.ui.basic.Label("Institution"), { + row: this.self().MIDDLE_GRID.INSTITUTION, + column: 0 + }); + control = this.self().createLabel(); + this.getChildControl("middle-info").add(control, { + row: this.self().MIDDLE_GRID.INSTITUTION, + column: 1 + }); + break; + case "address": + this.getChildControl("middle-info").add(new qx.ui.basic.Label("Address"), { + row: this.self().MIDDLE_GRID.ADDRESS, + column: 0 + }); + control = this.self().createLabel(); + this.getChildControl("middle-info").add(control, { + row: this.self().MIDDLE_GRID.ADDRESS, + column: 1 + }); + break; + case "city": + this.getChildControl("middle-info").add(new qx.ui.basic.Label("City"), { + row: this.self().MIDDLE_GRID.CITY, + column: 0 + }); + control = this.self().createLabel(); + this.getChildControl("middle-info").add(control, { + row: this.self().MIDDLE_GRID.CITY, + column: 1 + }); + break; + case "state": + this.getChildControl("middle-info").add(new qx.ui.basic.Label("State"), { + row: this.self().MIDDLE_GRID.STATE, + column: 0 + }); + control = this.self().createLabel(); + this.getChildControl("middle-info").add(control, { + row: this.self().MIDDLE_GRID.STATE, + column: 1 + }); + break; + case "country": + this.getChildControl("middle-info").add(new qx.ui.basic.Label("Country"), { + row: this.self().MIDDLE_GRID.COUNTRY, + column: 0 + }); + control = this.self().createLabel(); + this.getChildControl("middle-info").add(control, { + row: this.self().MIDDLE_GRID.COUNTRY, + column: 1 + }); + break; + case "postal-code": + this.getChildControl("middle-info").add(new qx.ui.basic.Label("Postal Code"), { + row: this.self().MIDDLE_GRID.POSTAL_CODE, + column: 0 + }); + control = this.self().createLabel(); + this.getChildControl("middle-info").add(control, { + row: this.self().MIDDLE_GRID.POSTAL_CODE, + column: 1 + }); + break; + } + return control || this.base(arguments, id); + }, + + __applyUser: function(user) { + if (!user) { + return; + } + + // top grid + this.getChildControl("userName").setValue(user.getUserName()); + this.getChildControl("fullname").setValue([user.getFirstName(), user.getLastName()].filter(Boolean).join(" ")); + this.getChildControl("email").setValue(user.getEmail()); + this.getChildControl("phone").setValue(user.getPhone() || "-"); + this.getChildControl("user-id").setValue(String(user.getUserId())); + this.getChildControl("group-id").setValue(String(user.getGroupId())); + + // middle grid + this.getChildControl("institution").setValue(user.getInstitution() || "-"); + this.getChildControl("address").setValue(user.getAddress() || "-"); + this.getChildControl("city").setValue(user.getCity() || "-"); + this.getChildControl("state").setValue(user.getState() || "-"); + this.getChildControl("country").setValue(user.getCountry() || "-"); + this.getChildControl("postal-code").setValue(user.getPostalCode() || "-"); + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/utils/Avatar.js b/services/static-webserver/client/source/class/osparc/utils/Avatar.js index fc6030c8fcc8..6149f217d952 100644 --- a/services/static-webserver/client/source/class/osparc/utils/Avatar.js +++ b/services/static-webserver/client/source/class/osparc/utils/Avatar.js @@ -34,7 +34,8 @@ qx.Class.define("osparc.utils.Avatar", { type: "static", statics: { - emailToThumbnail: function(email = "", username = "??", size = 32) { + emailToThumbnail: function(email, userName = "??", size = 32) { + email = email || ""; // MD5 (Message-Digest 
Algorithm) by WebToolkit const MD5 = function(s) { function L(k, d) { @@ -254,7 +255,7 @@ qx.Class.define("osparc.utils.Avatar", { }; const emailHash = MD5(email); - const defaultImageUrl = `https://ui-avatars.com/api/${username}/${size}`; + const defaultImageUrl = `https://ui-avatars.com/api/${userName}/${size}`; return `https://www.gravatar.com/avatar/${emailHash}?d=${defaultImageUrl}&s=${size}&r=g`; }, } diff --git a/services/static-webserver/client/source/class/osparc/utils/DisabledPlugins.js b/services/static-webserver/client/source/class/osparc/utils/DisabledPlugins.js index f43c8082f1eb..27bb5a29e53d 100644 --- a/services/static-webserver/client/source/class/osparc/utils/DisabledPlugins.js +++ b/services/static-webserver/client/source/class/osparc/utils/DisabledPlugins.js @@ -30,6 +30,7 @@ qx.Class.define("osparc.utils.DisabledPlugins", { META_MODELING: "WEBSERVER_META_MODELING", FUNCTIONS: "WEBSERVER_FUNCTIONS", LICENSES: "WEBSERVER_LICENSES", + REALTIME_COLLABORATION: "WEBSERVER_REALTIME_COLLABORATION", isExportDisabled: function() { return this.__isPluginDisabled(this.EXPORT); @@ -53,16 +54,18 @@ qx.Class.define("osparc.utils.DisabledPlugins", { return this.__isPluginDisabled(this.FUNCTIONS); }, - isLicensesDisabled: function() { - return this.__isPluginDisabled(this.LICENSES); + isLicensesEnabled: function() { + return !this.__isPluginDisabled(this.LICENSES); + }, + + isRTCEnabled: function() { + return !this.__isPluginDisabled(this.REALTIME_COLLABORATION); }, __isPluginDisabled: function(key) { - const statics = osparc.store.Store.getInstance().get("statics"); - if (statics) { - if ("pluginsDisabled" in statics) { - return statics["pluginsDisabled"].includes(key); - } + const pluginsDisabled = osparc.store.StaticInfo.getValue("pluginsDisabled"); + if (pluginsDisabled) { + return pluginsDisabled.includes(key); } return false; } diff --git a/services/static-webserver/client/source/class/osparc/utils/Icons.js b/services/static-webserver/client/source/class/osparc/utils/Icons.js index 6fc5b52935b0..4fb976460836 100644 --- a/services/static-webserver/client/source/class/osparc/utils/Icons.js +++ b/services/static-webserver/client/source/class/osparc/utils/Icons.js @@ -23,21 +23,21 @@ qx.Class.define("osparc.utils.Icons", { if (iconSize) { return "@FontAwesome5Solid/user/" + iconSize; } - return "@FontAwesome5Solid/user/14"; + return osparc.dashboard.CardBase.SHARED_USER; }, organization: function(iconSize) { if (iconSize) { return "@FontAwesome5Solid/users/" + iconSize; } - return "@FontAwesome5Solid/users/14"; + return osparc.dashboard.CardBase.SHARED_ORGS; }, everyone: function(iconSize) { if (iconSize) { return "@FontAwesome5Solid/globe/" + iconSize; } - return "@FontAwesome5Solid/globe/14"; + return osparc.dashboard.CardBase.SHARED_ALL; } } }); diff --git a/services/static-webserver/client/source/class/osparc/utils/LibVersions.js b/services/static-webserver/client/source/class/osparc/utils/LibVersions.js index 349aad4c286e..02a4ce835b1c 100644 --- a/services/static-webserver/client/source/class/osparc/utils/LibVersions.js +++ b/services/static-webserver/client/source/class/osparc/utils/LibVersions.js @@ -126,11 +126,7 @@ qx.Class.define("osparc.utils.LibVersions", { }, getBackendLibs: function() { - const statics = osparc.store.Store.getInstance().get("statics"); - if ("thirdPartyReferences" in statics) { - return statics["thirdPartyReferences"]; - } - return []; + return osparc.store.StaticInfo.getValue("thirdPartyReferences") || []; } } }); diff --git 
a/services/static-webserver/client/source/class/osparc/utils/Resources.js b/services/static-webserver/client/source/class/osparc/utils/Resources.js index 9d5f3c331f91..42e6a7aa13a1 100644 --- a/services/static-webserver/client/source/class/osparc/utils/Resources.js +++ b/services/static-webserver/client/source/class/osparc/utils/Resources.js @@ -39,6 +39,14 @@ qx.Class.define("osparc.utils.Resources", { return ((hypertoolData["resourceType"] === "hypertool") && ("uuid" in hypertoolData)); }, + isStudyLike: function(resourceData) { + return this.isStudy(resourceData) || this.isTemplate(resourceData) || this.isTutorial(resourceData) || this.isHypertool(resourceData); + }, + + isFunction: function(functionData) { + return ((functionData["resourceType"] === "function") && ("uuid" in functionData)); + }, + isService: function(serviceData) { return ((serviceData["resourceType"] === "service") && ("key" in serviceData) && ("version" in serviceData)); }, diff --git a/services/static-webserver/client/source/class/osparc/utils/Utils.js b/services/static-webserver/client/source/class/osparc/utils/Utils.js index e2b4bce2a3c6..c7bbdaf277c6 100644 --- a/services/static-webserver/client/source/class/osparc/utils/Utils.js +++ b/services/static-webserver/client/source/class/osparc/utils/Utils.js @@ -91,6 +91,45 @@ qx.Class.define("osparc.utils.Utils", { FLOATING_Z_INDEX: 1000001 + 1, + toolTipTextOnDisabledWidget: function(widget, toolTipText) { + if (widget && widget.getContentElement()) { + const el = widget.getContentElement(); + el.removeAttribute("title"); + el.setAttribute("title", toolTipText); + } + }, + + errorsToForm: function(form, errors) { + const items = form.getItems(); + // reset validity + Object.values(items).forEach(item => item.setValid(true)); + errors.forEach(error => { + const msg = error.message; + const field = error.field; + if (field && field in items) { + const item = items[field]; + item.setValid(false); + item.setInvalidMessage(msg); + } + }); + }, + + getBounds: function(widget) { + const bounds = widget.getBounds(); + const cel = widget.getContentElement(); + if (cel) { + const domeEle = cel.getDomElement(); + if (domeEle) { + const rect = domeEle.getBoundingClientRect(); + bounds.left = parseInt(rect.x); + bounds.top = parseInt(rect.y); + bounds.width = parseInt(rect.width); + bounds.height = parseInt(rect.height); + } + } + return bounds; + }, + replaceIconWithThumbnail: function(widget, thumbnailUrl, size = 24) { if (thumbnailUrl) { const thumbnail = new osparc.ui.basic.Thumbnail(thumbnailUrl, size, size).set({ @@ -114,6 +153,24 @@ qx.Class.define("osparc.utils.Utils", { } }, + getThumbnailProps: function(size = 32) { + return { + alignY: "middle", + scale: true, + allowGrowX: true, + allowGrowY: true, + allowShrinkX: true, + allowShrinkY: true, + decorator: "rounded", + maxWidth: size, + maxHeight: size, + }; + }, + + createThumbnail: function(size = 32) { + return new qx.ui.basic.Image().set(this.getThumbnailProps(size)); + }, + disableAutocomplete: function(control) { if (control && control.getContentElement()) { control.getContentElement().setAttribute("autocomplete", "off"); @@ -140,7 +197,11 @@ qx.Class.define("osparc.utils.Utils", { source = imgSrc; } }) - .finally(() => image.setSource(source)); + .finally(() => { + if (image.getContentElement() && imgSrc) { // check if the image is still there + image.setSource(source); + } + }); }, addWhiteSpaces: function(integer) { @@ -152,8 +213,8 @@ qx.Class.define("osparc.utils.Utils", { }, composeTabName: function() { - 
let newName = osparc.store.StaticInfo.getInstance().getDisplayName(); - const platformName = osparc.store.StaticInfo.getInstance().getPlatformName(); + let newName = osparc.store.StaticInfo.getDisplayName(); + const platformName = osparc.store.StaticInfo.getPlatformName(); if (osparc.utils.Utils.isInZ43()) { newName += " Z43"; } @@ -301,23 +362,32 @@ qx.Class.define("osparc.utils.Utils", { }, makeButtonBlink: function(button, nTimes = 1) { - const onTime = 1000; - const oldBgColor = button.getBackgroundColor(); + const baseColor = button.getBackgroundColor(); + const blinkColor = "strong-main"; + const interval = 500; let count = 0; - const blinkIt = btn => { - count++; - btn.setBackgroundColor("strong-main"); - setTimeout(() => { - btn && btn.setBackgroundColor(oldBgColor); - }, onTime); - }; + // If a blink is already in progress, cancel it + if (button._blinkingIntervalId) { + clearInterval(button._blinkingIntervalId); + button.setBackgroundColor(baseColor); // reset to base + } + + const blinkInterval = setInterval(() => { + if (button && button.getContentElement()) { + button.setBackgroundColor((count % 2 === 0) ? blinkColor : baseColor); + count++; + + if (count >= nTimes * 2) { + clearInterval(blinkInterval); + button.setBackgroundColor(baseColor); + button._blinkingIntervalId = null; // cleanup + } + } + }, interval); - // make it "blink": show it as strong button during onTime" nTimes - blinkIt(button); - const intervalId = setInterval(() => { - (count < nTimes) ? blinkIt(button) : clearInterval(intervalId); - }, 2*onTime); + // Store interval ID on the button + button._blinkingIntervalId = blinkInterval; }, hardRefresh: function() { @@ -450,7 +520,7 @@ qx.Class.define("osparc.utils.Utils", { isMouseOnElement: function(element, event, offset = 0) { const domElement = element.getContentElement().getDomElement(); - const boundRect = domElement.getBoundingClientRect(); + const boundRect = domElement && domElement.getBoundingClientRect(); if (boundRect && event.x > boundRect.x - offset && event.y > boundRect.y - offset && @@ -476,7 +546,7 @@ qx.Class.define("osparc.utils.Utils", { }, isDevelopmentPlatform: function() { - const platformName = osparc.store.StaticInfo.getInstance().getPlatformName(); + const platformName = osparc.store.StaticInfo.getPlatformName(); return (["dev", "master"].includes(platformName)); }, @@ -510,10 +580,15 @@ qx.Class.define("osparc.utils.Utils", { return button; }, + isDateLike: function(v) { + if (typeof v === "string") return !isNaN(new Date(v)); + return false; + }, + /** - * @param value {Date Object} Date Object + * @param date {Date Object} Date Object */ - formatDate: function(value) { + formatDate: function(date) { // create a date format like "Oct. 
19, 11:31 AM" if it's this year const dateFormat = new qx.util.format.DateFormat( qx.locale.Date.getDateFormat("medium") @@ -526,20 +601,20 @@ qx.Class.define("osparc.utils.Utils", { const tomorrow = new Date(); tomorrow.setDate(tomorrow.getDate() + 1); - if (today.toDateString() === value.toDateString()) { + if (today.toDateString() === date.toDateString()) { dateStr = qx.locale.Manager.tr("Today"); - } else if (yesterday.toDateString() === value.toDateString()) { + } else if (yesterday.toDateString() === date.toDateString()) { dateStr = qx.locale.Manager.tr("Yesterday"); - } else if (tomorrow.toDateString() === value.toDateString()) { + } else if (tomorrow.toDateString() === date.toDateString()) { dateStr = qx.locale.Manager.tr("Tomorrow"); } else { const currentYear = today.getFullYear(); - if (value.getFullYear() === currentYear) { + if (date.getFullYear() === currentYear) { // Remove the year if it's the current year const shortDateFormat = new qx.util.format.DateFormat("MMM d"); - dateStr = shortDateFormat.format(value); + dateStr = shortDateFormat.format(date); } else { - dateStr = dateFormat.format(value); + dateStr = dateFormat.format(date); } } return dateStr; @@ -552,21 +627,54 @@ qx.Class.define("osparc.utils.Utils", { }, /** - * @param value {Date Object} Date Object + * @param date {Date Object} Date Object */ - formatTime: function(value, long = false) { + formatTime: function(date, long = false) { const timeFormat = new qx.util.format.DateFormat( qx.locale.Date.getTimeFormat(long ? "long" : "short") ); - const timeStr = timeFormat.format(value); + const timeStr = timeFormat.format(date); return timeStr; }, /** - * @param value {Date Object} Date Object + * @param date {Date Object} Date Object */ - formatDateAndTime: function(value) { - return osparc.utils.Utils.formatDate(value) + " " + osparc.utils.Utils.formatTime(value); + formatDateAndTime: function(date) { + return osparc.utils.Utils.formatDate(date) + " " + osparc.utils.Utils.formatTime(date); + }, + + /** + * @param {Date} date - The date to format. + * @returns {String} - The formatted date string with city name and timezone. Sep 4, 1986, 17:00 Zurich (GMT+02:00) + */ + formatDateWithCityAndTZ: function(date) { + // Short date/time formatter + const options = { + year: "numeric", // 1986 + month: "short", // Sep + day: "numeric", // 4 + hour: "numeric", // 9 + minute: "2-digit", + hour12: false, // 24h format + }; + + const dtf = new Intl.DateTimeFormat("en-US", options); + const formatted = dtf.format(date); + + // Timezone city + const tz = Intl.DateTimeFormat().resolvedOptions().timeZone; + const city = tz.split("/").pop().replace("_", " "); + + // UTC offset (minutes → +HH:MM) + const offsetMinutes = -date.getTimezoneOffset(); // JS returns opposite sign + const sign = offsetMinutes >= 0 ? "+" : "-"; + const absMinutes = Math.abs(offsetMinutes); + const hours = String(Math.floor(absMinutes / 60)).padStart(2, "0"); + const minutes = String(absMinutes % 60).padStart(2, "0"); + const offsetStr = `GMT${sign}${hours}:${minutes}`; + + return `${formatted} ${city} (${offsetStr})`; }, formatMsToHHMMSS: function(ms) { @@ -598,20 +706,20 @@ qx.Class.define("osparc.utils.Utils", { }, getReleaseTag: function() { - const rData = osparc.store.StaticInfo.getInstance().getReleaseData(); + const rData = osparc.store.StaticInfo.getReleaseData(); const platformVersion = osparc.utils.LibVersions.getPlatformVersion(); let text = (rData["tag"] && rData["tag"] !== "latest") ? 
rData["tag"] : platformVersion.version; return text; }, getReleaseLink: function() { - const rData = osparc.store.StaticInfo.getInstance().getReleaseData(); + const rData = osparc.store.StaticInfo.getReleaseData(); return rData["url"] || osparc.utils.LibVersions.getVcsRefUrl(); }, createReleaseNotesLink: function() { let text = "osparc-simcore " + this.getReleaseTag(); - const platformName = osparc.store.StaticInfo.getInstance().getPlatformName(); + const platformName = osparc.store.StaticInfo.getPlatformName(); text += platformName.length ? ` (${platformName})` : ""; const url = this.self().getReleaseLink(); const versionLink = new osparc.ui.basic.LinkLabel(); @@ -634,23 +742,32 @@ qx.Class.define("osparc.utils.Utils", { msg += "
"; msg += qx.locale.Manager.tr("Please contact us via email:"); msg += "
"; - const supportEmail = osparc.store.VendorInfo.getInstance().getSupportEmail(); + const supportEmail = osparc.store.VendorInfo.getSupportEmail(); msg += supportEmail; return msg; }, // used for showing it to Guest users createAccountMessage: function() { - const productName = osparc.store.StaticInfo.getInstance().getDisplayName(); + const productName = osparc.store.StaticInfo.getDisplayName(); const manuals = osparc.store.Support.getManuals(); const manualLink = (manuals && manuals.length) ? manuals[0].url : ""; - const supportEmail = osparc.store.VendorInfo.getInstance().getSupportEmail(); - const mailto = osparc.store.Support.mailToLink(supportEmail, "Request Account " + productName); let msg = ""; msg += qx.locale.Manager.tr("To use all "); msg += this.createHTMLLink(productName + " features", manualLink); + + if (osparc.product.Utils.getCreateAccountAction() === "REQUEST_ACCOUNT_FORM") { + // if the product is configured to show a form to request an account, + // then show a link to it in the message + msg += qx.locale.Manager.tr(", please request an account in the following link:"); + msg += "
"; + msg += osparc.store.Support.requestAccountLink(); + return msg; + } msg += qx.locale.Manager.tr(", please send us an e-mail to create an account:"); msg += "
"; + const supportEmail = osparc.store.VendorInfo.getSupportEmail(); + const mailto = osparc.store.Support.mailToLink(supportEmail, "Request Account " + productName); msg += mailto; return msg; }, @@ -669,6 +786,10 @@ qx.Class.define("osparc.utils.Utils", { (c ^ window.crypto.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16)); }, + uuidToShort: function(uuid) { + return uuid.split("-")[0]; + }, + isInZ43: function() { return window.location.hostname.includes("speag"); }, @@ -689,10 +810,6 @@ qx.Class.define("osparc.utils.Utils", { widget.getContentElement().setStyle("background-color", "transparent"); }, - removeBorder: function(widget) { - widget.getContentElement().setStyle("border", "0px solid"); - }, - hideBorder: function(widget) { widget.getContentElement().setStyle("border", "1px solid transparent"); }, @@ -1097,6 +1214,18 @@ qx.Class.define("osparc.utils.Utils", { return str; }, + camelToTitle: function(str) { + return str + .replace(/([A-Z])/g, ' $1') // insert space before capital letters + .replace(/^./, c => c.toUpperCase()); // capitalize first letter + }, + + convertKeysToTitles: function(obj) { + return Object.fromEntries( + Object.entries(obj).map(([key, value]) => [this.camelToTitle(key), value]) + ); + }, + setIdToWidget: (qWidget, id) => { if (qWidget.getContentElement && qWidget.getContentElement() && id) { qWidget.getContentElement().setAttribute("osparc-test-id", id); diff --git a/services/static-webserver/client/source/class/osparc/viewer/NodeViewer.js b/services/static-webserver/client/source/class/osparc/viewer/NodeViewer.js index 2596b4b1dbcc..521bbb7e9d64 100644 --- a/services/static-webserver/client/source/class/osparc/viewer/NodeViewer.js +++ b/services/static-webserver/client/source/class/osparc/viewer/NodeViewer.js @@ -25,13 +25,7 @@ qx.Class.define("osparc.viewer.NodeViewer", { this._setLayout(new qx.ui.layout.VBox()); - const params = { - url: { - studyId - }, - data: osparc.utils.Utils.getClientSessionID() - }; - osparc.data.Resources.fetch("studies", "open", params) + osparc.store.Study.getInstance().openStudy(studyId) .then(studyData => { // create study const study = new osparc.data.model.Study(studyData); @@ -47,7 +41,11 @@ qx.Class.define("osparc.viewer.NodeViewer", { }, this); } }) - .catch(err => console.error(err)); + .catch(err => { + console.error(err); + osparc.FlashMessenger.logError(err); + qx.core.Init.getApplication().logout(); + }); }, properties: { @@ -80,31 +78,13 @@ qx.Class.define("osparc.viewer.NodeViewer", { const iframeHandler = node.getIframeHandler(); if (iframeHandler) { iframeHandler.checkState(); - iframeHandler.addListener("iframeChanged", () => this.__iFrameChanged(), this); - iframeHandler.getIFrame().addListener("load", () => this.__iFrameChanged(), this); - this.__iFrameChanged(); - + osparc.desktop.WorkbenchView.listenToIframeStateChanges(node, this); this.__attachSocketEventHandlers(); } else { console.error(node.getLabel() + " iframe handler not ready"); } }, - __iFrameChanged: function() { - this._removeAll(); - - if (this.getNode() && this.getNode().getIframeHandler()) { - const iframeHandler = this.getNode().getIframeHandler(); - const loadingPage = iframeHandler.getLoadingPage(); - const iFrame = iframeHandler.getIFrame(); - const src = iFrame.getSource(); - const iFrameView = (src === null || src === "about:blank") ? 
loadingPage : iFrame; - this._add(iFrameView, { - flex: 1 - }); - } - }, - __attachSocketEventHandlers: function() { this.__listenToNodeUpdated(); this.__listenToNodeProgress(); diff --git a/services/static-webserver/client/source/class/osparc/vipMarket/LicensedItemDetails.js b/services/static-webserver/client/source/class/osparc/vipMarket/LicensedItemDetails.js index 509924c3ae79..cf6aaaca5c54 100644 --- a/services/static-webserver/client/source/class/osparc/vipMarket/LicensedItemDetails.js +++ b/services/static-webserver/client/source/class/osparc/vipMarket/LicensedItemDetails.js @@ -195,12 +195,7 @@ qx.Class.define("osparc.vipMarket.LicensedItemDetails", { iconPosition: "right", cursor: "pointer", }); - manufacturerLink.getChildControl("icon").set({ - maxWidth: 32, - maxHeight: 32, - scale: true, - decorator: "rounded", - }); + manufacturerLink.getChildControl("icon").set(osparc.utils.Utils.getThumbnailProps(32)); manufacturerLink.addListener("tap", () => window.open(licensedResource.getManufacturerLink())); headerLayout.add(manufacturerLink, { column: 1, diff --git a/services/static-webserver/client/source/class/osparc/vipMarket/LicensedItemListItem.js b/services/static-webserver/client/source/class/osparc/vipMarket/LicensedItemListItem.js index 9756cc5931ad..755b977d481d 100644 --- a/services/static-webserver/client/source/class/osparc/vipMarket/LicensedItemListItem.js +++ b/services/static-webserver/client/source/class/osparc/vipMarket/LicensedItemListItem.js @@ -127,16 +127,7 @@ qx.Class.define("osparc.vipMarket.LicensedItemListItem", { let control; switch (id) { case "thumbnail": - control = new qx.ui.basic.Image().set({ - alignY: "middle", - scale: true, - allowGrowX: true, - allowGrowY: true, - allowShrinkX: true, - allowShrinkY: true, - maxWidth: 32, - maxHeight: 32 - }); + control = osparc.utils.Utils.createThumbnail(32); this._add(control, { row: 0, column: 0, diff --git a/services/static-webserver/client/source/class/osparc/vipMarket/SortModelsButtons.js b/services/static-webserver/client/source/class/osparc/vipMarket/SortModelsButtons.js index da3ed278f2bf..a82b0172cfb2 100644 --- a/services/static-webserver/client/source/class/osparc/vipMarket/SortModelsButtons.js +++ b/services/static-webserver/client/source/class/osparc/vipMarket/SortModelsButtons.js @@ -77,13 +77,6 @@ qx.Class.define("osparc.vipMarket.SortModelsButtons", { "sortBy": "qx.event.type.Data" }, - statics: { - DefaultSorting: { - "sort": "name", - "order": "down" - } - }, - members: { __buttonExecuted: function(btn) { this.set({ diff --git a/services/static-webserver/client/source/class/osparc/widget/DateTimeChooser.js b/services/static-webserver/client/source/class/osparc/widget/DateTimeChooser.js new file mode 100644 index 000000000000..ac5b86607572 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/widget/DateTimeChooser.js @@ -0,0 +1,111 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.widget.DateTimeChooser", { + extend: osparc.ui.window.Window, + + construct: function(winTitle, value) { + this.base(arguments, winTitle || this.tr("Choose a Date and Time")); + + const width = 260; + const height = 26; + this.set({ + layout: new 
qx.ui.layout.VBox(10), + autoDestroy: true, + modal: true, + width, + height, + showMaximize: false, + showMinimize: false, + showClose: true, + resizable: false, + clickAwayClose: false, + }); + + const dateTimeField = this.getChildControl("date-time-field"); + if (value) { + dateTimeField.setValue(value); + } + this.getChildControl("cancel-button"); + this.getChildControl("save-button"); + + this.center(); + + this.__attachEventHandlers(); + }, + + events: { + "dateChanged": "qx.event.type.Data", + }, + + members: { + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "date-time-field": + control = new osparc.ui.form.DateTimeField(); + this.add(control); + break; + case "buttons-layout": + control = new qx.ui.container.Composite(new qx.ui.layout.HBox(5).set({ + alignX: "right" + })); + this.add(control); + break; + case "cancel-button": + control = new qx.ui.form.Button(this.tr("Cancel")).set({ + appearance: "form-button-text", + }); + control.addListener("execute", () => this.close(), this); + this.getChildControl("buttons-layout").add(control); + break; + case "save-button": { + control = new qx.ui.form.Button(this.tr("Save")).set({ + appearance: "form-button", + }); + control.addListener("execute", e => { + const dateTimeField = this.getChildControl("date-time-field"); + const data = { + newValue: dateTimeField.getValue() + }; + this.fireDataEvent("dateChanged", data); + }, this); + this.getChildControl("buttons-layout").add(control); + break; + } + } + return control || this.base(arguments, id); + }, + + __attachEventHandlers: function() { + let command = new qx.ui.command.Command("Enter"); + command.addListener("execute", () => { + this.getChildControl("save-button").execute(); + command.dispose(); + command = null; + }); + + let commandEsc = new qx.ui.command.Command("Esc"); + commandEsc.addListener("execute", () => { + this.close(); + commandEsc.dispose(); + commandEsc = null; + }); + } + } +}); diff --git a/services/static-webserver/client/source/class/osparc/widget/IntlTelInput.js b/services/static-webserver/client/source/class/osparc/widget/IntlTelInput.js deleted file mode 100644 index ae6503d52227..000000000000 --- a/services/static-webserver/client/source/class/osparc/widget/IntlTelInput.js +++ /dev/null @@ -1,136 +0,0 @@ -/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2022 IT'IS Foundation, https://itis.swiss - - License: - MIT: https://opensource.org/licenses/MIT - - Authors: - * Odei Maiz (odeimaiz) - -************************************************************************ */ - -/* global intlTelInput */ - -/** - * @ignore(intlTelInput) - */ - -qx.Class.define("osparc.widget.IntlTelInput", { - extend: qx.ui.core.Widget, - - construct: function() { - this.base(arguments); - - this._setLayout(new qx.ui.layout.HBox(5)); - - this.getContentElement().setStyles({ - "overflow": "visible" // needed for countries dropdown menu - }); - - const randId = Math.floor(Math.random() * 100); - const html = ``; - const phoneNumber = new qx.ui.embed.Html(html).set({ - marginTop: 2, - marginLeft: 2, - marginRight: 2, - minWidth: 185, - maxHeight: 25 - }); - this._add(phoneNumber); - phoneNumber.addListenerOnce("appear", () => { - const convertInputToPhoneInput = () => { - const domElement = document.querySelector(`#phone-${randId}`); - this.__itiInput = this.__inputToPhoneInput(domElement); - phoneNumber.getContentElement().setStyles({ - "overflow": "visible" // 
needed for countries dropdown menu - }); - }; - const intlTelInputLib = osparc.wrapper.IntlTelInput.getInstance(); - if (intlTelInputLib.getLibReady()) { - convertInputToPhoneInput(); - } else { - intlTelInputLib.addListenerOnce("changeLibReady", e => { - if (e.getData()) { - convertInputToPhoneInput(); - } - }); - } - }); - - const feedbackCheck = this.__feedbackCheck = new qx.ui.basic.Image().set({ - paddingTop: 3 - }); - feedbackCheck.exclude(); - this._add(feedbackCheck); - }, - - statics: { - updateStyle: function(itiInput, checkIcon) { - itiInput.a.style["width"] = checkIcon && checkIcon.isVisible() ? "185px" : "215px"; - itiInput.a.style["height"] = "26px"; - itiInput.a.style["borderWidth"] = "0px"; - itiInput.a.style["backgroundColor"] = qx.theme.manager.Color.getInstance().resolve("input_background"); - itiInput.a.style["color"] = qx.theme.manager.Color.getInstance().resolve("text"); - } - }, - - members: { - __itiInput: null, - __feedbackCheck: null, - - getNumber: function() { - return this.__itiInput.getNumber(); - }, - - isValidNumber: function() { - return this.__itiInput.isValidNumber(); - }, - - verifyPhoneNumber: function() { - const isValid = this.isValidNumber(); - this.__feedbackCheck.set({ - toolTipText: "E.164: " + this.getNumber(), - source: isValid ? "@FontAwesome5Solid/check/18" : "@FontAwesome5Solid/exclamation-triangle/18", - textColor: isValid ? "text" : "failed-red" - }); - this.__feedbackCheck.show(); - if (!isValid) { - const validationError = this.__itiInput.getValidationError(); - const errorMap = { - 0: this.tr("Invalid number"), - 1: this.tr("Invalid country code"), - 2: this.tr("Number too short"), - 3: this.tr("Number too long") - }; - const errorMsg = validationError in errorMap ? errorMap[validationError] : "Invalid number"; - this.__feedbackCheck.set({ - toolTipText: errorMsg + ". 
" + this.__feedbackCheck.getToolTipText() - }); - } - this.self().updateStyle(this.__itiInput, this.__feedbackCheck); - }, - - __inputToPhoneInput: function(input) { - const iti = intlTelInput(input, { - initialCountry: "auto", - geoIpLookup: callback => { - fetch("https://ipapi.co/json") - .then(res => res.json()) - .then(data => callback(data.country_code)) - .catch(() => callback("ch")); - }, - preferredCountries: [] - }); - const themeManager = qx.theme.manager.Meta.getInstance(); - themeManager.addListener("changeTheme", () => this.self().updateStyle(iti)); - this.self().updateStyle(iti); - return iti; - } - } -}); diff --git a/services/static-webserver/client/source/class/osparc/widget/JsonFormatterWidget.js b/services/static-webserver/client/source/class/osparc/widget/JsonFormatterWidget.js new file mode 100644 index 000000000000..852c66c96cf3 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/widget/JsonFormatterWidget.js @@ -0,0 +1,109 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.widget.JsonFormatterWidget", { + extend: qx.ui.core.Widget, + + construct: function(json) { + this.base(arguments); + + this._setLayout(new qx.ui.layout.Grow()); + + this.set({ + allowGrowX: true, + allowGrowY: true, + width: null, + height: null + }); + + if (json) { + this.setJson(json); + } + }, + + members: { + __formatterEl: null, + __root: null, + + _createContentElement: function() { + this.__root = new qx.html.Element("div"); + this.__root.addClass("osparc-json-formatter-root"); + this.__root.setStyles({ + width: "100%", + height: "100%", + boxSizing: "border-box", + overflow: "auto" // ensure local overflow is visible + }); + return this.__root; + }, + + _getContentHint: function() { + if (this.__formatterEl) { + return { + width: this.__formatterEl.scrollWidth, + height: this.__formatterEl.scrollHeight + }; + } + return { width: 100, height: 50 }; + }, + + setJson: function(json) { + if (!this.getContentElement().getDomElement()) { + this.addListenerOnce("appear", () => this._mountJson(json), this); + } else { + this._mountJson(json); + } + }, + + _mountJson: function(json) { + if (this.__formatterEl && this.__formatterEl.parentNode) { + this.__formatterEl.parentNode.removeChild(this.__formatterEl); + this.__formatterEl = null; + } + + let jsonObj = json; + if (typeof json === "string") { + try { + jsonObj = JSON.parse(json); + } catch (e) { + console.warn("setJson(): invalid JSON string, rendering raw", e); + } + } + + if (typeof JSONFormatter === "undefined") { + console.error("JSONFormatter is not available"); + return; + } + + const formatter = new JSONFormatter(jsonObj, 2, {}); + this.__formatterEl = formatter.render(); + + const rootDom = this.getContentElement().getDomElement(); + if (rootDom) { + rootDom.appendChild(this.__formatterEl); + } + this.invalidateLayoutCache(); // notify qooxdoo to recalc size + }, + }, + + destruct: function() { + if (this.__formatterEl && this.__formatterEl.parentNode) { + this.__formatterEl.parentNode.removeChild(this.__formatterEl); + } + } +}); diff --git a/services/static-webserver/client/source/class/osparc/widget/NodeOptions.js 
b/services/static-webserver/client/source/class/osparc/widget/NodeOptions.js index 7cf743845894..961b5cc337e3 100644 --- a/services/static-webserver/client/source/class/osparc/widget/NodeOptions.js +++ b/services/static-webserver/client/source/class/osparc/widget/NodeOptions.js @@ -36,7 +36,8 @@ qx.Class.define("osparc.widget.NodeOptions", { properties: { node: { check: "osparc.data.model.Node", - nullable: false + nullable: false, + apply: "__buildLayout", } }, @@ -46,7 +47,7 @@ qx.Class.define("osparc.widget.NodeOptions", { this.show(); }, - buildLayout: async function() { + __buildLayout: function() { const node = this.getNode(); const sections = []; @@ -83,7 +84,7 @@ qx.Class.define("osparc.widget.NodeOptions", { // Update Resource Limits if ( - await osparc.data.Permissions.getInstance().checkCanDo("override_services_specifications") && + osparc.data.Permissions.getInstance().checkMyGroupCanDo("override_services_specifications") && (node.isComputational() || node.isDynamic()) ) { const updateResourceLimitsView = new osparc.node.UpdateResourceLimitsView(node); diff --git a/services/static-webserver/client/source/class/osparc/widget/NodeOutputs.js b/services/static-webserver/client/source/class/osparc/widget/NodeOutputs.js index c8da28a0c48e..0e4efb1ab4ca 100644 --- a/services/static-webserver/client/source/class/osparc/widget/NodeOutputs.js +++ b/services/static-webserver/client/source/class/osparc/widget/NodeOutputs.js @@ -46,7 +46,7 @@ qx.Class.define("osparc.widget.NodeOutputs", { this.set({ node, - ports: node.getMetaData().outputs + ports: node.getMetadata().outputs }); node.addListener("changeOutputs", () => this.__outputsChanged(), this); @@ -141,6 +141,7 @@ qx.Class.define("osparc.widget.NodeOutputs", { focusable: false, toolTipText: this.tr("Connects a Probe to this output") }); + osparc.utils.Utils.setIdToWidget(probeBtn, "connect_probe_btn_" + portKey); this.bind("offerProbes", probeBtn, "visibility", { converter: val => val ? "visible" : "excluded" }); @@ -198,18 +199,21 @@ qx.Class.define("osparc.widget.NodeOutputs", { valueWidget = new osparc.ui.basic.LinkLabel(); if ("store" in value) { // it's a file - const download = true; - const locationId = value.store; - const fileId = value.path; const filename = value.filename || osparc.file.FilePicker.getFilenameFromPath(value); valueWidget.setValue(filename); valueWidget.eTag = value["eTag"]; - osparc.store.Data.getInstance().getPresignedLink(download, locationId, fileId) - .then(presignedLinkData => { - if ("resp" in presignedLinkData && presignedLinkData.resp) { - valueWidget.setUrl(presignedLinkData.resp.link); - } - }); + const download = true; + const locationId = value.store; + const fileId = value.path; + // request the presigned link only when the widget is shown + valueWidget.addListenerOnce("appear", () => { + osparc.store.Data.getInstance().getPresignedLink(download, locationId, fileId) + .then(presignedLinkData => { + if ("resp" in presignedLinkData && presignedLinkData.resp) { + valueWidget.setUrl(presignedLinkData.resp.link); + } + }); + }); } else if ("downloadLink" in value) { // it's a link const filename = (value.filename && value.filename.length > 0) ? 
value.filename : osparc.file.FileDownloadLink.extractLabelFromLink(value["downloadLink"]); diff --git a/services/static-webserver/client/source/class/osparc/widget/NodeTreeItem.js b/services/static-webserver/client/source/class/osparc/widget/NodeTreeItem.js index d424973944f6..ba0d3db1c8da 100644 --- a/services/static-webserver/client/source/class/osparc/widget/NodeTreeItem.js +++ b/services/static-webserver/client/source/class/osparc/widget/NodeTreeItem.js @@ -149,6 +149,7 @@ qx.Class.define("osparc.widget.NodeTreeItem", { updateMarker(); const deleteBtn = this.getChildControl("delete-button"); + // do not allow modifying the pipeline node.getStudy().bind("pipelineRunning", deleteBtn, "enabled", { converter: running => !running }); diff --git a/services/static-webserver/client/source/class/osparc/widget/NodesTree.js b/services/static-webserver/client/source/class/osparc/widget/NodesTree.js index e88930b09c57..309389183865 100644 --- a/services/static-webserver/client/source/class/osparc/widget/NodesTree.js +++ b/services/static-webserver/client/source/class/osparc/widget/NodesTree.js @@ -74,34 +74,40 @@ qx.Class.define("osparc.widget.NodesTree", { statics: { __getSortingValue: function(node) { - if (node.isFilePicker()) { - return osparc.service.Utils.getSorting("file"); - } else if (node.isParameter()) { - return osparc.service.Utils.getSorting("parameter"); - } else if (node.isIterator()) { - return osparc.service.Utils.getSorting("iterator"); - } else if (node.isProbe()) { - return osparc.service.Utils.getSorting("probe"); + if (node.getMetadata()) { + if (node.isFilePicker()) { + return osparc.service.Utils.getSorting("file"); + } else if (node.isParameter()) { + return osparc.service.Utils.getSorting("parameter"); + } else if (node.isIterator()) { + return osparc.service.Utils.getSorting("iterator"); + } else if (node.isProbe()) { + return osparc.service.Utils.getSorting("probe"); + } + return osparc.service.Utils.getSorting(node.getMetadata().type); } - return osparc.service.Utils.getSorting(node.getMetaData().type); + return null; }, __getIcon: function(node) { let icon = null; - if (node.isFilePicker()) { - icon = osparc.service.Utils.getIcon("file"); - } else if (node.isParameter()) { - icon = osparc.service.Utils.getIcon("parameter"); - } else if (node.isIterator()) { - icon = osparc.service.Utils.getIcon("iterator"); - } else if (node.isProbe()) { - icon = osparc.service.Utils.getIcon("probe"); - } else { - icon = osparc.service.Utils.getIcon(node.getMetaData().type); - } - if (icon) { - icon += "14"; + if (node.getMetadata()) { + if (node.isFilePicker()) { + icon = osparc.service.Utils.getIcon("file"); + } else if (node.isParameter()) { + icon = osparc.service.Utils.getIcon("parameter"); + } else if (node.isIterator()) { + icon = osparc.service.Utils.getIcon("iterator"); + } else if (node.isProbe()) { + icon = osparc.service.Utils.getIcon("probe"); + } else { + icon = osparc.service.Utils.getIcon(node.getMetadata().type); + } + if (icon) { + icon += "14"; + } } + return icon; }, @@ -125,22 +131,32 @@ qx.Class.define("osparc.widget.NodesTree", { id: node.getNodeId(), label: "Node", children: [], - icon: this.__getIcon(node), + icon: "", iconColor: "text", sortingValue: this.__getSortingValue(node), node }; + const nodeModel = qx.data.marshal.Json.createModel(nodeData, true); node.bind("label", nodeModel, "label"); - if (node.isDynamic()) { - node.getStatus().bind("interactive", nodeModel, "iconColor", { - converter: status => osparc.service.StatusUI.getColor(status) - }); - } 
else if (node.isComputational()) { - node.getStatus().bind("running", nodeModel, "iconColor", { - converter: status => osparc.service.StatusUI.getColor(status) - }); + const populateWithMetadata = () => { + if (node.isDynamic()) { + node.getStatus().bind("interactive", nodeModel, "iconColor", { + converter: status => osparc.service.StatusUI.getColor(status) + }); + } else if (node.isComputational()) { + node.getStatus().bind("running", nodeModel, "iconColor", { + converter: status => osparc.service.StatusUI.getColor(status) + }); + } + nodeModel.setIcon(this.__getIcon(node)); } + if (node.getMetadata()) { + populateWithMetadata(); + } else { + node.addListenerOnce("changeMetadata", () => populateWithMetadata(), this); + } + return nodeModel; } }, @@ -284,13 +300,13 @@ qx.Class.define("osparc.widget.NodesTree", { const height = osparc.info.CardLarge.HEIGHT; if (nodeId === study.getUuid()) { const studyDetails = new osparc.info.StudyLarge(study); - const title = this.tr("Study Information"); + const title = this.tr("Project Information"); osparc.ui.window.Window.popUpInWindow(studyDetails, title, width, height).set({ maxHeight: height }); } else { const node = study.getWorkbench().getNode(nodeId); - const metadata = node.getMetaData(); + const metadata = node.getMetadata(); const serviceDetails = new osparc.info.ServiceLarge(metadata, { nodeId, label: node.getLabel(), diff --git a/services/static-webserver/client/source/class/osparc/widget/PersistentIframe.js b/services/static-webserver/client/source/class/osparc/widget/PersistentIframe.js index 7734141dfca1..cdd5a45d580a 100644 --- a/services/static-webserver/client/source/class/osparc/widget/PersistentIframe.js +++ b/services/static-webserver/client/source/class/osparc/widget/PersistentIframe.js @@ -301,11 +301,13 @@ qx.Class.define("osparc.widget.PersistentIframe", { }, __handleIframeMessage: function(data, nodeId) { - if (data["type"]) { - switch (data["type"]) { + const action = data["type"]; + if (action) { + const message = data["message"]; + switch (action) { + case "changeTheme": case "theme": { // switch theme driven by the iframe - const message = data["message"]; if (message && message.includes("osparc;theme=")) { const themeName = message.replace("osparc;theme=", ""); const validThemes = osparc.ui.switch.ThemeSwitcher.getValidThemes(); @@ -319,7 +321,7 @@ qx.Class.define("osparc.widget.PersistentIframe", { } case "openMarket": { if (osparc.product.Utils.showS4LStore()) { - const category = data["message"] && data["message"]["category"]; + const category = message && message["category"]; setTimeout(() => osparc.vipMarket.MarketWindow.openWindow(nodeId, category), 100); } break; @@ -332,17 +334,41 @@ qx.Class.define("osparc.widget.PersistentIframe", { } case "openFunction": { // this is the MetaModeling service trying to show function/template information - if (data["message"] && data["message"]["functionId"]) { - const templateId = data["message"]["functionId"]; - osparc.store.Templates.fetchTemplate(templateId) - .then(templateData => { - templateData["resourceType"] = "template"; - const resourceDetails = new osparc.dashboard.ResourceDetails(templateData).set({ - showOpenButton: false, - }); - osparc.dashboard.ResourceDetails.popUpInWindow(resourceDetails); - }) - .catch(() => osparc.FlashMessenger.logError(this.tr("Function not found"))); + let functionData = null; + if (message && message["uuid"]) { + // new version, the uuid is from the function + functionData = { + "uuid": message["uuid"], + "resourceType": "function", + }; 
+ } else if (message && message["functionId"]) { + // old version, the uuid is from the template + functionData = { + "uuid": message["functionId"], + "resourceType": "functionedTemplate", + }; + } + if (functionData) { + const { + resourceDetails, + window, + } = osparc.dashboard.ResourceDetails.popUpInWindow(functionData); + resourceDetails.set({ + showOpenButton: false, + }); + window.setCaption("Function Details"); + } + break; + } + // { type: "openSupport", message: {question: "", answer: ""} } + case "openSupport": { + const supportCenterWindow = osparc.support.SupportCenter.openWindow(); + // for now prefill the text box with the question + if (message && message["question"]) { + supportCenterWindow.proposeConversation( + osparc.support.Conversation.SYSTEM_MESSAGE_TYPE.ESCALATE_TO_SUPPORT, + `From your last question: "${message["question"]}"` + ); } break; } diff --git a/services/static-webserver/client/source/class/osparc/widget/ProgressSequence.js b/services/static-webserver/client/source/class/osparc/widget/ProgressSequence.js index 09cadba98af7..e8ff462772c0 100644 --- a/services/static-webserver/client/source/class/osparc/widget/ProgressSequence.js +++ b/services/static-webserver/client/source/class/osparc/widget/ProgressSequence.js @@ -27,6 +27,7 @@ qx.Class.define("osparc.widget.ProgressSequence", { backgroundColor: "window-popup-background", paddingBottom: 8, minWidth: 400, + maxWidth: 400, }); this.__initLayout(title); diff --git a/services/static-webserver/client/source/class/osparc/widget/Renamer.js b/services/static-webserver/client/source/class/osparc/widget/Renamer.js index fbba9f5184e4..e43e6fd48460 100644 --- a/services/static-webserver/client/source/class/osparc/widget/Renamer.js +++ b/services/static-webserver/client/source/class/osparc/widget/Renamer.js @@ -40,8 +40,8 @@ qx.Class.define("osparc.widget.Renamer", { construct: function(oldLabel = "", subtitle = "", winTitle) { this.base(arguments, winTitle || this.tr("Rename")); - const maxWidth = 350; - const minWidth = 150; + const maxWidth = 400; + const minWidth = 250; const labelWidth = oldLabel ? 
Math.min(Math.max(parseInt(oldLabel.length*4), minWidth), maxWidth) : minWidth; this.set({ layout: new qx.ui.layout.VBox(5), @@ -63,66 +63,93 @@ qx.Class.define("osparc.widget.Renamer", { "labelChanged": "qx.event.type.Data" }, + properties: { + maxChars: { + check: "Number", + init: 50, + apply: "__applyMaxChars", + } + }, + members: { - __save: null, + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "main-layout": + control = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); + this.add(control); + break; + case "text-field": + control = new qx.ui.form.TextField().set({ + placeholder: this.tr("Type text"), + allowGrowX: true + }); + this.getChildControl("main-layout").add(control, { + flex: 1 + }); + break; + case "save-button": + control = new qx.ui.form.Button(this.tr("Save")).set({ + appearance: "form-button", + padding: [1, 5] + }); + this.getChildControl("main-layout").add(control); + break; + case "subtitle": + control = new qx.ui.basic.Label().set({ + font: "text-12" + }); + this.add(control); + break; + } + return control || this.base(arguments, id); + }, __populateNodeLabelEditor: function(oldLabel, labelWidth) { - const nodeLabelEditor = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); - - // Create a text field in which to edit the data - const labelEditor = new qx.ui.form.TextField(oldLabel).set({ - placeholder: this.tr("Type text"), - allowGrowX: true, - minWidth: labelWidth + const textField = this.getChildControl("text-field").set({ + value: oldLabel, + minWidth: labelWidth, }); - nodeLabelEditor.add(labelEditor, { - flex: 1 - }); - - this.addListener("appear", e => { - labelEditor.focus(); - if (labelEditor.getValue()) { - labelEditor.setTextSelection(0, labelEditor.getValue().length); - } - }, this); - // Create the "Save" button to close the cell editor - const save = this.__save = new qx.ui.form.Button(this.tr("Save")); - save.set({ - appearance: "form-button", - padding: [1, 5] - }); - save.addListener("execute", e => { - const newLabel = labelEditor.getValue(); + const saveButton = this.getChildControl("save-button"); + saveButton.addListener("execute", () => { + const newLabel = textField.getValue(); const data = { newLabel }; this.fireDataEvent("labelChanged", data); }, this); - nodeLabelEditor.add(save); - this.add(nodeLabelEditor); + this.addListener("appear", () => { + textField.focus(); + if (textField.getValue()) { + textField.setTextSelection(0, textField.getValue().length); + } + }, this); + }, + + __applyMaxChars: function(value) { + this.getChildControl("text-field").setMaxLength(value); + + this.__addSubtitle(this.tr("%1 characters max", value)); }, - __addSubtitle: function(subtitleLabel) { - if (subtitleLabel) { - const subtitle = new qx.ui.basic.Label(subtitleLabel).set({ - font: "text-12" - }); - this.add(subtitle); + __addSubtitle: function(subtitleText) { + if (subtitleText) { + this.getChildControl("subtitle").setValue(subtitleText); } }, __attachEventHandlers: function() { let command = new qx.ui.command.Command("Enter"); - command.addListener("execute", e => { - this.__save.execute(); + command.addListener("execute", () => { + this.getChildControl("save-button").execute(); command.dispose(); command = null; }); let commandEsc = new qx.ui.command.Command("Esc"); - commandEsc.addListener("execute", e => { + commandEsc.addListener("execute", () => { this.close(); commandEsc.dispose(); commandEsc = null; diff --git a/services/static-webserver/client/source/class/osparc/widget/SectionBox.js 
b/services/static-webserver/client/source/class/osparc/widget/SectionBox.js new file mode 100644 index 000000000000..8e1d76dbe6a4 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/widget/SectionBox.js @@ -0,0 +1,112 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + + +qx.Class.define("osparc.widget.SectionBox", { + extend: qx.ui.core.Widget, + include: [qx.ui.core.MRemoteChildrenHandling, qx.ui.core.MLayoutHandling], + + /** + * @param legend {String?} Section title + * @param icon {String?} Section icon + */ + construct: function(legend, icon) { + this.base(arguments); + + this._setLayout(new qx.ui.layout.Canvas()); + + // ensure child controls exist + this.getChildControl("frame"); + this.getChildControl("legend"); + + if (legend) { + this.setLegend(legend); + } + if (icon) { + this.setIcon(icon); + } + }, + + properties: { + legend: { + check: "String", + init: "", + event: "changeLegend", + }, + + icon: { + check: "String", + init: "", + event: "changeIcon", + }, + + legendBackgroundColor: { + check: "Color", + init: "background-main-1", + event: "changeLegendBackgroundColor", + }, + }, + + members: { + _frame: null, + + // Children you add to this widget will be forwarded into the frame: + getChildrenContainer: function() { + return this._frame || this.getChildControl("frame"); + }, + + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "frame": + control = this._frame = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)).set({ + decorator: new qx.ui.decoration.Decorator().set({ + width: 1, + style: "solid", + color: "background-main-2", + radius: 4, + }), + padding: [10 + 6, 10, 10, 10], + backgroundColor: "transparent", + }); + // full size, but pushed down by frameTop + this._add(control, { left: 0, right: 0, bottom: 0, top: 10 }); + break; + case "legend": + control = new qx.ui.basic.Atom().set({ + font: "text-14", + padding: [0, 6], + }); + this.bind("legend", control, "label"); + this.bind("icon", control, "icon"); + this.bind("legendBackgroundColor", control, "backgroundColor"); + this._add(control, { left: 16, top: 0 }); + break; + } + return control || this.base(arguments, id); + }, + + addHelper: function(message, font) { + font = font || "text-13"; + const label = new qx.ui.basic.Label(message).set({ + font, + }); + this.add(label); + return label; + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/widget/StudyDataManager.js b/services/static-webserver/client/source/class/osparc/widget/StudyDataManager.js index dc093a1be033..5e684c719489 100644 --- a/services/static-webserver/client/source/class/osparc/widget/StudyDataManager.js +++ b/services/static-webserver/client/source/class/osparc/widget/StudyDataManager.js @@ -34,15 +34,15 @@ qx.Class.define("osparc.widget.StudyDataManager", { extend: qx.ui.core.Widget, /** - * @param studyId {String} StudyId + * @param studyData {Object} Study Data * @param nodeId {String} NodeId */ - construct: function(studyId, nodeId) { + construct: function(studyData, nodeId) { this.base(arguments); this._setLayout(new qx.ui.layout.VBox(10)); - this.setStudyId(studyId); + this.setStudyData(studyData); if (nodeId) { this.setNodeId(nodeId); @@ -53,8 
+53,8 @@ qx.Class.define("osparc.widget.StudyDataManager", { }, statics: { - popUpInWindow: function(studyId, nodeId, title) { - const studyDataManager = new osparc.widget.StudyDataManager(studyId, nodeId); + popUpInWindow: function(studyData, nodeId, title) { + const studyDataManager = new osparc.widget.StudyDataManager(studyData, nodeId); if (!title) { title = osparc.product.Utils.getStudyAlias({firstUpperCase: true}) + qx.locale.Manager.tr(" Files"); } @@ -63,8 +63,8 @@ qx.Class.define("osparc.widget.StudyDataManager", { }, properties: { - studyId: { - check: "String", + studyData: { + check: "Object", init: null, nullable: false }, @@ -82,6 +82,10 @@ qx.Class.define("osparc.widget.StudyDataManager", { switch (id) { case "tree-folder-view": control = new osparc.file.TreeFolderView(); + control + .getChildControl("folder-viewer") + .getChildControl("selected-file-layout") + .setDeleteEnabled(osparc.data.model.Study.canIDelete(this.getStudyData()["accessRights"])); this._add(control, { flex: 1 }); @@ -102,14 +106,15 @@ qx.Class.define("osparc.widget.StudyDataManager", { __reloadTree: function() { const treeFolderView = this.getChildControl("tree-folder-view"); + const studyId = this.getStudyData()["uuid"]; const foldersTree = treeFolderView.getChildControl("folder-tree"); foldersTree.resetCache(); if (this.getNodeId()) { - foldersTree.populateNodeTree(this.getStudyId(), this.getNodeId()); - treeFolderView.requestSize(this.getStudyId(), this.getNodeId()); - } else if (this.getStudyId()) { - foldersTree.populateStudyTree(this.getStudyId()); - treeFolderView.requestSize(this.getStudyId()); + foldersTree.populateNodeTree(studyId, this.getNodeId()); + treeFolderView.requestSize(studyId, this.getNodeId()); + } else if (studyId) { + foldersTree.populateStudyTree(studyId); + treeFolderView.requestSize(studyId); } const folderViewer = treeFolderView.getChildControl("folder-viewer"); diff --git a/services/static-webserver/client/source/class/osparc/widget/StudyTitleOnlyTree.js b/services/static-webserver/client/source/class/osparc/widget/StudyTitleOnlyTree.js index 6d3499b23649..1a00479f1612 100644 --- a/services/static-webserver/client/source/class/osparc/widget/StudyTitleOnlyTree.js +++ b/services/static-webserver/client/source/class/osparc/widget/StudyTitleOnlyTree.js @@ -45,7 +45,7 @@ qx.Class.define("osparc.widget.StudyTitleOnlyTree", { __openStudyInfo: function() { const studyDetails = new osparc.info.StudyLarge(this.getStudy()); - const title = this.tr("Study Information"); + const title = this.tr("Project Information"); const width = osparc.info.CardLarge.WIDTH; const height = osparc.info.CardLarge.HEIGHT; osparc.ui.window.Window.popUpInWindow(studyDetails, title, width, height).set({ diff --git a/services/static-webserver/client/source/class/osparc/widget/logger/LoggerModel.js b/services/static-webserver/client/source/class/osparc/widget/logger/LoggerModel.js index 953c4448cafb..c62dc4ad22b9 100644 --- a/services/static-webserver/client/source/class/osparc/widget/logger/LoggerModel.js +++ b/services/static-webserver/client/source/class/osparc/widget/logger/LoggerModel.js @@ -89,7 +89,7 @@ qx.Class.define("osparc.widget.logger.LoggerModel", { iconSource = "osparc/icons/circle-exclamation-solid.svg"; break; case logLevels.ERROR: - iconSource = "osparc/icons/circle-xmark-solid.svg"; + iconSource = "osparc/icons/circle-xmark-red.svg"; break; } return iconSource; diff --git a/services/static-webserver/client/source/class/osparc/workbench/Annotation.js 
b/services/static-webserver/client/source/class/osparc/workbench/Annotation.js index 7444094680a9..09583b40cc99 100644 --- a/services/static-webserver/client/source/class/osparc/workbench/Annotation.js +++ b/services/static-webserver/client/source/class/osparc/workbench/Annotation.js @@ -19,17 +19,12 @@ qx.Class.define("osparc.workbench.Annotation", { extend: qx.core.Object, /** - * @param svgLayer {Object} SVG canvas * @param data {Object} data containing type, color, attributes and (optional) id * @param id {String} data */ - construct: function(svgLayer, data, id) { + construct: function(data, id) { this.base(); - if (svgLayer) { - this.__svgLayer = svgLayer; - } - if (id === undefined) { id = osparc.utils.Utils.uuidV4(); } @@ -39,14 +34,21 @@ qx.Class.define("osparc.workbench.Annotation", { } this.set({ id, - type: data.type, color, - attributes: data.attributes + attributes: data.attributes, + type: data.type, }); }, statics: { - DEFAULT_COLOR: "#FFFF01" + DEFAULT_COLOR: "#FFFF01", + + TYPES: { + NOTE: "note", + RECT: "rect", + TEXT: "text", + CONVERSATION: "conversation", + }, }, properties: { @@ -56,66 +58,108 @@ qx.Class.define("osparc.workbench.Annotation", { }, type: { - check: ["note", "rect", "text"], - nullable: false + check: [ + "note", // osparc.workbench.Annotation.TYPES.NOTE + "rect", // osparc.workbench.Annotation.TYPES.RECT + "text", // osparc.workbench.Annotation.TYPES.TEXT + "conversation", // osparc.workbench.Annotation.TYPES.CONVERSATION + ], + nullable: false, }, color: { check: "Color", event: "changeColor", init: "#FFFF01", - apply: "__applyColor" + nullable: true, + apply: "__applyColor", }, attributes: { check: "Object", + nullable: false + }, + + svgCanvas: { + init: null, nullable: false, - apply: "__drawAnnotation" + apply: "__drawAnnotation", }, representation: { init: null - } + }, }, events: { "annotationClicked": "qx.event.type.Data", "annotationStartedMoving": "qx.event.type.Event", "annotationMoving": "qx.event.type.Event", - "annotationStoppedMoving": "qx.event.type.Event" + "annotationStoppedMoving": "qx.event.type.Event", + "annotationChanged": "qx.event.type.Event", }, members: { - __svgLayer: null, - - __drawAnnotation: async function(attrs) { - if (this.__svgLayer === null) { + __drawAnnotation: function(svgLayer) { + if (svgLayer === null) { return; } + const attrs = this.getAttributes(); let representation = null; switch (this.getType()) { - case "note": { + case this.self().TYPES.NOTE: { const user = osparc.store.Groups.getInstance().getUserByGroupId(attrs.recipientGid); - representation = this.__svgLayer.drawAnnotationNote(attrs.x, attrs.y, user ? user.getLabel() : "", attrs.text); + representation = svgLayer.drawAnnotationNote(attrs.x, attrs.y, user ? 
user.getLabel() : "", attrs.text); break; } - case "rect": - representation = this.__svgLayer.drawAnnotationRect(attrs.width, attrs.height, attrs.x, attrs.y, this.getColor()); + case this.self().TYPES.RECT: + representation = svgLayer.drawAnnotationRect(attrs.width, attrs.height, attrs.x, attrs.y, this.getColor()); break; - case "text": - representation = this.__svgLayer.drawAnnotationText(attrs.x, attrs.y, attrs.text, this.getColor(), attrs.fontSize); + case this.self().TYPES.TEXT: + representation = svgLayer.drawAnnotationText(attrs.x, attrs.y, attrs.text, this.getColor(), attrs.fontSize); break; + case this.self().TYPES.CONVERSATION: { + representation = svgLayer.drawAnnotationConversation(attrs.x, attrs.y, attrs.text); + const conversationId = attrs.conversationId; + if (conversationId) { + osparc.store.ConversationsProject.getInstance().addListener("conversationRenamed", e => { + const data = e.getData(); + if (conversationId === data["conversationId"]) { + this.setText(data.name); + } + }, this); + } + break; + } } + if (representation) { + // handle click events + switch (this.getType()) { + case this.self().TYPES.NOTE: + case this.self().TYPES.RECT: + case this.self().TYPES.TEXT: + representation.node.addEventListener("click", e => { + this.fireDataEvent("annotationClicked", e.ctrlKey); + e.stopPropagation(); + }, this); + break; + case this.self().TYPES.CONVERSATION: + representation["clickables"].forEach(clickable => { + clickable.click(() => { + this.fireDataEvent("annotationClicked", false); + }, this); + }); + break; + } + + // handle moving events osparc.wrapper.Svg.makeDraggable(representation); - representation.node.addEventListener("click", e => { - this.fireDataEvent("annotationClicked", e.ctrlKey); - e.stopPropagation(); - }, this); representation.on("dragstart", () => this.fireEvent("annotationStartedMoving")); representation.on("dragmove", () => this.fireEvent("annotationMoving")); representation.on("dragend", () => this.fireEvent("annotationStoppedMoving")); + this.setRepresentation(representation); } }, @@ -124,24 +168,39 @@ qx.Class.define("osparc.workbench.Annotation", { const representation = this.getRepresentation(); if (representation) { switch (this.getType()) { - case "rect": + case this.self().TYPES.RECT: osparc.wrapper.Svg.updateItemColor(representation, color); break; - case "text": + case this.self().TYPES.TEXT: osparc.wrapper.Svg.updateTextColor(representation, color); break; } + this.fireEvent("annotationChanged"); } }, getRepresentationPosition: function() { const representation = this.getRepresentation(); if (representation) { - const attrs = osparc.wrapper.Svg.getRectAttributes(representation); - return { - x: parseInt(attrs.x), - y: parseInt(attrs.y) - }; + switch (this.getType()) { + case this.self().TYPES.RECT: + case this.self().TYPES.TEXT: + case this.self().TYPES.NOTE: { + const attrs = osparc.wrapper.Svg.getRectAttributes(representation); + return { + x: parseInt(attrs.x), + y: parseInt(attrs.y), + }; + } + case this.self().TYPES.CONVERSATION: { + const x = representation.transform().x; + const y = representation.transform().y; + return { + x, + y, + }; + } + } } return null; }, @@ -166,6 +225,7 @@ qx.Class.define("osparc.workbench.Annotation", { this.getAttributes().x = x; this.getAttributes().y = y; } + this.fireEvent("annotationChanged"); }, setText: function(newText) { @@ -173,6 +233,7 @@ qx.Class.define("osparc.workbench.Annotation", { const representation = this.getRepresentation(); if (representation) { 
osparc.wrapper.Svg.updateText(representation, newText); + this.fireEvent("annotationChanged"); } }, @@ -181,18 +242,24 @@ qx.Class.define("osparc.workbench.Annotation", { const representation = this.getRepresentation(); if (representation) { osparc.wrapper.Svg.updateTextSize(representation, fontSize); + this.fireEvent("annotationChanged"); } }, setSelected: function(selected) { + const svgCanvas = this.getSvgCanvas(); + if (svgCanvas === null) { + return; + }; + const representation = this.getRepresentation(); if (representation) { switch (this.getType()) { - case "rect": - case "text": { + case this.self().TYPES.RECT: + case this.self().TYPES.TEXT: { if (selected) { if (!("bBox" in representation.node)) { - const bBox = this.__svgLayer.drawBoundingBox(this); + const bBox = svgCanvas.drawBoundingBox(this); representation.node["bBox"] = bBox; } } else if ("bBox" in representation.node) { @@ -206,11 +273,12 @@ qx.Class.define("osparc.workbench.Annotation", { }, serialize: function() { - return { + const serializeData = { type: this.getType(), attributes: this.getAttributes(), - color: this.getColor() + color: this.getColor(), // TYPES.NOTE and TYPES.CONVERSATION do not need a color but backend expects it }; + return serializeData; } } }); diff --git a/services/static-webserver/client/source/class/osparc/workbench/BaseNodeUI.js b/services/static-webserver/client/source/class/osparc/workbench/BaseNodeUI.js deleted file mode 100644 index bd43406a5183..000000000000 --- a/services/static-webserver/client/source/class/osparc/workbench/BaseNodeUI.js +++ /dev/null @@ -1,340 +0,0 @@ -/* ************************************************************************ - - osparc - the simcore frontend - - https://osparc.io - - Copyright: - 2021 IT'IS Foundation, https://itis.swiss - - License: - MIT: https://opensource.org/licenses/MIT - - Authors: - * Odei Maiz (odeimaiz) - -************************************************************************ */ - -qx.Class.define("osparc.workbench.BaseNodeUI", { - extend: qx.ui.window.Window, - include: osparc.filter.MFilterable, - implement: osparc.filter.IFilterable, - type: "abstract", - - construct: function() { - this.base(); - - const grid = new qx.ui.layout.Grid(4, 1); - grid.setColumnFlex(1, 1); - - this.set({ - appearance: "node-ui-cap", - layout: grid, - showMinimize: false, - showMaximize: false, - showClose: false, - showStatusbar: false, - resizable: false, - allowMaximize: false, - contentPadding: this.self().CONTENT_PADDING - }); - - this.getContentElement().setStyles({ - "border-radius": "4px" - }); - - this.subscribeToFilterGroup("workbench"); - - const captionBar = this.getChildControl("captionbar"); - captionBar.set({ - cursor: "move", - paddingRight: 0, - paddingLeft: this.self().PORT_WIDTH - }); - - const menuBtn = this.__getMenuButton(); - captionBar.add(menuBtn, { - row: 0, - column: this.self().CAPTION_POS.MENU - }); - - const captionTitle = this.getChildControl("title"); - captionTitle.set({ - rich: true, - cursor: "move" - }); - captionTitle.addListener("appear", () => { - qx.event.Timer.once(() => { - const labelDom = captionTitle.getContentElement().getDomElement(); - const maxWidth = parseInt(labelDom.style.width); - // eslint-disable-next-line no-underscore-dangle - const width = captionTitle.__contentSize.width; - if (width > maxWidth) { - this.getNode().bind("label", captionTitle, "toolTipText"); - } - }, this, 50); - }); - - this.__nodeMoving = false; - }, - - properties: { - scale: { - check: "Number", - event: "changeScale", - nullable: 
false - }, - - isMovable: { - check: "Boolean", - init: true, - nullable: false - } - }, - - statics: { - PORT_HEIGHT: 18, - PORT_WIDTH: 11, - CONTENT_PADDING: 2, - PORT_CONNECTED: "@FontAwesome5Regular/dot-circle/18", - PORT_DISCONNECTED: "@FontAwesome5Regular/circle/18", - - CAPTION_POS: { - ICON: 0, // from qooxdoo - TITLE: 1, // from qooxdoo - LOCK: 2, - MARKER: 3, - DEPRECATED: 4, - MENU: 5 - }, - - captionHeight: function() { - return osparc.theme.Appearance.appearances["node-ui-cap/captionbar"].style().height || - osparc.theme.Appearance.appearances["node-ui-cap/captionbar"].style().minHeight; - } - }, - - events: { - "renameNode": "qx.event.type.Data", - "infoNode": "qx.event.type.Data", - "markerClicked": "qx.event.type.Data", - "removeNode": "qx.event.type.Data", - "edgeDragStart": "qx.event.type.Data", - "edgeDragOver": "qx.event.type.Data", - "edgeDrop": "qx.event.type.Data", - "edgeDragEnd": "qx.event.type.Data", - "nodeMovingStart": "qx.event.type.Event", - "nodeMoving": "qx.event.type.Event", - "nodeMovingStop": "qx.event.type.Event" - }, - - members: { - __inputLayout: null, - __outputLayout: null, - _optionsMenu: null, - _markerBtn: null, - _deleteBtn: null, - __nodeMoving: null, - - __getMenuButton: function() { - const optionsMenu = this._optionsMenu = new qx.ui.menu.Menu().set({ - position: "bottom-right" - }); - - const renameBtn = new qx.ui.menu.Button().set({ - label: this.tr("Rename"), - icon: "@FontAwesome5Solid/i-cursor/10" - }); - renameBtn.getChildControl("shortcut").setValue("F2"); - renameBtn.addListener("execute", () => this.fireDataEvent("renameNode", this.getNodeId())); - optionsMenu.add(renameBtn); - - const markerBtn = this._markerBtn = new qx.ui.menu.Button().set({ - icon: "@FontAwesome5Solid/bookmark/10", - visibility: "excluded" - }); - optionsMenu.add(markerBtn); - - const infoBtn = new qx.ui.menu.Button().set({ - label: this.tr("Information..."), - icon: "@FontAwesome5Solid/info/10" - }); - infoBtn.getChildControl("shortcut").setValue("I"); - infoBtn.addListener("execute", () => this.fireDataEvent("infoNode", this.getNodeId())); - optionsMenu.add(infoBtn); - - const deleteBtn = this._deleteBtn = new qx.ui.menu.Button().set({ - label: this.tr("Delete"), - icon: "@FontAwesome5Solid/trash/10" - }); - deleteBtn.getChildControl("shortcut").setValue("Del"); - deleteBtn.addListener("execute", () => this.fireDataEvent("removeNode", this.getNodeId())); - optionsMenu.add(deleteBtn); - - const menuBtn = new qx.ui.form.MenuButton().set({ - menu: optionsMenu, - icon: "@FontAwesome5Solid/ellipsis-v/9", - height: 18, - width: 18, - allowGrowX: false, - allowGrowY: false - }); - return menuBtn; - }, - - getInputPort: function() { - return this.__inputLayout; - }, - - getOutputPort: function() { - return this.__outputLayout; - }, - - _createPort: function(isInput, placeholder = false) { - let port = null; - const width = this.self().PORT_HEIGHT; - if (placeholder) { - port = new qx.ui.core.Spacer(width, width); - } else { - port = new qx.ui.basic.Image().set({ - source: this.self().PORT_DISCONNECTED, // disconnected by default - height: width, - draggable: true, - droppable: true, - width: width, - alignY: "top", - backgroundColor: "background-main" - }); - port.setCursor("pointer"); - port.getContentElement().setStyles({ - "border-radius": width+"px" - }); - port.isInput = isInput; - } - // make the ports exit the NodeUI - port.set({ - marginLeft: isInput ? (-10 + this.self().CONTENT_PADDING) : 0, - marginRight: isInput ? 
0 : (-10 - this.self().CONTENT_PADDING) - }); - - this.add(port, { - row: 0, - column: isInput ? 0 : 2 - }); - - if (isInput) { - this.__inputLayout = port; - } else { - this.__outputLayout = port; - } - - return port; - }, - - getEdgePoint: function(port) { - const bounds = this.getCurrentBounds(); - const captionHeight = Math.max(this.getChildControl("captionbar").getSizeHint().height, this.self().captionHeight()); - const x = port.isInput ? bounds.left - 6 : bounds.left + bounds.width - 1; - const y = bounds.top + captionHeight + this.self().PORT_HEIGHT/2 + 2; - return [x, y]; - }, - - getCurrentBounds: function() { - let bounds = this.getBounds(); - let cel = this.getContentElement(); - if (cel) { - let domeEle = cel.getDomElement(); - if (domeEle) { - bounds.left = parseInt(domeEle.style.left); - bounds.top = parseInt(domeEle.style.top); - } - } - return bounds; - }, - - __scaleCoordinates: function(x, y) { - return { - x: parseInt(x / this.getScale()), - y: parseInt(y / this.getScale()) - }; - }, - - _setPositionFromEvent: function(e) { - const sideBarWidth = this.__dragRange.left; - const navigationBarHeight = this.__dragRange.top; - const native = e.getNativeEvent(); - const x = native.clientX + this.__dragLeft - sideBarWidth; - const y = native.clientY + this.__dragTop - navigationBarHeight; - const coords = this.__scaleCoordinates(x, y); - const insets = this.getLayoutParent().getInsets(); - this.setDomPosition(coords.x - (insets.left || 0), coords.y - (insets.top || 0)); - return coords; - }, - - // override qx.ui.core.MMovable - _onMovePointerMove: function(e) { - // Only react when dragging is active - if (!this.hasState("move") || !this.getIsMovable()) { - return; - } - e.stopPropagation(); - if (this.__nodeMoving === false) { - this.__nodeMoving = true; - this.fireEvent("nodeMovingStart"); - } - this.fireEvent("nodeMoving"); - }, - - // override qx.ui.core.MMovable - _onMovePointerUp : function(e) { - if (this.hasListener("roll")) { - this.removeListener("roll", this._onMoveRoll, this); - } - - // Only react when dragging is active - if (!this.hasState("move") || !this.getIsMovable()) { - return; - } - - this._onMovePointerMove(e); - - this.__nodeMoving = false; - this.fireEvent("nodeMovingStop"); - - // Remove drag state - this.removeState("move"); - - this.releaseCapture(); - - e.stopPropagation(); - }, - - // implement osparc.filter.IFilterable - _filter: function() { - this.setOpacity(0.4); - }, - - // implement osparc.filter.IFilterable - _unfilter: function() { - this.setOpacity(1); - }, - - /** - * @abstract - */ - _shouldApplyFilter: function(data) { - throw new Error("Abstract method called!"); - }, - - // implement osparc.filter.IFilterable - _shouldReactToFilter: function(data) { - if (data.text && data.text.length > 1) { - return true; - } - if (data.tags && data.tags.length) { - return true; - } - return false; - } - } -}); diff --git a/services/static-webserver/client/source/class/osparc/workbench/DiskUsageIndicator.js b/services/static-webserver/client/source/class/osparc/workbench/DiskUsageIndicator.js index e733be3b6bcf..ed4729731ed3 100644 --- a/services/static-webserver/client/source/class/osparc/workbench/DiskUsageIndicator.js +++ b/services/static-webserver/client/source/class/osparc/workbench/DiskUsageIndicator.js @@ -82,7 +82,7 @@ qx.Class.define("osparc.workbench.DiskUsageIndicator", { const indicator = this.getChildControl("disk-indicator") control = new qx.ui.basic.Label().set({ value: "", - font: "text-13", + font: "text-12", textColor: 
"contrasted-text-light", alignX: "center", alignY: "middle", @@ -103,6 +103,10 @@ qx.Class.define("osparc.workbench.DiskUsageIndicator", { // Subscribe to disk usage data for the new node this._subscribe(node); + + node.getStatus().bind("interactive", this, "visibility", { + converter: state => state === "ready" ? "visible" : "excluded" + }); }, _subscribe: function(node) { diff --git a/services/static-webserver/client/source/class/osparc/workbench/EdgeUI.js b/services/static-webserver/client/source/class/osparc/workbench/EdgeUI.js index 40e72e134f99..c7e496069f6e 100644 --- a/services/static-webserver/client/source/class/osparc/workbench/EdgeUI.js +++ b/services/static-webserver/client/source/class/osparc/workbench/EdgeUI.js @@ -48,7 +48,7 @@ qx.Class.define("osparc.workbench.EdgeUI", { representation.hint = hint; if (edge.getInputNode()) { - edge.getInputNode().getStatus().addListener("changeModified", () => this.__updateEdgeState()); + edge.getInputNode().getStatus().addListener("changeOutput", () => this.__updateEdgeState()); } edge.addListener("changePortConnected", () => this.__updateEdgeState()); @@ -69,13 +69,8 @@ qx.Class.define("osparc.workbench.EdgeUI", { }, statics: { - getEdgeColor: function(modified) { - let newColor = null; - if (modified === null) { - newColor = qx.theme.manager.Color.getInstance().resolve("workbench-edge-comp-active"); - } else { - newColor = osparc.service.StatusUI.getColor(modified ? "failed" : "ready"); - } + getEdgeColor: function(outputState) { + const newColor = osparc.service.StatusUI.getColor(outputState); const colorHex = qx.theme.manager.Color.getInstance().resolve(newColor); return colorHex; }, @@ -89,10 +84,10 @@ qx.Class.define("osparc.workbench.EdgeUI", { __updateEdgeState: function() { const inputNode = this.getEdge().getInputNode(); const portConnected = this.getEdge().isPortConnected(); - const modified = inputNode ? inputNode.getStatus().getModified() : false; + const output = inputNode ? 
inputNode.getStatus().getOutput() : null; // color - const colorHex = this.self().getEdgeColor(modified); + const colorHex = this.self().getEdgeColor(output); osparc.wrapper.Svg.updateCurveColor(this.getRepresentation(), colorHex); // shape @@ -102,7 +97,7 @@ qx.Class.define("osparc.workbench.EdgeUI", { const hint = this.getHint(); if (this.getEdge().isPortConnected() === false) { hint.setText(this.self().noPortsConnectedText(this.getEdge())); - } else if (modified) { + } else if (output === "out-of-date") { hint.setText("Out-of-date"); } else { hint.setText(null); diff --git a/services/static-webserver/client/source/class/osparc/workbench/NodeUI.js b/services/static-webserver/client/source/class/osparc/workbench/NodeUI.js index d1535459f944..a4c381eb948d 100644 --- a/services/static-webserver/client/source/class/osparc/workbench/NodeUI.js +++ b/services/static-webserver/client/source/class/osparc/workbench/NodeUI.js @@ -32,7 +32,7 @@ */ qx.Class.define("osparc.workbench.NodeUI", { - extend: osparc.workbench.BaseNodeUI, + extend: qx.ui.window.Window, /** * @param node {osparc.data.model.Node} Node owning the widget @@ -40,9 +40,57 @@ qx.Class.define("osparc.workbench.NodeUI", { construct: function(node) { this.base(arguments); + const grid = new qx.ui.layout.Grid(2, 1); + grid.setColumnFlex(1, 1); + + this.set({ + appearance: "node-ui-cap", + layout: grid, + showMinimize: false, + showMaximize: false, + showClose: false, + showStatusbar: false, + resizable: false, + allowMaximize: false, + contentPadding: this.self().CONTENT_PADDING + }); + + const captionBar = this.getChildControl("captionbar"); + captionBar.set({ + cursor: "move", + paddingRight: 0, + paddingLeft: this.self().PORT_DIAMETER - 6, + }); + + const menuBtn = this.__getMenuButton(); + captionBar.add(menuBtn, { + row: 0, + column: this.self().CAPTION_POS.MENU + }); + + const captionTitle = this.getChildControl("title"); + captionTitle.set({ + rich: true, + cursor: "move" + }); + + this.__nodeMoving = false; + this.setNode(node); this.__resetNodeUILayout(); + + captionTitle.addListener("appear", () => { + qx.event.Timer.once(() => { + const labelDom = captionTitle.getContentElement().getDomElement(); + const maxWidth = parseInt(labelDom.style.width); + // eslint-disable-next-line no-underscore-dangle + const width = captionTitle.__contentSize.width; + if (width > maxWidth) { + this.getNode().bind("label", captionTitle, "toolTipText"); + } + }, this, 50); + }); }, properties: { @@ -53,8 +101,8 @@ qx.Class.define("osparc.workbench.NodeUI", { }, type: { - check: ["normal", "file", "parameter", "iterator", "probe"], - init: "normal", + check: ["computational", "dynamic", "file", "parameter", "iterator", "probe", "unknown"], + init: null, nullable: false, apply: "__applyType" }, @@ -63,27 +111,73 @@ qx.Class.define("osparc.workbench.NodeUI", { check: "String", nullable: true, apply: "__applyThumbnail" - } + }, + scale: { + check: "Number", + event: "changeScale", + nullable: false + }, + + isMovable: { + check: "Boolean", + init: true, + nullable: false + }, }, statics: { NODE_WIDTH: 180, NODE_HEIGHT: 80, FILE_NODE_WIDTH: 120, + PORT_DIAMETER: 18, + PORT_MARGIN_TOP: 4, + CONTENT_PADDING: 2, + PORT_CONNECTED: "@FontAwesome5Regular/dot-circle/18", + PORT_DISCONNECTED: "@FontAwesome5Regular/circle/18", + + CAPTION_POS: { + ICON: 0, // from qooxdoo + TITLE: 1, // from qooxdoo + MODIFIED_STAR: 2, + LOCK: 3, + MARKER: 4, + DEPRECATED: 5, + MENU: 6, + }, + + captionHeight: function() { + return 
osparc.theme.Appearance.appearances["node-ui-cap/captionbar"].style().height || + osparc.theme.Appearance.appearances["node-ui-cap/captionbar"].style().minHeight; + }, }, events: { + "renameNode": "qx.event.type.Data", + "infoNode": "qx.event.type.Data", + "markerClicked": "qx.event.type.Data", + "removeNode": "qx.event.type.Data", + "edgeDragStart": "qx.event.type.Data", + "edgeDragOver": "qx.event.type.Data", + "edgeDrop": "qx.event.type.Data", + "edgeDragEnd": "qx.event.type.Data", + "nodeMovingStart": "qx.event.type.Event", + "nodeMoving": "qx.event.type.Data", + "nodeMovingStop": "qx.event.type.Event", "updateNodeDecorator": "qx.event.type.Event", "requestOpenLogger": "qx.event.type.Event", + "requestOpenServiceCatalog": "qx.event.type.Data", + "highlightEdge": "qx.event.type.Data", }, members: { __thumbnail: null, __svgWorkbenchCanvas: null, - - getNodeType: function() { - return "service"; - }, + __inputLayout: null, + __outputLayout: null, + __optionsMenu: null, + __markerBtn: null, + __deleteBtn: null, + __nodeMoving: null, getNodeId: function() { return this.getNode().getNodeId(); @@ -92,6 +186,21 @@ qx.Class.define("osparc.workbench.NodeUI", { _createChildControlImpl: function(id) { let control; switch (id) { + case "modified-star": + control = new qx.ui.basic.Label("*").set({ + font: "text-14", + toolTipText: this.tr("Needs to run to update the outputs"), + padding: 4, + paddingTop: 0, + paddingBottom: 0, + visibility: "excluded", + alignY: "top", + }); + this.getChildControl("captionbar").add(control, { + row: 0, + column: this.self().CAPTION_POS.MODIFIED_STAR + }); + break; case "lock": control = new qx.ui.basic.Image().set({ source: "@FontAwesome5Solid/lock/12", @@ -100,7 +209,7 @@ qx.Class.define("osparc.workbench.NodeUI", { }); this.getChildControl("captionbar").add(control, { row: 0, - column: osparc.workbench.BaseNodeUI.CAPTION_POS.LOCK + column: this.self().CAPTION_POS.LOCK }); break; case "marker": @@ -111,7 +220,7 @@ qx.Class.define("osparc.workbench.NodeUI", { }); this.getChildControl("captionbar").add(control, { row: 0, - column: osparc.workbench.BaseNodeUI.CAPTION_POS.MARKER + column: this.self().CAPTION_POS.MARKER }); control.addListener("tap", () => this.fireDataEvent("markerClicked", this.getNode().getNodeId())); break; @@ -122,55 +231,74 @@ qx.Class.define("osparc.workbench.NodeUI", { }); this.getChildControl("captionbar").add(control, { row: 0, - column: osparc.workbench.BaseNodeUI.CAPTION_POS.DEPRECATED + column: this.self().CAPTION_POS.DEPRECATED }); break; - case "chips": { - control = new qx.ui.container.Composite(new qx.ui.layout.Flow(3, 3).set({ + case "middle-container": + control = new qx.ui.container.Composite(new qx.ui.layout.HBox(2).set({ alignY: "middle" })).set({ - margin: [3, 4] + padding: 4 }); - let nodeType = this.getNode().getMetaData().type; + this.add(control, { + row: 0, + column: 1 + }); + break; + case "node-type-chip": { + control = new osparc.ui.basic.Chip(); + let nodeType = this.getNode().getMetadata().type; if (this.getNode().isIterator()) { nodeType = "iterator"; + } else if (this.getNode().isParameter()) { + nodeType = "parameter"; } else if (this.getNode().isProbe()) { nodeType = "probe"; } const type = osparc.service.Utils.getType(nodeType); if (type) { - const chip = new osparc.ui.basic.Chip().set({ - icon: type.icon + "14", + control.set({ + icon: type.icon + "13", toolTipText: type.label }); - control.add(chip); + } else if (this.getNode().isUnknown()) { + control.set({ + icon: "@FontAwesome5Solid/question/14", + toolTipText: 
"Unknown", + }); } - const nodeStatus = new osparc.ui.basic.NodeStatusUI(this.getNode()); - control.add(nodeStatus); - const statusLabel = nodeStatus.getChildControl("label"); + this.getChildControl("middle-container").addAt(control, 0); + break; + } + case "node-status-ui": { + control = new osparc.ui.basic.NodeStatusUI(this.getNode()).set({ + maxHeight: 20, + font: "text-10", + }); + const statusLabel = control.getChildControl("label").set({ + maxWidth: 80, + }); const requestOpenLogger = () => this.fireEvent("requestOpenLogger"); const evaluateLabel = () => { const failed = statusLabel.getValue() === "Unsuccessful"; statusLabel.setCursor(failed ? "pointer" : "auto"); - if (nodeStatus.hasListener("tap")) { - nodeStatus.removeListener("tap", requestOpenLogger); + if (control.hasListener("tap")) { + control.removeListener("tap", requestOpenLogger); } if (failed) { - nodeStatus.addListener("tap", requestOpenLogger); + control.addListener("tap", requestOpenLogger); } }; evaluateLabel(); statusLabel.addListener("changeValue", evaluateLabel); - this.add(control, { - row: 0, - column: 1 - }); + this.getChildControl("middle-container").addAt(control, 1); break; } case "progress": control = new qx.ui.indicator.ProgressBar().set({ height: 10, - margin: 4 + margin: 4, + decorator: "rounded", }); this.add(control, { row: 1, @@ -178,6 +306,14 @@ qx.Class.define("osparc.workbench.NodeUI", { colSpan: 3 }); break; + case "avatar-group": + control = new osparc.ui.basic.AvatarGroup(20, "right").set({ + hideMyself: true, + }); + this.getChildControl("middle-container").addAt(control, 2, { + flex: 1 + }); + break; case "usage-indicator": control = new osparc.workbench.DiskUsageIndicator(); this.add(control, { @@ -191,34 +327,37 @@ qx.Class.define("osparc.workbench.NodeUI", { }, __resetNodeUILayout: function() { - this.set({ - width: this.self(arguments).NODE_WIDTH, - maxWidth: this.self(arguments).NODE_WIDTH, - minWidth: this.self(arguments).NODE_WIDTH - }); + this.__setNodeUIWidth(this.self().NODE_WIDTH); this.resetThumbnail(); - this.__createWindowLayout(); + // make sure metadata is ready + if (this.getNode().getMetadata()) { + this.__createContentLayout(); + } else { + this.getNode().addListenerOnce("changeMetadata", () => this.__createContentLayout(), this); + } }, - __createWindowLayout: function() { + __createContentLayout: function() { const node = this.getNode(); + if (node) { + this.getChildControl("node-type-chip"); + this.getChildControl("node-status-ui"); - this.getChildControl("chips").show(); - - if (node.isComputational() || node.isFilePicker() || node.isIterator()) { - this.getChildControl("progress").show(); + if (node.isComputational() || node.isFilePicker() || node.isIterator()) { + this.getChildControl("progress"); + } } }, - populateNodeLayout: function(svgWorkbenchCanvas) { + __populateNodeLayout: function(svgWorkbenchCanvas) { const node = this.getNode(); node.bind("label", this, "caption", { onUpdate: () => { setTimeout(() => this.fireEvent("updateNodeDecorator"), 50); } }); - const metadata = node.getMetaData(); + const metadata = node.getMetadata(); this.__createPorts(true, Boolean((metadata && metadata.inputs && Object.keys(metadata.inputs).length))); this.__createPorts(false, Boolean((metadata && metadata.outputs && Object.keys(metadata.outputs).length))); if (node.isComputational() || node.isFilePicker()) { @@ -226,7 +365,11 @@ qx.Class.define("osparc.workbench.NodeUI", { converter: val => val === null ? 
0 : val }); } - if (node.isFilePicker()) { + if (node.isComputational()) { + this.setType("computational"); + } else if (node.isDynamic()) { + this.setType("dynamic"); + } else if (node.isFilePicker()) { this.setType("file"); } else if (node.isParameter()) { this.setType("parameter"); @@ -235,24 +378,46 @@ qx.Class.define("osparc.workbench.NodeUI", { this.setType("iterator"); } else if (node.isProbe()) { this.setType("probe"); + } else if (node.isUnknown()) { + this.setType("unknown"); + } + this.addListener("resize", () => { + setTimeout(() => this.fireEvent("updateNodeDecorator"), 50); + }); + + if (node.getPropsForm()) { + node.getPropsForm().addListener("highlightEdge", e => this.fireDataEvent("highlightEdge", e.getData()), this); + } + }, + + populateNodeLayout: function(svgWorkbenchCanvas) { + if (this.getNode().getMetadata()) { + this.__populateNodeLayout(svgWorkbenchCanvas); + } else { + this.getNode().addListenerOnce("changeMetadata", () => this.__populateNodeLayout(svgWorkbenchCanvas), this); } }, __applyNode: function(node) { + node.addListener("changePosition", e => { + this.moveNodeTo(e.getData()); + this.fireEvent("nodeMovingStop"); + }); + if (node.isDynamic()) { const startButton = new qx.ui.menu.Button().set({ label: this.tr("Start"), icon: "@FontAwesome5Solid/play/10" }); node.attachHandlersToStartButton(startButton); - this._optionsMenu.addAt(startButton, 0); + this.__optionsMenu.addAt(startButton, 0); const stopButton = new qx.ui.menu.Button().set({ label: this.tr("Stop"), icon: "@FontAwesome5Solid/stop/10" }); node.attachHandlersToStopButton(stopButton); - this._optionsMenu.addAt(stopButton, 1); + this.__optionsMenu.addAt(stopButton, 1); } if (node.getKey().includes("parameter/int")) { @@ -261,27 +426,61 @@ qx.Class.define("osparc.workbench.NodeUI", { icon: "@FontAwesome5Solid/sync-alt/10" }); makeIterator.addListener("execute", () => node.convertToIterator("int"), this); - this._optionsMenu.add(makeIterator); + this.__optionsMenu.add(makeIterator); } else if (node.getKey().includes("data-iterator/int-range")) { const convertToParameter = new qx.ui.menu.Button().set({ label: this.tr("Convert to Parameter"), icon: "@FontAwesome5Solid/sync-alt/10" }); convertToParameter.addListener("execute", () => node.convertToParameter("int"), this); - this._optionsMenu.add(convertToParameter); + this.__optionsMenu.add(convertToParameter); } - const lock = this.getChildControl("lock"); - if (node.getPropsForm()) { - node.getPropsForm().bind("enabled", lock, "visibility", { - converter: val => val ? "excluded" : "visible" + const nodeStatus = node.getStatus(); + const modifiedStar = this.getChildControl("modified-star"); + const evaluateShowStar = () => { + const modified = nodeStatus.getModified(); + const isRunning = osparc.data.model.NodeStatus.isComputationalRunning(node); + modifiedStar.set({ + visibility: modified && !isRunning ? 
"visible" : "excluded" }); - } - this._markerBtn.show(); - this.getNode().bind("marker", this._markerBtn, "label", { + }; + evaluateShowStar(); + nodeStatus.addListener("changeModified", evaluateShowStar); + nodeStatus.addListener("changeRunning", evaluateShowStar); + + const lockState = nodeStatus.getLockState(); + const lock = this.getChildControl("lock"); + lockState.bind("locked", lock, "visibility", { + converter: locked => { + if (locked) { + if (node.isDynamic()) { + // if it's dynamic, don't show the lock if it's me using it + const myGroupId = osparc.auth.Data.getInstance().getGroupId(); + const currentUserGroupIds = lockState.getCurrentUserGroupIds(); + return currentUserGroupIds.includes(myGroupId) ? "excluded" : "visible"; + } else { + return "visible"; + } + } + return "excluded"; + } + }); + + const updateUserGroupIds = () => { + const currentUserGroupIds = lockState.getCurrentUserGroupIds(); + const avatarGroup = this.getChildControl("avatar-group"); + avatarGroup.setUserGroupIds(currentUserGroupIds); + avatarGroup.setVisibility(currentUserGroupIds.length ? "visible" : "excluded"); + }; + updateUserGroupIds(); + lockState.addListener("changeCurrentUserGroupIds", updateUserGroupIds); + + this.__markerBtn.show(); + this.getNode().bind("marker", this.__markerBtn, "label", { converter: val => val ? this.tr("Remove Marker") : this.tr("Add Marker") }); - this._markerBtn.addListener("execute", () => node.toggleMarker()); + this.__markerBtn.addListener("execute", () => node.toggleMarker()); const marker = this.getChildControl("marker"); const updateMarker = () => { @@ -295,7 +494,8 @@ qx.Class.define("osparc.workbench.NodeUI", { node.addListener("changeMarker", () => updateMarker()); updateMarker(); - node.getStudy().bind("pipelineRunning", this._deleteBtn, "enabled", { + // do not allow modifying the pipeline + node.getStudy().bind("pipelineRunning", this.__deleteBtn, "enabled", { converter: running => !running }); @@ -308,7 +508,7 @@ qx.Class.define("osparc.workbench.NodeUI", { textColor: osparc.service.StatusUI.getColor("deprecated") }); let ttMsg = osparc.service.Utils.DEPRECATED_SERVICE_TEXT; - const deprecatedDateMsg = osparc.service.Utils.getDeprecationDateText(node.getMetaData()); + const deprecatedDateMsg = osparc.service.Utils.getDeprecationDateText(node.getMetadata()); if (deprecatedDateMsg) { ttMsg = ttMsg + "
" + deprecatedDateMsg; } @@ -365,6 +565,9 @@ qx.Class.define("osparc.workbench.NodeUI", { case "probe": this.__turnIntoProbeUI(); break; + case "unknown": + this.__turnIntoUnknownUI(); + break; } }, @@ -390,8 +593,8 @@ qx.Class.define("osparc.workbench.NodeUI", { const width = this.self().FILE_NODE_WIDTH; this.__setNodeUIWidth(width); - const chipContainer = this.getChildControl("chips"); - chipContainer.exclude(); + const middleContainer = this.getChildControl("middle-container"); + middleContainer.exclude(); if (this.hasChildControl("progress")) { this.getChildControl("progress").exclude(); @@ -418,17 +621,14 @@ qx.Class.define("osparc.workbench.NodeUI", { }, __turnIntoParameterUI: function() { - const width = 100; + const width = 120; this.__setNodeUIWidth(width); - const label = new qx.ui.basic.Label().set({ - font: "text-18" + const valueLabel = new qx.ui.basic.Label().set({ + paddingLeft: 4, + font: "text-14" }); - const chipContainer = this.getChildControl("chips"); - chipContainer.add(label); - - this.getNode().bind("outputs", label, "value", { - converter: outputs => { + const outputToValue = outputs => { if ("out_1" in outputs && "value" in outputs["out_1"]) { const val = outputs["out_1"]["value"]; if (Array.isArray(val)) { @@ -437,8 +637,18 @@ qx.Class.define("osparc.workbench.NodeUI", { return String(val); } return ""; - } + } + this.getNode().bind("outputs", valueLabel, "value", { + converter: outputs => outputToValue(outputs) + }); + this.getNode().bind("outputs", valueLabel, "toolTipText", { + converter: outputs => outputToValue(outputs) }); + const middleContainer = this.getChildControl("middle-container"); + middleContainer.add(valueLabel, { + flex: 1 + }); + this.fireEvent("updateNodeDecorator"); }, @@ -468,20 +678,32 @@ qx.Class.define("osparc.workbench.NodeUI", { }, __turnIntoProbeUI: function() { - const width = 150; + const width = 120; this.__setNodeUIWidth(width); const linkLabel = new osparc.ui.basic.LinkLabel().set({ - paddingLeft: 5, - font: "text-12" + paddingLeft: 4, + font: "text-14", + rich: false, // this will make the ellipsis work + }); + const middleContainer = this.getChildControl("middle-container"); + middleContainer.add(linkLabel, { + flex: 1 }); - const chipContainer = this.getChildControl("chips"); - chipContainer.add(linkLabel); this.getNode().getPropsForm().addListener("linkFieldModified", () => this.__setProbeValue(linkLabel), this); this.__setProbeValue(linkLabel); }, + __turnIntoUnknownUI: function() { + const width = 110; + this.__setNodeUIWidth(width); + + this.setEnabled(false); + + this.fireEvent("updateNodeDecorator"); + }, + __checkTurnIntoIteratorUI: function() { const outputs = this.getNode().getOutputs(); const portKey = "out_1"; @@ -528,9 +750,9 @@ qx.Class.define("osparc.workbench.NodeUI", { converter: outputs => { if (portKey in outputs && "value" in outputs[portKey] && outputs[portKey]["value"]) { const val = outputs[portKey]["value"]; - if (this.getNode().getMetaData()["key"].includes("probe/array")) { + if (this.getNode().getMetadata()["key"].includes("probe/array")) { return "[" + val.join(",") + "]"; - } else if (this.getNode().getMetaData()["key"].includes("probe/file")) { + } else if (this.getNode().getMetadata()["key"].includes("probe/file")) { const filename = val.filename || osparc.file.FilePicker.getFilenameFromPath(val); populateLinkLabel(val); return filename; @@ -548,37 +770,40 @@ qx.Class.define("osparc.workbench.NodeUI", { __createPorts: function(isInput, draw) { if (draw === false) { - this._createPort(isInput, 
true); + this.__createPort(isInput, true); return; } - const port = this._createPort(isInput); + const port = this.__createPort(isInput); + port.addListener("tap", () => { + this.fireDataEvent("requestOpenServiceCatalog", isInput); + }, this); port.addListener("mouseover", () => { - port.setSource(osparc.workbench.BaseNodeUI.PORT_CONNECTED); + port.setSource(this.self().PORT_CONNECTED); }, this); port.addListener("mouseout", () => { const isConnected = isInput ? this.getNode().getInputConnected() : this.getNode().getOutputConnected(); port.set({ - source: isConnected ? osparc.workbench.BaseNodeUI.PORT_CONNECTED : osparc.workbench.BaseNodeUI.PORT_DISCONNECTED + source: isConnected ? this.self().PORT_CONNECTED : this.self().PORT_DISCONNECTED }); }, this); if (isInput) { this.getNode().getStatus().bind("dependencies", port, "textColor", { converter: dependencies => { - if (dependencies !== null) { - return osparc.service.StatusUI.getColor(dependencies.length ? "modified" : "ready"); + if (dependencies) { + return dependencies.length ? "failed-red" : "ready-green"; } - return osparc.service.StatusUI.getColor(); + return "workbench-edge"; } }); this.getNode().bind("inputConnected", port, "source", { - converter: isConnected => isConnected ? osparc.workbench.BaseNodeUI.PORT_CONNECTED : osparc.workbench.BaseNodeUI.PORT_DISCONNECTED + converter: isConnected => isConnected ? this.self().PORT_CONNECTED : this.self().PORT_DISCONNECTED }); } else { this.getNode().getStatus().bind("output", port, "textColor", { converter: output => osparc.service.StatusUI.getColor(output) }); this.getNode().bind("outputConnected", port, "source", { - converter: isConnected => isConnected ? osparc.workbench.BaseNodeUI.PORT_CONNECTED : osparc.workbench.BaseNodeUI.PORT_DISCONNECTED + converter: isConnected => isConnected ? 
this.self().PORT_CONNECTED : this.self().PORT_DISCONNECTED }); } @@ -607,21 +832,14 @@ qx.Class.define("osparc.workbench.NodeUI", { }; }, - // override qx.ui.core.MMovable - _onMovePointerMove: function(e) { - // Only react when dragging is active - if (!this.hasState("move") || !this.getIsMovable()) { - return; - } - const coords = this._setPositionFromEvent(e); - this.getNode().setPosition(coords); - this.base(arguments, e); + moveNodeTo: function(pos) { + this.moveTo(pos.x, pos.y); }, setPosition: function(pos) { const node = this.getNode(); node.setPosition(pos); - this.moveTo(node.getPosition().x, node.getPosition().y); + this.moveNodeTo(pos); }, snapToGrid: function() { @@ -633,11 +851,10 @@ qx.Class.define("osparc.workbench.NodeUI", { const snapGrid = 20; const snapX = Math.round(x/snapGrid)*snapGrid; const snapY = Math.round(y/snapGrid)*snapGrid; - node.setPosition({ + this.setPosition({ x: snapX, y: snapY }); - this.moveTo(node.getPosition().x, node.getPosition().y); }, __applyThumbnail: function(thumbnailSrc) { @@ -664,36 +881,185 @@ qx.Class.define("osparc.workbench.NodeUI", { } }, - __filterText: function(text) { - const label = this.getNode().getLabel() - .trim() - .toLowerCase(); - if (label.indexOf(text) === -1) { - return true; + __getMenuButton: function() { + const optionsMenu = this.__optionsMenu = new qx.ui.menu.Menu().set({ + position: "bottom-right" + }); + + const renameBtn = new qx.ui.menu.Button().set({ + label: this.tr("Rename"), + icon: "@FontAwesome5Solid/i-cursor/10" + }); + renameBtn.getChildControl("shortcut").setValue("F2"); + renameBtn.addListener("execute", () => this.fireDataEvent("renameNode", this.getNodeId())); + optionsMenu.add(renameBtn); + + const markerBtn = this.__markerBtn = new qx.ui.menu.Button().set({ + icon: "@FontAwesome5Solid/bookmark/10", + visibility: "excluded" + }); + optionsMenu.add(markerBtn); + + const infoBtn = new qx.ui.menu.Button().set({ + label: this.tr("Information..."), + icon: "@FontAwesome5Solid/info/10" + }); + infoBtn.getChildControl("shortcut").setValue("I"); + infoBtn.addListener("execute", () => this.fireDataEvent("infoNode", this.getNodeId())); + optionsMenu.add(infoBtn); + + const deleteBtn = this.__deleteBtn = new qx.ui.menu.Button().set({ + label: this.tr("Delete"), + icon: "@FontAwesome5Solid/trash/10" + }); + deleteBtn.getChildControl("shortcut").setValue("Del"); + deleteBtn.addListener("execute", () => this.fireDataEvent("removeNode", this.getNodeId())); + optionsMenu.add(deleteBtn); + + const menuBtn = new qx.ui.form.MenuButton().set({ + menu: optionsMenu, + icon: "@FontAwesome5Solid/ellipsis-v/9", + height: 18, + width: 18, + allowGrowX: false, + allowGrowY: false + }); + return menuBtn; + }, + + getInputPort: function() { + return this.__inputLayout; + }, + + getOutputPort: function() { + return this.__outputLayout; + }, + + __createPort: function(isInput, placeholder = false) { + let port = null; + const width = this.self().PORT_DIAMETER; + if (placeholder) { + port = new qx.ui.core.Spacer(width, width); + } else { + port = new qx.ui.basic.Image().set({ + source: this.self().PORT_DISCONNECTED, // disconnected by default + height: width, + width: width, + marginTop: this.self().PORT_MARGIN_TOP, + draggable: true, + droppable: true, + alignY: "top", + backgroundColor: "background-main" + }); + port.setCursor("pointer"); + port.getContentElement().setStyles({ + "border-radius": width+"px" + }); + port.isInput = isInput; + } + // make the ports exit the NodeUI + port.set({ + marginLeft: isInput ? 
(-10 + this.self().CONTENT_PADDING) : 0, + marginRight: isInput ? 0 : (-10 - this.self().CONTENT_PADDING) + }); + + this.add(port, { + row: 0, + column: isInput ? 0 : 2 + }); + + if (isInput) { + this.__inputLayout = port; + } else { + this.__outputLayout = port; } - return false; + + return port; + }, + + getEdgePoint: function(port) { + const bounds = this.getCurrentBounds(); + const captionHeight = Math.max(this.getChildControl("captionbar").getSizeHint().height, this.self().captionHeight()); + const x = port.isInput ? bounds.left - 6 : bounds.left + bounds.width - 1; + const y = bounds.top + captionHeight + this.self().PORT_DIAMETER/2 + this.self().PORT_MARGIN_TOP + 2; + return [x, y]; }, - __filterTags: function(tags) { - if (tags && tags.length) { - const category = this.getNode().getMetaData().category || ""; - const type = this.getNode().getMetaData().type || ""; - if (!tags.includes(osparc.utils.Utils.capitalize(category.trim())) && !tags.includes(osparc.utils.Utils.capitalize(type.trim()))) { - return true; + getCurrentBounds: function() { + let bounds = this.getBounds(); + let cel = this.getContentElement(); + if (cel) { + let domeEle = cel.getDomElement(); + if (domeEle) { + bounds.left = parseInt(domeEle.style.left); + bounds.top = parseInt(domeEle.style.top); } } - return false; + return bounds; }, - // implement osparc.filter.IFilterable - _shouldApplyFilter: function(data) { - if (data.text) { - return this.__filterText(data.text); + __scaleCoordinates: function(x, y) { + return { + x: parseInt(x / this.getScale()), + y: parseInt(y / this.getScale()) + }; + }, + + __setPositionFromEvent: function(e) { + // this.__dragRange is defined in qx.ui.core.MMovable + const sideBarWidth = this.__dragRange.left; + const navigationBarHeight = this.__dragRange.top; + const native = e.getNativeEvent(); + const x = native.clientX + this.__dragLeft - sideBarWidth; + const y = native.clientY + this.__dragTop - navigationBarHeight; + const coords = this.__scaleCoordinates(x, y); + const insets = this.getLayoutParent().getInsets(); + this.setDomPosition(coords.x - (insets.left || 0), coords.y - (insets.top || 0)); + return coords; + }, + + // override qx.ui.core.MMovable + _onMovePointerMove: function(e) { + // Only react when dragging is active + if (!this.hasState("move") || !this.getIsMovable()) { + return; } - if (data.tags && data.tags.length) { - return this.__filterTags(data.tags); + const coords = this.__setPositionFromEvent(e); + e.stopPropagation(); + if (this.__nodeMoving === false) { + this.__nodeMoving = true; + this.fireEvent("nodeMovingStart"); } - return false; - } + this.fireDataEvent("nodeMoving", coords); + }, + + // override qx.ui.core.MMovable + _onMovePointerUp : function(e) { + if (this.hasListener("roll")) { + this.removeListener("roll", this._onMoveRoll, this); + } + + // Only react when dragging is active + if (!this.hasState("move") || !this.getIsMovable()) { + return; + } + + this._onMovePointerMove(e); + + this.__nodeMoving = false; + + // Only consolidate position when it stops moving + const coords = this.__setPositionFromEvent(e); + this.getNode().setPosition(coords); + + this.fireEvent("nodeMovingStop"); + + // Remove drag state + this.removeState("move"); + + this.releaseCapture(); + + e.stopPropagation(); + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/workbench/ServiceCatalog.js b/services/static-webserver/client/source/class/osparc/workbench/ServiceCatalog.js index 4bb6b8693987..26b5f7a9e327 100644 --- 
a/services/static-webserver/client/source/class/osparc/workbench/ServiceCatalog.js +++ b/services/static-webserver/client/source/class/osparc/workbench/ServiceCatalog.js @@ -82,7 +82,26 @@ qx.Class.define("osparc.workbench.ServiceCatalog", { statics: { LATEST: "latest", Width: 580, - Height: 500 + Height: 500, + + canItBeOpened: function(study) { + if (study) { + if (study.isReadOnly()) { + osparc.FlashMessenger.logError("Nodes can't be added to a read-only project"); + return false; + } + if (!osparc.data.model.Study.canIWrite(study.getAccessRights())) { + osparc.FlashMessenger.logError("You don't have permissions to add nodes to this project"); + return false; + } + if (study.isPipelineRunning()) { + osparc.FlashMessenger.logError(osparc.data.model.Workbench.CANT_ADD_NODE); + return false; + } + return true; + } + return true; + }, }, members: { @@ -201,7 +220,8 @@ qx.Class.define("osparc.workbench.ServiceCatalog", { const excludeDeprecated = true; osparc.store.Services.getServicesLatestList(excludeFrontend, excludeDeprecated) .then(servicesList => { - this.__servicesLatest = servicesList.filter(service => service !== null); + // first check metadata is complete + this.__servicesLatest = servicesList.filter(service => service !== null && service.inputs && service.outputs); this.__updateList(); }); }, diff --git a/services/static-webserver/client/source/class/osparc/workbench/SvgWidget.js b/services/static-webserver/client/source/class/osparc/workbench/SvgWidget.js index ba8af4fc2f67..b99cb59cd6fa 100644 --- a/services/static-webserver/client/source/class/osparc/workbench/SvgWidget.js +++ b/services/static-webserver/client/source/class/osparc/workbench/SvgWidget.js @@ -105,6 +105,10 @@ qx.Class.define("osparc.workbench.SvgWidget", { return osparc.wrapper.Svg.drawAnnotationRect(this.__canvas, width, height, x, y, color); }, + drawAnnotationConversation: function(x, y, title) { + return osparc.wrapper.Svg.drawAnnotationConversation(this.__canvas, x, y, title); + }, + drawDashedRect: function(width, height, x = 0, y = 0) { return osparc.wrapper.Svg.drawDashedRect(this.__canvas, width, height, x, y); }, diff --git a/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js b/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js index a9b633ed0086..85a336fe17a6 100644 --- a/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js +++ b/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js @@ -33,9 +33,6 @@ * */ -const BUTTON_SIZE = 38; -const NODE_INPUTS_WIDTH = 210; - qx.Class.define("osparc.workbench.WorkbenchUI", { extend: qx.ui.core.Widget, @@ -56,16 +53,8 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { }, statics: { - getDashedBorderStyle(isRight) { - const side = isRight ? "right" : "left"; - const borderStyle = {}; - borderStyle["background-image"] = `linear-gradient(to bottom, #3D3D3D 50%, rgba(255, 255, 255, 0) 0%)`; - borderStyle["background-position"] = side; - borderStyle["background-size"] = "5px 50px"; - borderStyle["background-repeat"] = "repeat-y"; - return borderStyle; - }, - + BUTTON_SIZE: 38, + NODE_INPUTS_WIDTH: 210, ZOOM_VALUES: [ 0.1, 0.2, @@ -83,7 +72,17 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { 2, 2.5, 3 - ] + ], + + getDashedBorderStyle(isRight) { + const side = isRight ? 
"right" : "left"; + const borderStyle = {}; + borderStyle["background-image"] = `linear-gradient(to bottom, #3D3D3D 50%, rgba(255, 255, 255, 0) 0%)`; + borderStyle["background-position"] = side; + borderStyle["background-size"] = "5px 50px"; + borderStyle["background-repeat"] = "repeat-y"; + return borderStyle; + }, }, events: { @@ -132,13 +131,12 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { __dropHereUI: null, __selectionRectInitPos: null, __selectionRectRepr: null, + __rectAnnotationRepr: null, __panning: null, __isDraggingFile: null, __isDraggingLink: null, __annotations: null, - __annotatingNote: null, - __annotatingRect: null, - __annotatingText: null, + __annotating: null, __annotationInitPos: null, __selectedAnnotations: null, __annotationEditor: null, @@ -228,8 +226,8 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { __addDeleteItemButton: function() { const deleteItemButton = this.__deleteItemButton = new qx.ui.form.Button().set({ icon: "@FontAwesome5Solid/trash/18", - width: BUTTON_SIZE, - height: BUTTON_SIZE, + width: this.self().BUTTON_SIZE, + height: this.self().BUTTON_SIZE, visibility: "excluded" }); deleteItemButton.addListener("execute", () => { @@ -272,8 +270,8 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { const label = isInput ? this.tr("INPUTS") : this.tr("OUTPUTS"); const inputOutputNodesLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(5)); inputOutputNodesLayout.set({ - width: NODE_INPUTS_WIDTH, - maxWidth: NODE_INPUTS_WIDTH, + width: this.self().NODE_INPUTS_WIDTH, + maxWidth: this.self().NODE_INPUTS_WIDTH, allowGrowX: false, padding: [0, 6] }); @@ -294,30 +292,38 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { this.openServiceCatalog(nodePos); }, - openServiceCatalog: function(nodePos) { - if (this.getStudy().isReadOnly()) { - return null; + __openServiceCatalogWithContext: function(nodeUI, isNodeInput) { + const freePos = this.getStudy().getWorkbench().getFreePosition(nodeUI.getNode(), isNodeInput); + const srvCat = this.openServiceCatalog(freePos); + if (srvCat) { + if (isNodeInput) { + srvCat.setContext(null, nodeUI.getNodeId()); + } else { + srvCat.setContext(nodeUI.getNodeId(), null); + } } - if (this.getStudy().isPipelineRunning()) { - osparc.FlashMessenger.logError(osparc.data.model.Workbench.CANT_ADD_NODE); - return null; + }, + + openServiceCatalog: function(nodePos) { + if (osparc.workbench.ServiceCatalog.canItBeOpened(this.getStudy())) { + const srvCat = new osparc.workbench.ServiceCatalog(); + srvCat.addListener("addService", async e => { + const { + service, + nodeLeftId, + nodeRightId + } = e.getData(); + const nodeUI = await this.__addNode(service, nodePos); + if (nodeUI && nodeLeftId !== null || nodeRightId !== null) { + const newNodeId = nodeUI.getNodeId(); + this._createEdgeBetweenNodes(nodeLeftId ? nodeLeftId : newNodeId, nodeRightId ? nodeRightId : newNodeId, true); + } + }, this); + srvCat.center(); + srvCat.open(); + return srvCat; } - const srvCat = new osparc.workbench.ServiceCatalog(); - srvCat.addListener("addService", async e => { - const { - service, - nodeLeftId, - nodeRightId - } = e.getData(); - const nodeUI = await this.__addNode(service, nodePos); - if (nodeUI && nodeLeftId !== null || nodeRightId !== null) { - const newNodeId = nodeUI.getNodeId(); - this._createEdgeBetweenNodes(nodeLeftId ? nodeLeftId : newNodeId, nodeRightId ? 
nodeRightId : newNodeId, true); - } - }, this); - srvCat.center(); - srvCat.open(); - return srvCat; + return null; }, __createTemporaryNodeUI: function(pos) { @@ -329,43 +335,54 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { }); temporaryNodeUI.getContentElement().addClass("rotate"); this.__workbenchLayout.add(temporaryNodeUI); - temporaryNodeUI.rect = this.__svgLayer.drawDashedRect(boxWidth, boxHeight); + temporaryNodeUI["rect"] = this.__svgLayer.drawDashedRect(boxWidth, boxHeight); temporaryNodeUI.setLayoutProperties({ left: pos.x + parseInt(boxWidth/2) - parseInt(circleSize/2), top: pos.y + parseInt(boxHeight/2) - parseInt(circleSize/2) }); - osparc.wrapper.Svg.updateItemPos(temporaryNodeUI.rect, pos.x, pos.y); + osparc.wrapper.Svg.updateItemPos(temporaryNodeUI["rect"], pos.x, pos.y); return temporaryNodeUI; }, __removeTemporaryNodeUI: function(temporaryNodeUI) { temporaryNodeUI.exclude(); - osparc.wrapper.Svg.removeItem(temporaryNodeUI.rect); + osparc.wrapper.Svg.removeItem(temporaryNodeUI["rect"]); this.__workbenchLayout.add(temporaryNodeUI); temporaryNodeUI = null; }, __addNode: async function(service, pos) { // render temporary node - let tempNodeUI = this.__createTemporaryNodeUI(pos); + let dashedNodeUI = this.__createTemporaryNodeUI(pos); let nodeUI = null; try { const node = await this.__getWorkbench().createNode(service.getKey(), service.getVersion()); - nodeUI = this._createNodeUI(node.getNodeId()); - this._addNodeUIToWorkbench(nodeUI, pos); - qx.ui.core.queue.Layout.flush(); - this.__createDragDropMechanism(nodeUI); + nodeUI = this.addNode(node, pos); } catch (err) { console.error(err); } finally { // remove temporary node - this.__removeTemporaryNodeUI(tempNodeUI); + this.__removeTemporaryNodeUI(dashedNodeUI); } return nodeUI; }, + addNode: function(node, pos) { + if (pos === undefined) { + pos = { + x: 0, + y: 0, + }; + } + const nodeUI = this._createNodeUI(node.getNodeId()); + this._addNodeUIToWorkbench(nodeUI, pos); + qx.ui.core.queue.Layout.flush(); + this.__createDragDropMechanism(nodeUI); + return nodeUI; + }, + __getNodesBounds: function() { if (this.__nodesUI.length === 0) { return null; @@ -420,7 +437,7 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { nodeUI.addListener("appear", () => this.__updateNodeUIPos(nodeUI), this); - const isStudyReadOnly = this.getStudy().isReadOnly(); + const isStudyReadOnly = this.isPropertyInitialized("study") ? 
this.getStudy().isReadOnly() : true; nodeUI.set({ movable: !isStudyReadOnly, selectable: !isStudyReadOnly, @@ -445,7 +462,8 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { __itemMoving: function(itemId, xDiff, yDiff) { this.getSelectedNodeUIs().forEach(selectedNodeUI => { if (itemId !== selectedNodeUI.getNodeId()) { - selectedNodeUI.setPosition({ + // do not touch the position, just move the node, this will happen in __itemStoppedMoving + selectedNodeUI.moveNodeTo({ x: selectedNodeUI.initPos.x + xDiff, y: selectedNodeUI.initPos.y + yDiff }); @@ -466,10 +484,24 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { this.getSelectedNodeUIs().forEach(selectedNodeUI => delete selectedNodeUI["initPos"]); this.getSelectedAnnotations().forEach(selectedAnnotation => delete selectedAnnotation["initPos"]); - if (nodeUI && osparc.Preferences.getInstance().isSnapNodeToGrid()) { - nodeUI.snapToGrid(); - // make sure nodeUI is moved, then update edges - setTimeout(() => this.__updateNodeUIPos(nodeUI), 10); + // the moving item could be an annotation, so we need to check if it is a nodeUI + if (nodeUI) { + this.getSelectedNodeUIs().forEach(selectedNodeUI => { + if (nodeUI !== selectedNodeUI) { + // now set the position + const layoutProps = selectedNodeUI.getLayoutProperties(); + selectedNodeUI.setPosition({ + x: layoutProps.left, + y: layoutProps.top, + }); + } + }); + + if (osparc.Preferences.getInstance().isSnapNodeToGrid()) { + this.getSelectedNodeUIs().forEach(selectedNodeUI => selectedNodeUI.snapToGrid()); + // make sure nodeUI is moved, then update edges + setTimeout(() => this.__updateNodeUIPos(nodeUI), 10); + } } this.__updateWorkbenchBounds(); @@ -497,12 +529,13 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { this.__itemStartedMoving(); }, this); - nodeUI.addListener("nodeMoving", () => { + nodeUI.addListener("nodeMoving", e => { this.__updateNodeUIPos(nodeUI); if ("initPos" in nodeUI) { // multi node move - const xDiff = nodeUI.getNode().getPosition().x - nodeUI.initPos.x; - const yDiff = nodeUI.getNode().getPosition().y - nodeUI.initPos.y; + const coords = e.getData(); + const xDiff = coords.x - nodeUI.initPos.x; + const yDiff = coords.y - nodeUI.initPos.y; this.__itemMoving(nodeUI.getNodeId(), xDiff, yDiff); } }, this); @@ -515,7 +548,7 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { nodeUI.addListener("dbltap", e => { this.fireDataEvent("nodeSelected", nodeUI.getNodeId()); - if (nodeUI.getNode().canNodeStart()) { + if (nodeUI.getNode().canNodeStart() && !nodeUI.getNode().getStudy().getDisableServiceAutoStart()) { nodeUI.getNode().requestStartNode(); } e.stopPropagation(); @@ -524,7 +557,13 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { __addAnnotationListeners: function(annotation) { annotation.addListener("annotationStartedMoving", () => { - this.__selectAnnotation(annotation); + if ([ + osparc.workbench.Annotation.TYPES.NOTE, + osparc.workbench.Annotation.TYPES.RECT, + osparc.workbench.Annotation.TYPES.TEXT, + ].includes(annotation.getType())) { + this.__selectAnnotation(annotation); + } this.__itemStartedMoving(); }, this); @@ -643,30 +682,38 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { const nodeUI = new osparc.workbench.NodeUI(node); this.bind("scale", nodeUI, "scale"); node.addListener("keyChanged", () => this.__selectNode(nodeUI), this); + node.addListener("edgeCreated", e => { + const data = e.getData(); + const { nodeId1, nodeId2 } = data; + this._createEdgeBetweenNodes(nodeId1, nodeId2, false); + }); + node.addListener("edgeRemoved", e => { + const data 
= e.getData(); + const { nodeId1, nodeId2 } = data; + this.__removeEdgeBetweenNodes(nodeId1, nodeId2); + }); nodeUI.populateNodeLayout(this.__svgLayer); nodeUI.addListener("renameNode", e => this.__openNodeRenamer(e.getData()), this); nodeUI.addListener("markerClicked", e => this.__openMarkerEditor(e.getData()), this); nodeUI.addListener("infoNode", e => this.__openNodeInfo(e.getData()), this); nodeUI.addListener("removeNode", e => this.fireDataEvent("removeNode", e.getData()), this); - - if (nodeUI.getNode().getPropsForm()) { - nodeUI.getNode().getPropsForm().addListener("highlightEdge", e => { - const { - highlight, - fromNodeId, - toNodeId, - } = e.getData(); - const edgeFound = this.__edgesUI.find(edgeUI => { - const edge = edgeUI.getEdge(); - const inputNode = edge.getInputNode(); - const outputNode = edge.getOutputNode(); - return (inputNode.getNodeId() === fromNodeId && outputNode.getNodeId() === toNodeId) - }); - if (edgeFound) { - edgeFound.setHighlighted(highlight); - } + nodeUI.addListener("highlightEdge", e => { + const { + highlight, + fromNodeId, + toNodeId, + } = e.getData(); + const edgeFound = this.__edgesUI.find(edgeUI => { + const edge = edgeUI.getEdge(); + const inputNode = edge.getInputNode(); + const outputNode = edge.getOutputNode(); + return (inputNode.getNodeId() === fromNodeId && outputNode.getNodeId() === toNodeId) }); - } + if (edgeFound) { + edgeFound.setHighlighted(highlight); + } + }); + nodeUI.addListener("requestOpenServiceCatalog", e => this.__openServiceCatalogWithContext(nodeUI, e.getData()), this); return nodeUI; }, @@ -808,38 +855,62 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { const edgeUI = new osparc.workbench.EdgeUI(edge, edgeRepresentation); this.__edgesUI.push(edgeUI); - const hint = edgeUI.getHint(); - const that = this; + this.__decorateEdgeUI(edgeUI); + } + }, + + __decorateEdgeUI: function(edgeUI) { + const hint = edgeUI.getHint(); + const edgeRepresentation = edgeUI.getRepresentation(); + const that = this; + [ + edgeRepresentation.widerCurve.node, + edgeRepresentation.node + ].forEach(svgEl => { + svgEl.addEventListener("click", e => { + // this is needed to get out of the context of svg + that.__setSelectedItem(edgeUI.getEdgeId()); // eslint-disable-line no-underscore-dangle + e.stopPropagation(); + }, this); + + const topOffset = 20; [ - edgeRepresentation.widerCurve.node, - edgeRepresentation.node - ].forEach(svgEl => { - svgEl.addEventListener("click", e => { - // this is needed to get out of the context of svg - that.__setSelectedItem(edgeUI.getEdgeId()); // eslint-disable-line no-underscore-dangle - e.stopPropagation(); + "mouseover", + "mousemove" + ].forEach(ev => { + svgEl.addEventListener(ev, e => { + const leftOffset = -(parseInt(hint.getHintBounds().width/2)); + const properties = { + top: e.clientY + topOffset, + left: e.clientX + leftOffset + }; + hint.setLayoutProperties(properties); + if (hint.getText()) { + hint.show(); + } }, this); - - const topOffset = 20; - [ - "mouseover", - "mousemove" - ].forEach(ev => { - svgEl.addEventListener(ev, e => { - const leftOffset = -(parseInt(hint.getHintBounds().width/2)); - const properties = { - top: e.clientY + topOffset, - left: e.clientX + leftOffset - }; - hint.setLayoutProperties(properties); - if (hint.getText()) { - hint.show(); - } - }, this); - }); }); - edgeUI.getRepresentation().widerCurve.node.addEventListener("mouseout", () => hint.exclude(), this); - this.__svgLayer.addListener("mouseout", () => hint.exclude(), this); + }); + 
edgeRepresentation.widerCurve.node.addEventListener("mouseout", () => hint.exclude(), this); + this.__svgLayer.addListener("mouseout", () => hint.exclude(), this); + }, + + __getEdgeUIBetweenNodes: function(node1Id, node2Id) { + const foundEdgeUI = this.__edgesUI.find(edgeUi => { + const edgeObj = edgeUi.getEdge(); + const inputNode = edgeObj.getInputNode(); + const outputNode = edgeObj.getOutputNode(); + if (inputNode.getNodeId() === node1Id && outputNode.getNodeId() === node2Id) { + return true; + } + }); + return foundEdgeUI; + }, + + __removeEdgeBetweenNodes: function(node1Id, node2Id) { + const edgeUI = this.__getEdgeUIBetweenNodes(node1Id, node2Id); + if (edgeUI) { + this.__removeEdge(edgeUI); } }, @@ -850,11 +921,7 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { }, __updateEdges: function(nodeUI) { - let edgesInvolved = []; - if (nodeUI.getNodeType() === "service") { - edgesInvolved = this.__getWorkbench().getConnectedEdges(nodeUI.getNodeId()); - } - + const edgesInvolved = this.__getWorkbench().getConnectedEdges(nodeUI.getNodeId()); edgesInvolved.forEach(edgeId => { const edgeUI = this.__getEdgeUI(edgeId); if (edgeUI) { @@ -984,11 +1051,11 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { osparc.workbench.SvgWidget.updateCurve(this.__tempEdgeRepr, x1, y1, x2, y2); } const portLabel = port.isInput ? nodeUI.getInputPort() : nodeUI.getOutputPort(); - portLabel.setSource(osparc.workbench.BaseNodeUI.PORT_CONNECTED); + portLabel.setSource(osparc.workbench.NodeUI.PORT_CONNECTED); if (!this.__tempEdgeIsInput) { - const modified = nodeUI.getNode().getStatus().getModified(); - const colorHex = osparc.workbench.EdgeUI.getEdgeColor(modified); + const output = nodeUI.getNode().getStatus().getOutput(); + const colorHex = osparc.workbench.EdgeUI.getEdgeColor(output); osparc.wrapper.Svg.updateCurveColor(this.__tempEdgeRepr, colorHex); } }, @@ -1003,7 +1070,7 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { const isConnected = this.__tempEdgeIsInput ? nodeUI.getNode().getInputConnected() : nodeUI.getNode().getOutputConnected(); const portLabel = this.__tempEdgeIsInput ? nodeUI.getInputPort() : nodeUI.getOutputPort(); portLabel.set({ - source: isConnected ? osparc.workbench.BaseNodeUI.PORT_CONNECTED : osparc.workbench.BaseNodeUI.PORT_DISCONNECTED + source: isConnected ? osparc.workbench.NodeUI.PORT_CONNECTED : osparc.workbench.NodeUI.PORT_DISCONNECTED }); } @@ -1036,21 +1103,18 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { }, getNodeUI: function(nodeId) { - return this.__nodesUI.find(nodeUI => nodeUI.getNodeType() === "service" && nodeUI.getNodeId() === nodeId); + return this.__nodesUI.find(nodeUI => nodeUI.getNodeId() === nodeId); }, __getEdgeUI: function(edgeId) { - for (let i = 0; i < this.__edgesUI.length; i++) { - if (this.__edgesUI[i].getEdgeId() === edgeId) { - return this.__edgesUI[i]; - } - } - return null; + return this.__edgesUI.find(edgeUI => edgeUI.getEdgeId() === edgeId); }, clearNode(nodeId) { const nodeUI = this.getNodeUI(nodeId); - this.__clearNodeUI(nodeUI); + if (nodeUI) { + this.__clearNodeUI(nodeUI); + } }, clearEdge: function(edgeId) { @@ -1183,18 +1247,18 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { }, __renderAnnotations: function(studyUI) { - const initData = studyUI.getAnnotationsInitData(); - const annotations = initData ? 
initData : studyUI.getAnnotations(); - Object.entries(annotations).forEach(([annotationId, annotation]) => { - if (annotation instanceof osparc.workbench.Annotation) { - this.__addAnnotation(annotation.serialize(), annotationId); - } else { - this.__addAnnotation(annotation, annotationId); - } + const annotations = studyUI.getAnnotations(); + Object.values(annotations).forEach(annotation => { + this.__renderAnnotation(annotation); }); - if (initData) { - studyUI.nullAnnotationsInitData(); - } + studyUI.addListener("annotationAdded", e => { + const annotation = e.getData(); + this.__renderAnnotation(annotation); + }, this); + studyUI.addListener("annotationRemoved", e => { + const annotationId = e.getData(); + this.__removeAnnotation(annotationId); + }, this); }, __setSelectedItem: function(newID) { @@ -1215,16 +1279,7 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { edge.setSelected(true); } else if (this.__isSelectedItemAnAnnotation()) { const annotation = this.__getAnnotation(newID); - this.__setSelectedAnnotations([annotation]); - const annotationEditor = this.__getAnnotationEditorView(); - annotationEditor.setAnnotation(annotation); - annotationEditor.makeItModal(); - annotationEditor.addListener("deleteAnnotation", () => { - annotationEditor.exclude(); - this.__removeAnnotation(annotation.getId()); - this.resetSelection(); - }, this); - annotation.addListener("changeColor", e => this.__annotationLastColor = e.getData()); + this.__annotationSelected(annotation); } else { this.fireDataEvent("changeSelectedNode", newID); } @@ -1246,6 +1301,28 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { return this.__isSelectedItemAnAnnotation() ? this.__annotations[this.__selectedItemId] : null; }, + __annotationSelected: function(annotation) { + this.__setSelectedAnnotations([annotation]); + switch (annotation.getType()) { + case osparc.workbench.Annotation.TYPES.CONVERSATION: { + this.__popUpConversation(annotation.getAttributes()["conversationId"], annotation.getId()); + break; + } + default: { + const annotationEditor = this.__getAnnotationEditorView(); + annotationEditor.setAnnotation(annotation); + annotationEditor.makeItModal(); + annotationEditor.addListener("deleteAnnotation", () => { + annotationEditor.exclude(); + this.__removeAnnotation(annotation.getId()); + this.resetSelection(); + }, this); + annotation.addListener("changeColor", e => this.__annotationLastColor = e.getData()); + break; + } + } + }, + __scaleCoordinates: function(x, y) { return { x: parseInt(x / this.getScale()), @@ -1329,23 +1406,11 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { }, addServiceInput: { "text": "\uf090", // in - "action": () => { - const freePos = this.getStudy().getWorkbench().getFreePosition(nodeUI.getNode(), true); - const srvCat = this.openServiceCatalog(freePos); - if (srvCat) { - srvCat.setContext(null, nodeUI.getNodeId()); - } - } + "action": () => this.__openServiceCatalogWithContext(nodeUI, true) }, addServiceOutput: { "text": "\uf08b", // out - "action": () => { - const freePos = this.getStudy().getWorkbench().getFreePosition(nodeUI.getNode(), false); - const srvCat = this.openServiceCatalog(freePos); - if (srvCat) { - srvCat.setContext(nodeUI.getNodeId(), null); - } - } + "action": () => this.__openServiceCatalogWithContext(nodeUI, false) }, noAction: { "text": "\uf05e", // verboten @@ -1408,7 +1473,7 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { __mouseDownOnSVG: function(e) { if (e.isLeftPressed()) { - if (this.__annotatingNote || this.__annotatingRect || 
this.__annotatingText) { + if (this.__annotating) { this.__annotationInitPos = this.__pointerEventToWorkbenchPos(e); } else { this.__selectionRectInitPos = this.__pointerEventToWorkbenchPos(e); @@ -1419,7 +1484,7 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { __mouseMove: function(e) { if (this.__isDraggingLink) { this.__draggingLink(e, true); - } else if (this.__tempEdgeRepr === null && (this.__annotatingNote || this.__annotatingRect || this.__annotatingText) && this.__annotationInitPos && e.isLeftPressed()) { + } else if (this.__tempEdgeRepr === null && this.__annotating && this.__annotationInitPos && e.isLeftPressed()) { this.__drawingAnnotation(e); } else if (this.__tempEdgeRepr === null && this.__selectionRectInitPos && e.isLeftPressed()) { this.__drawingSelectionRect(e); @@ -1449,25 +1514,8 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { if (this.__annotationInitPos) { this.__annotationInitPos = null; } - if (this.__annotatingNote || this.__annotatingRect || this.__annotatingText) { - let annotationType = null; - if (this.__annotatingNote) { - annotationType = "note"; - } else if (this.__annotatingRect) { - annotationType = "rect"; - } else if (this.__annotatingText) { - annotationType = "text"; - } - if (this.__consolidateAnnotation(annotationType, annotationInitPos, this.__rectAnnotationRepr)) { - if (this.__rectAnnotationRepr) { - osparc.wrapper.Svg.removeItem(this.__rectAnnotationRepr); - this.__rectAnnotationRepr = null; - } - this.__annotatingNote = false; - this.__annotatingRect = false; - this.__annotatingText = false; - this.__toolHint.setValue(null); - } + if (this.__annotating) { + this.__consolidateAnnotation(annotationInitPos); } if (this.__panning) { @@ -1603,23 +1651,27 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { }, startAnnotationsNote: function() { - this.__annotatingNote = true; - this.__annotatingRect = false; - this.__annotatingText = false; + this.__annotating = osparc.workbench.Annotation.TYPES.NOTE; this.__toolHint.setValue(this.tr("Pick the position")); }, startAnnotationsRect: function() { - this.__annotatingNote = false; - this.__annotatingRect = true; - this.__annotatingText = false; + this.__annotating = osparc.workbench.Annotation.TYPES.RECT; this.__toolHint.setValue(this.tr("Draw a rectangle")); }, startAnnotationsText: function(workbenchPos) { - this.__annotatingNote = false; - this.__annotatingText = true; - this.__annotatingRect = false; + this.__annotating = osparc.workbench.Annotation.TYPES.TEXT; + if (workbenchPos) { + this.__annotationInitPos = workbenchPos; + this.__mouseUp(); + } else { + this.__toolHint.setValue(this.tr("Pick the position")); + } + }, + + startConversation: function(workbenchPos) { + this.__annotating = osparc.workbench.Annotation.TYPES.CONVERSATION; if (workbenchPos) { this.__annotationInitPos = workbenchPos; this.__mouseUp(); @@ -1659,7 +1711,7 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { __openNodeInfo: function(nodeId) { if (nodeId) { const node = this.getStudy().getWorkbench().getNode(nodeId); - const metadata = node.getMetaData(); + const metadata = node.getMetadata(); const serviceDetails = new osparc.info.ServiceLarge(metadata, { nodeId, label: node.getLabel(), @@ -1821,7 +1873,7 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { }); dropHereNodeUI.exclude(); this.__workbenchLayout.add(dropHereNodeUI); - dropHereNodeUI.rect = this.__svgLayer.drawDashedRect(boxWidth, boxHeight); + dropHereNodeUI["rect"] = this.__svgLayer.drawDashedRect(boxWidth, boxHeight); } let dropHere = 
this.__dropHereUI; if (show) { @@ -1832,11 +1884,11 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { top: posY - parseInt(dropMeBounds.height/2)- parseInt(boxHeight/2) }); if ("rect" in dropHere) { - osparc.wrapper.Svg.updateItemPos(dropHere.rect, posX - boxWidth, posY - boxHeight); + osparc.wrapper.Svg.updateItemPos(dropHere["rect"], posX - boxWidth, posY - boxHeight); } } else { dropHere.exclude(); - osparc.wrapper.Svg.removeItem(dropHere.rect); + osparc.wrapper.Svg.removeItem(dropHere["rect"]); dropHere = null; } }, @@ -1902,87 +1954,132 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { const y = Math.min(initPos.y, currentPos.y); const width = Math.abs(initPos.x - currentPos.x); const height = Math.abs(initPos.y - currentPos.y); - if ([null, undefined].includes(this.__rectAnnotationRepr)) { + if (this.__rectAnnotationRepr) { + osparc.wrapper.Svg.updateRect(this.__rectAnnotationRepr, width, height, x, y); + } else { const color = this.__annotationLastColor ? this.__annotationLastColor : osparc.workbench.Annotation.DEFAULT_COLOR; this.__rectAnnotationRepr = this.__svgLayer.drawAnnotationRect(width, height, x, y, color); - } else { - osparc.wrapper.Svg.updateRect(this.__rectAnnotationRepr, width, height, x, y); } }, - __consolidateAnnotation: function(type, initPos, annotation) { + __consolidateAnnotation: function(initPos) { + const annotationTypes = osparc.workbench.Annotation.TYPES; + const type = this.__annotating; + if (type === annotationTypes.RECT && !this.__rectAnnotationRepr) { + osparc.FlashMessenger.logAs(this.tr("Draw a rectangle first"), "WARNING"); + return; + } + const color = this.__annotationLastColor ? this.__annotationLastColor : osparc.workbench.Annotation.DEFAULT_COLOR; const serializeData = { type, color, attributes: {} }; - if (type === "rect") { - if ([null, undefined].includes(annotation)) { - osparc.FlashMessenger.logAs(this.tr("Draw a rectangle first"), "WARNING"); - return false; - } - serializeData.attributes = osparc.wrapper.Svg.getRectAttributes(annotation); + if (type === annotationTypes.RECT) { + serializeData.attributes = osparc.wrapper.Svg.getRectAttributes(this.__rectAnnotationRepr); } else { serializeData.attributes = initPos; } - if (type === "note") { - const noteEditor = new osparc.editor.AnnotationNoteCreator(this.getStudy()); - const win = osparc.editor.AnnotationNoteCreator.popUpInWindow(noteEditor); - noteEditor.addListener("addNote", () => { - const gid = noteEditor.getRecipientGid(); - serializeData.attributes.recipientGid = gid; - serializeData.attributes.text = noteEditor.getNote(); - const user = osparc.store.Groups.getInstance().getUserByGroupId(gid) - if (user) { - osparc.notification.Notifications.postNewAnnotationNote(user.getUserId(), this.getStudy().getUuid()); - } + + switch (type) { + case annotationTypes.NOTE: { + const noteEditor = new osparc.editor.AnnotationNoteCreator(this.getStudy()); + const win = osparc.editor.AnnotationNoteCreator.popUpInWindow(noteEditor); + noteEditor.addListener("addNote", () => { + const gid = noteEditor.getRecipientGid(); + serializeData.attributes.recipientGid = gid; + serializeData.attributes.text = noteEditor.getNote(); + const user = osparc.store.Groups.getInstance().getUserByGroupId(gid) + if (user) { + osparc.notification.Notifications.pushNewAnnotationNote(user.getUserId(), this.getStudy().getUuid()); + } + this.__addAnnotation(serializeData); + win.close(); + }, this); + noteEditor.addListener("cancel", () => win.close()); + break; + } + case annotationTypes.RECT: { 
this.__addAnnotation(serializeData); - win.close(); - }, this); - noteEditor.addListener("cancel", () => win.close()); - } else if (type === "rect") { - this.__addAnnotation(serializeData); - } else if (type === "text") { - const tempAnnotation = new osparc.workbench.Annotation(null, { - type: "text", - color, - attributes: { - text: "", - fontSize: 12 + if (this.__rectAnnotationRepr) { + osparc.wrapper.Svg.removeItem(this.__rectAnnotationRepr); + this.__rectAnnotationRepr = null; } - }); - const annotationEditor = new osparc.editor.AnnotationEditor(tempAnnotation); - annotationEditor.addAddButtons(); - tempAnnotation.addListener("changeColor", e => this.__annotationLastColor = e.getData()); - annotationEditor.addListener("appear", () => { - const textField = annotationEditor.getChildControl("text-field"); - textField.focus(); - textField.activate(); - }); - const win = osparc.ui.window.Window.popUpInWindow(annotationEditor, "Add Text Annotation", 220, 135).set({ - clickAwayClose: true, - showClose: true - }); - annotationEditor.addListener("addAnnotation", () => { - win.close(); - const form = annotationEditor.getForm(); - serializeData.attributes.text = form.getItem("text").getValue(); - serializeData.attributes.color = form.getItem("color").getValue(); - serializeData.color = form.getItem("color").getValue(); - serializeData.attributes.fontSize = form.getItem("size").getValue(); - this.__addAnnotation(serializeData); - }, this); - win.open(); + break; + } + case annotationTypes.TEXT: { + const tempAnnotation = new osparc.workbench.Annotation({ + type: annotationTypes.TEXT, + color, + attributes: { + text: "", + fontSize: 12 + } + }); + const annotationEditor = new osparc.editor.AnnotationEditor(tempAnnotation); + annotationEditor.addAddButtons(); + tempAnnotation.addListener("changeColor", e => this.__annotationLastColor = e.getData()); + annotationEditor.addListener("appear", () => { + const textField = annotationEditor.getChildControl("text-field"); + textField.focus(); + textField.activate(); + }); + const win = osparc.ui.window.Window.popUpInWindow(annotationEditor, "Add Text Annotation", 220, 135).set({ + clickAwayClose: true, + showClose: true + }); + annotationEditor.addListener("addAnnotation", () => { + win.close(); + const form = annotationEditor.getForm(); + serializeData.attributes.text = form.getItem("text").getValue(); + serializeData.attributes.color = form.getItem("color").getValue(); + serializeData.color = form.getItem("color").getValue(); + serializeData.attributes.fontSize = form.getItem("size").getValue(); + this.__addAnnotation(serializeData); + }, this); + win.open(); + break; + } + case annotationTypes.CONVERSATION: { + const conversationTitle = `${initPos.x}, ${initPos.y}`; + osparc.store.ConversationsProject.getInstance().postConversation(this.getStudy().getUuid(), conversationTitle, osparc.store.ConversationsProject.TYPES.PROJECT_ANNOTATION) + .then(conversationData => { + serializeData.attributes.conversationId = conversationData["conversationId"]; + serializeData.attributes.text = conversationData["name"]; + const annotation = this.__addAnnotation(serializeData); + this.__popUpConversation(conversationData["conversationId"], annotation.getId()); + }); + break; + } } - return true; + + this.__annotating = null; + this.__toolHint.setValue(null); }, - __addAnnotation: function(data, id) { - const annotation = new osparc.workbench.Annotation(this.__svgLayer, data, id); + __addAnnotation: function(annotationData) { + const annotation = 
this.getStudy().getUi().addAnnotation(annotationData); + + this.__renderAnnotation(annotation); + + return annotation; + }, + + __renderAnnotation: function(annotation) { + annotation.setSvgCanvas(this.__svgLayer); + this.__addAnnotationListeners(annotation); this.__annotations[annotation.getId()] = annotation; - this.getStudy().getUi().addAnnotation(annotation); + + if (annotation.getType() === osparc.workbench.Annotation.TYPES.CONVERSATION) { + osparc.store.ConversationsProject.getInstance().addListener("conversationDeleted", e => { + const data = e.getData(); + if (annotation.getAttributes()["conversationId"] === data["conversationId"]) { + this.__removeAnnotation(annotation.getId()); + } + }, this); + } }, __removeAnnotation: function(id) { @@ -1993,6 +2090,28 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { } }, + __popUpConversation: function(conversationId, annotationId) { + osparc.study.Conversations.popUpInWindow(this.getStudy().serialize(), conversationId); + + // Check if conversation still exists, if not, ask to remove annotation + osparc.store.ConversationsProject.getInstance().getConversation(this.getStudy().getUuid(), conversationId) + .catch(err => { + if ("status" in err && err.status === 404) { + const win = new osparc.ui.window.Confirmation(this.tr("Do you want to remove the annotation?")).set({ + caption: this.tr("Conversation not found"), + confirmText: this.tr("Delete"), + confirmAction: "delete", + }); + win.open(); + win.addListener("close", () => { + if (win.getConfirmed()) { + this.__removeAnnotation(annotationId); + } + }); + } + }); + }, + __dropFile: async function(e) { this.__draggingFile(e, false); diff --git a/services/static-webserver/client/source/class/osparc/wrapper/BookACallIframe.js b/services/static-webserver/client/source/class/osparc/wrapper/BookACallIframe.js new file mode 100644 index 000000000000..5f674a3e0304 --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/wrapper/BookACallIframe.js @@ -0,0 +1,88 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +qx.Class.define("osparc.wrapper.BookACallIframe", { + extend: qx.ui.embed.Iframe, + + construct: function() { + this.base(arguments); + + this.setAppearance("iframe-no-border"); + + this.initDevServiceUrl(); + + // not only once, every time there is a load (e.g. 
when navigating in the iframe) + this.addListener("load", () => this.__updateStyles(), this); + }, + + properties: { + serviceUrl: { + check: "String", + nullable: true, + init: null, + apply: "__applyServiceUrl" + } + }, + + statics: { + DEV_SERVICE_URL: "http://10.43.103.145/index.php", + }, + + members: { + initDevServiceUrl: function() { + this.setServiceUrl(this.self().DEV_SERVICE_URL); + }, + + __applyServiceUrl: function(url) { + const params = []; + const myAuthData = osparc.auth.Data.getInstance(); + const firstName = myAuthData.getFirstName(); + if (firstName) { + params.push("first_name=" + encodeURIComponent(firstName)); + } + const lastName = myAuthData.getLastName(); + if (lastName) { + params.push("last_name=" + encodeURIComponent(lastName)); + } + const email = myAuthData.getEmail(); + if (email) { + params.push("email=" + encodeURIComponent(email)); + } + + if (params.length > 0) { + url += "?" + params.join("&"); + } + + this.setSource(url); + }, + + __updateStyles: function() { + const colorManager = qx.theme.manager.Color.getInstance(); + const iframe = this.getContentElement().getDomElement(); + const theme = { + '--bs-body-bg': colorManager.resolve("background-main-1"), + '--osparc-text-color': colorManager.resolve("text"), + '--osparc-primary': colorManager.resolve("product-color"), + }; + const url = new URL(this.getServiceUrl()); + iframe.contentWindow.postMessage({ + type: 'osparc-theme', + theme + }, url.origin); // targetOrigin = iframe origin + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/wrapper/JsonDiffPatch.js b/services/static-webserver/client/source/class/osparc/wrapper/JsonDiffPatch.js index 92e998ccb3f7..274723702bdb 100644 --- a/services/static-webserver/client/source/class/osparc/wrapper/JsonDiffPatch.js +++ b/services/static-webserver/client/source/class/osparc/wrapper/JsonDiffPatch.js @@ -33,7 +33,7 @@ qx.Class.define("osparc.wrapper.JsonDiffPatch", { statics: { NAME: "jsondiffpatch", - VERSION: "0.3.11", + VERSION: "0.7.3", URL: "https://github.com/benjamine/jsondiffpatch" }, @@ -51,11 +51,12 @@ qx.Class.define("osparc.wrapper.JsonDiffPatch", { members: { __diffPatcher: null, + __deltaToPatch: null, init: function() { // initialize the script loading - let jsondiffpatchPath = "jsondiffpatch/jsondiffpatch.min.js"; - let dynLoader = new qx.util.DynamicScriptLoader([ + const jsondiffpatchPath = "jsondiffpatch/jsondiffpatch-0.7.3.min.js"; // own build required for the formatters to work + const dynLoader = new qx.util.DynamicScriptLoader([ jsondiffpatchPath ]); @@ -64,6 +65,9 @@ qx.Class.define("osparc.wrapper.JsonDiffPatch", { this.__diffPatcher = jsondiffpatch.create(); + const JsonPatchFormatter = jsondiffpatch.formatters.jsonpatch; + this.__deltaToPatch = new JsonPatchFormatter(); + this.setLibReady(true); }, this); @@ -75,20 +79,20 @@ qx.Class.define("osparc.wrapper.JsonDiffPatch", { dynLoader.start(); }, + // https://github.com/benjamine/jsondiffpatch/blob/master/docs/deltas.md diff: function(obj1, obj2) { - // https://github.com/benjamine/jsondiffpatch/blob/master/docs/deltas.md let delta = this.__diffPatcher.diff(obj1, obj2); return delta; }, - patch: function(obj, delta) { - this.__diffPatcher.patch(obj, delta); - return obj; + // format to JSON PATCH (RFC 6902) + // https://github.com/benjamine/jsondiffpatch/blob/master/docs/formatters.md + deltaToJsonPatches: function(delta) { + if (this.__deltaToPatch) { + const patches = this.__deltaToPatch.format(delta); + return patches; + } + return []; }, - - // deep 
clone - clone: function(obj) { - return this.__diffPatcher.clone(obj); - } } }); diff --git a/services/static-webserver/client/source/class/osparc/wrapper/JsonFormatter.js b/services/static-webserver/client/source/class/osparc/wrapper/JsonFormatter.js index 77c219f5452e..8ba12d65188c 100644 --- a/services/static-webserver/client/source/class/osparc/wrapper/JsonFormatter.js +++ b/services/static-webserver/client/source/class/osparc/wrapper/JsonFormatter.js @@ -63,8 +63,15 @@ qx.Class.define("osparc.wrapper.JsonFormatter", { ]); dynLoader.addListenerOnce("ready", () => { + if (typeof JSONFormatter === "undefined") { + reject(new Error("JSONFormatter loaded but did not export to window.JSONFormatter")); + return; + } console.log(jsonFormatterPath + " loaded"); this.setLibReady(true); + + this.__applyStyles(); + resolve(); }, this); @@ -78,6 +85,26 @@ qx.Class.define("osparc.wrapper.JsonFormatter", { }); }, + __applyStyles: function() { + const styleId = "json-formatter-osparc-style"; + if (!document.getElementById(styleId)) { + const color = qx.theme.manager.Color.getInstance().resolve("text"); + const style = document.createElement("style"); + style.id = styleId; + style.innerHTML = ` + .osparc-json-formatter-root * { + color: ${color} !important; /* Use osparc text color */ + font-family: "Manrope", sans-serif !important; /* Use osparc font */ + font-size: 13px !important; /* Force all fonts to 13px */ + } + .osparc-json-formatter-root .json-formatter-constructor-name { + display: none !important; /* Hide "Object" and "Array(n)" labels */ + } + `; + document.head.appendChild(style); + } + }, + createContainer: function(divId) { const container = new qx.ui.embed.Html("
"); diff --git a/services/static-webserver/client/source/class/osparc/wrapper/RadialMenu.js b/services/static-webserver/client/source/class/osparc/wrapper/RadialMenu.js index e488c2e0160b..395bbe639686 100644 --- a/services/static-webserver/client/source/class/osparc/wrapper/RadialMenu.js +++ b/services/static-webserver/client/source/class/osparc/wrapper/RadialMenu.js @@ -62,6 +62,12 @@ qx.Class.define("osparc.wrapper.RadialMenu", { dynLoader.addListenerOnce("ready", e => { this.setLibReady(true); + + // hack to trigger fonts loading + const menu = this.createMenu(); + menu.show(); + menu.hide(); + resolve(true); }, this); diff --git a/services/static-webserver/client/source/class/osparc/wrapper/RocketPreview.js b/services/static-webserver/client/source/class/osparc/wrapper/RocketPreview.js new file mode 100644 index 000000000000..668b7195312c --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/wrapper/RocketPreview.js @@ -0,0 +1,168 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2025 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +/** + * @asset(rocketPreview/build/index.html) + * @asset(rocketPreview/build/**) + * @asset(rocketPreview/osparc-bridge.js) // index.html needs to include it + */ + +/** + * A qooxdoo wrapper for The Rocket Preview + * It loads the app in an iframe and communicates via postMessage. + * NOTES + * In order to make this work, the Rocket Preview build needs to include the osparc-bridge.js script. + * Add the following to the index.html + * + * Also, the include paths in the index.html need to be adjusted, so that are relative to the index.html. + */ + +qx.Class.define("osparc.wrapper.RocketPreview", { + extend: qx.ui.core.Widget, + + construct: function() { + this.base(arguments); + + this._setLayout(new qx.ui.layout.Grow()); + + this.__messageQueue = []; + + // force creation of the iframe child control + this._createChildControl("iframe"); + + window.addEventListener("message", this.__onMessage.bind(this)); + }, + + statics: { + INDEX_HTML: "rocketPreview/build/index.html", + + /** + * Returns true if the RocketPreview build folder is available as a resource. + */ + existsBuild: function() { + const rm = qx.util.ResourceManager.getInstance(); + // index.html is a good proxy for the whole build + const resourceId = this.INDEX_HTML; + return rm.has(resourceId); + }, + + openWindow: function() { + const win = new osparc.ui.window.Window(); + win.set({ + caption: "Rocket Preview", + width: 800, + height: 600, + minWidth: 400, + minHeight: 300, + showMinimize: false, + showMaximize: false, + resizable: true, + modal: true, + allowClose: true, + contentPadding: 0, + layout: new qx.ui.layout.Grow() + }); + + const rocketPreview = new osparc.wrapper.RocketPreview(); + win.add(rocketPreview); + win.center(); + win.open(); + return win; + } + }, + + properties: { + /** + * True once the iframe signals it's ready (osparc:ready). 
+ */ + rocketReady: { + check: "Boolean", + init: false, + event: "changeReady" + }, + }, + + members: { + __messageQueue: null, + __iframeEl: null, + + _createChildControlImpl: function(id) { + let control; + switch (id) { + case "iframe": + const src = qx.util.ResourceManager.getInstance().toUri(this.self().INDEX_HTML); + control = new qx.ui.embed.Html(""); + control.set({ + allowGrowX: true, + allowGrowY: true + }); + + // configure the real DOM iframe element + control.addListenerOnce("appear", () => { + const el = control.getContentElement().getDomElement().querySelector("iframe"); + el.src = src; + el.style.width = "100%"; + el.style.height = "100%"; + el.style.border = "0"; + this.__iframeEl = el; + }); + + this._add(control); + break; + } + return control || this.base(arguments, id); + }, + + // ---- Public API ---- + setTreeData: function(data) { + this.__send({type: "setTreeData", payload: data}); + }, + + setExtraData: function(data) { + this.__send({type: "setExtraData", payload: data}); + }, + + setImage: function(img) { + this.__send({type: "setImage", payload: img}); + }, + // -------------------- + + __send: function(msg) { + if (!this.isRocketReady()) { + this.__messageQueue.push(msg); + return; + } + this.__postMessage(msg); + }, + + __onMessage: function(ev) { + const data = ev.data; + if (data && data.type === "osparc:ready") { + this.setRocketReady(true); + while (this.__messageQueue.length) { + this.__postMessage(this.__messageQueue.shift()); + } + } + }, + + __postMessage: function(msg) { + if (this.__iframeEl && this.__iframeEl.contentWindow) { + this.__iframeEl.contentWindow.postMessage(msg, "*"); + } + }, + } +}); diff --git a/services/static-webserver/client/source/class/osparc/wrapper/Svg.js b/services/static-webserver/client/source/class/osparc/wrapper/Svg.js index 7177f74480c7..fa53674ea67d 100644 --- a/services/static-webserver/client/source/class/osparc/wrapper/Svg.js +++ b/services/static-webserver/client/source/class/osparc/wrapper/Svg.js @@ -67,7 +67,7 @@ qx.Class.define("osparc.wrapper.Svg", { drawCurve: function(draw, controls, dashed) { const edgeWidth = 3; const arrowSize = 4; - const edgeColor = qx.theme.manager.Color.getInstance().getTheme().colors["workbench-edge-comp-active"]; + const edgeColor = qx.theme.manager.Color.getInstance().getTheme().colors["workbench-edge"]; osparc.wrapper.Svg.curateCurveControls(controls); @@ -274,12 +274,105 @@ qx.Class.define("osparc.wrapper.Svg", { return rect; }, - updateText: function(representation, label) { + drawAnnotationConversation: function(draw, x = 50, y = 50, title = "Conversation") { + const color = qx.theme.manager.Color.getInstance().getTheme().colors["text"]; + const bubbleWidth = 150; + const bubbleHeight = 30; + const padding = 6; + + // Group to keep all elements together + const bubble = draw.group(); + bubble.move(x, y); + + // Rounded rectangle as the base + const rect = draw.rect(bubbleWidth, bubbleHeight) + .radius(4) + .fill("none") + .stroke({ + color, + width: 1.5, + }); + bubble.add(rect); + + // Icon (simple speech bubble using path or text) + const iconSize = 16; + const icon = draw.text('💬') + .font({ + size: iconSize + }) + .attr({ + cursor: "pointer" + }) + .move(padding, (bubbleHeight - iconSize) / 2); + bubble.add(icon); + + // Title text + const titleFontSize = 12; + const defaultFont = osparc.utils.Utils.getDefaultFont(); + const label = draw.text(title) + .font({ + fill: color, + size: titleFontSize, + family: defaultFont["family"], + anchor: 'start' + }) + .attr({ + cursor: 
"pointer" + }) + .move(padding + iconSize + 8, ((bubbleHeight - titleFontSize) / 2) - 3); + bubble.add(label); + bubble.label = label; // store reference for renaming + + // Compute available width for text + const availableWidth = bubbleWidth - padding * 2 - iconSize - 8; + + // Helper: truncate text with ellipsis + const fitTextWithEllipsis = (fullText, maxWidth) => { + let text = fullText; + label.text(text); + if (label.bbox().width <= maxWidth) { + return text + }; + + const ellipsis = '…'; + let low = 0; + let high = text.length; + // Binary search for the max fitting length + while (low < high) { + const mid = Math.floor((low + high) / 2); + label.text(text.slice(0, mid) + ellipsis); + if (label.bbox().width <= maxWidth) { + low = mid + 1; + } else { + high = mid; + } + } + return text.slice(0, low - 1) + ellipsis; + } + + // Truncate if needed + const fittedText = fitTextWithEllipsis(title, availableWidth); + label.text(fittedText); + + // Move label to proper position + label.move(padding + iconSize + 8, ((bubbleHeight - titleFontSize) / 2) - 3); + + bubble.back(); + + bubble["clickables"] = [icon, label]; + + return bubble; + }, + + updateText: function(representation, newText) { if (representation.type === "text") { - representation.text(label); + representation.text(newText); } else if (representation.type === "svg") { // nested - representation["textChild"].innerText = label; + representation["textChild"].innerText = newText; + } else if (representation.type === "g") { + // group + representation.label.text(newText); } }, @@ -298,7 +391,7 @@ qx.Class.define("osparc.wrapper.Svg", { /* / ANNOTATIONS */ drawDashedRect: function(draw, width, height, x, y) { - const edgeColor = qx.theme.manager.Color.getInstance().getTheme().colors["workbench-edge-comp-active"]; + const edgeColor = qx.theme.manager.Color.getInstance().getTheme().colors["workbench-edge"]; const rect = draw.rect(width, height) .fill("none") .stroke({ diff --git a/services/static-webserver/client/source/class/osparc/wrapper/WebSocket.js b/services/static-webserver/client/source/class/osparc/wrapper/WebSocket.js index 081d790c01d5..94f6925b1a45 100644 --- a/services/static-webserver/client/source/class/osparc/wrapper/WebSocket.js +++ b/services/static-webserver/client/source/class/osparc/wrapper/WebSocket.js @@ -118,7 +118,21 @@ qx.Class.define("osparc.wrapper.WebSocket", { nullable: false, init: 1000, check: "Number" - } + }, + + heartbeatInterval: { + check: "Number", + init: null, + nullable: true, + event: "heartbeatInterval" + }, + + appConnected: { + check: "Boolean", + init: false, + nullable: false, + event: "changeAppConnected" + }, }, /** Constructor @@ -202,6 +216,16 @@ qx.Class.define("osparc.wrapper.WebSocket", { this.fireDataEvent(event); }, this); }, this); + + this.on("set_heartbeat_emit_interval", ({ interval }) => { + if (interval) { + const newInterval = parseInt(interval) * 1000; + this.setHeartbeatInterval(newInterval); + + // we consider the app is connected when the backend set the heartbeat interval + this.setAppConnected(true); + } + }, this); }, this); dynLoader.start(); diff --git a/services/static-webserver/client/source/resource/intl-tel-input/css/intlTelInput.css b/services/static-webserver/client/source/resource/intl-tel-input/css/intlTelInput.css index cb158761e969..58d8f4109669 100644 --- a/services/static-webserver/client/source/resource/intl-tel-input/css/intlTelInput.css +++ b/services/static-webserver/client/source/resource/intl-tel-input/css/intlTelInput.css @@ -27,7 +27,10 @@ 
display: flex; align-items: center; height: 100%; - padding: 0 6px 0 8px; } + padding: 0 6px 0 8px; + /* osparc */ + padding: 0 4px 0 4px; + } .iti__arrow { margin-left: 6px; width: 0; @@ -53,8 +56,15 @@ overflow-y: scroll; -webkit-overflow-scrolling: touch; /* osparc */ - background-color: #202426; - margin: 2px; + font-size: 13px; + font-family: 'Manrope', sans-serif; + margin: 0px; + margin-left: -1px; + border-radius: 4px; + border-top-left-radius: 0px; + border: 0px; + background: var(--country-list-dropdown-bg); + color: var(--country-list-dropdown-text); } .iti__country-list--dropup { bottom: 100%; @@ -83,6 +93,8 @@ .iti--allow-dropdown input, .iti--allow-dropdown input[type=text], .iti--allow-dropdown input[type=tel], .iti--separate-dial-code input, .iti--separate-dial-code input[type=text], .iti--separate-dial-code input[type=tel] { padding-right: 6px; padding-left: 52px; + /* osparc */ + padding-left: 42px; margin-left: 0; } .iti--allow-dropdown .iti__flag-container, .iti--separate-dial-code .iti__flag-container { right: auto; @@ -930,3 +942,20 @@ .iti__flag.iti__np { background-color: transparent; } + +/* osparc */ +.iti input { + border: none !important; + border-bottom: 1px solid var(--tel-border-bottom-color) !important; + outline: none !important; + box-shadow: none !important; +} + +.iti input:focus { + border-bottom: 1px solid var(--tel-border-bottom-color-focused) !important; +} +/* +.iti input:invalid { + border-bottom: 1px solid red !important; +} +*/ diff --git a/services/static-webserver/client/source/resource/jsondiffpatch/jsondiffpatch-0.7.3.min.js b/services/static-webserver/client/source/resource/jsondiffpatch/jsondiffpatch-0.7.3.min.js new file mode 100644 index 000000000000..e54a06152722 --- /dev/null +++ b/services/static-webserver/client/source/resource/jsondiffpatch/jsondiffpatch-0.7.3.min.js @@ -0,0 +1,3 @@ +(()=>{var De=Object.defineProperty;var Ne=(i,e)=>{for(var r in e)De(i,r,{get:e[r],enumerable:!0})};var le={};Ne(le,{DiffPatcher:()=>C,clone:()=>Xe,create:()=>Se,dateReviver:()=>P,diff:()=>We,patch:()=>Be,reverse:()=>qe,unpatch:()=>Ve});function P(i,e){var r,n,t,s,o,d;if(typeof e!="string")return e;let a=/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(?:\.(\d*))?(Z|([+-])(\d{2}):(\d{2}))$/.exec(e);return a?new Date(Date.UTC(Number.parseInt((r=a[1])!==null&&r!==void 0?r:"0",10),Number.parseInt((n=a[2])!==null&&n!==void 0?n:"0",10)-1,Number.parseInt((t=a[3])!==null&&t!==void 0?t:"0",10),Number.parseInt((s=a[4])!==null&&s!==void 0?s:"0",10),Number.parseInt((o=a[5])!==null&&o!==void 0?o:"0",10),Number.parseInt((d=a[6])!==null&&d!==void 0?d:"0",10),(a[7]?Number.parseInt(a[7]):0)||0)):e}function Ee(i){var e;let r=/^\/(.*)\/([gimyu]*)$/.exec(i.toString());if(!r)throw new Error("Invalid RegExp");return new RegExp((e=r[1])!==null&&e!==void 0?e:"",r[2])}function D(i){if(typeof i!="object")return i;if(i===null)return null;if(Array.isArray(i))return i.map(D);if(i instanceof Date)return new Date(i.getTime());if(i instanceof RegExp)return Ee(i);let e={};for(let r in i)Object.prototype.hasOwnProperty.call(i,r)&&(e[r]=D(i[r]));return e}function me(i,e){if(i.length===0)throw new Error(e||"Expected a non-empty array")}function ye(i){return i.length>0}function ve(i){return i.length>=2}var ge=i=>i[i.length-1];var b=class{setResult(e){return this.result=e,this.hasResult=!0,this}exit(){return this.exiting=!0,this}push(e,r){return e.parent=this,typeof 
r<"u"&&(e.childName=r),e.root=this.root||this,e.options=e.options||this.options,this.children?(me(this.children),ge(this.children).next=e,this.children.push(e)):(this.children=[e],this.nextAfterChildren=this.next||null,this.next=e),e.next=this,this}};var M=class extends b{constructor(e,r){super(),this.left=e,this.right=r,this.pipe="diff"}prepareDeltaResult(e){var r,n,t,s;if(typeof e=="object"&&(!((r=this.options)===null||r===void 0)&&r.omitRemovedValues&&Array.isArray(e)&&e.length>1&&(e.length===2||e[2]===0||e[2]===3)&&(e[0]=0),!((n=this.options)===null||n===void 0)&&n.cloneDiffValues)){let o=typeof((t=this.options)===null||t===void 0?void 0:t.cloneDiffValues)=="function"?(s=this.options)===null||s===void 0?void 0:s.cloneDiffValues:D;typeof e[0]=="object"&&(e[0]=o(e[0])),typeof e[1]=="object"&&(e[1]=o(e[1]))}return e}setResult(e){return this.prepareDeltaResult(e),super.setResult(e)}},R=M;var k=class extends b{constructor(e,r){super(),this.left=e,this.delta=r,this.pipe="patch"}},A=k;var L=class extends b{constructor(e){super(),this.delta=e,this.pipe="reverse"}},x=L;var H=class{constructor(e){this.name=e,this.filters=[]}process(e){if(!this.processor)throw new Error("add this pipe to a processor before using it");let r=this.debug,n=this.filters.length,t=e;for(let s=0;se.filterName)}after(e,...r){let n=this.indexOf(e);return this.filters.splice(n+1,0,...r),this}before(e,...r){let n=this.indexOf(e);return this.filters.splice(n,0,...r),this}replace(e,...r){let n=this.indexOf(e);return this.filters.splice(n,1,...r),this}remove(e){let r=this.indexOf(e);return this.filters.splice(r,1),this}clear(){return this.filters.length=0,this}shouldHaveResult(e){return e===!1?(this.resultCheck=null,this):this.resultCheck?this:(this.resultCheck=r=>{if(!r.hasResult){console.log(r);let n=new Error(`${this.name} failed`);throw n.noResult=!0,n}},this)}},j=H;var S=class{constructor(e){this.selfOptions=e||{},this.pipes={}}options(e){return e&&(this.selfOptions=e),this.selfOptions}pipe(e,r){let n=r;if(typeof e=="string"){if(typeof n>"u")return this.pipes[e];this.pipes[e]=n}if(e&&e.name){if(n=e,n.processor===this)return n;this.pipes[n.name]=n}if(!n)throw new Error(`pipe is not defined: ${e}`);return n.processor=this,n}process(e,r){let n=e;n.options=this.options();let t=r||e.pipe||"default",s;for(;t;)typeof n.nextAfterChildren<"u"&&(n.next=n.nextAfterChildren,n.nextAfterChildren=null),typeof t=="string"&&(t=this.pipe(t)),t.process(n),s=t,t=null,n&&n.next&&(n=n.next,t=n.pipe||s);return n.hasResult?n.result:void 0}},we=S;var Ae=(i,e,r,n)=>i[r]===e[n],xe=(i,e,r,n)=>{var t,s,o;let d=i.length,a=e.length,u,h,c=new Array(d+1);for(u=0;u{let t=e.length,s=r.length,o={sequence:[],indices1:[],indices2:[]};for(;t!==0&&s!==0;){if(i.match===void 0)throw new Error("LCS matrix match function is undefined");if(i.match(e,r,t-1,s-1,n))o.sequence.unshift(e[t-1]),o.indices1.unshift(t-1),o.indices2.unshift(s-1),--t,--s;else{let a=i[t];if(a===void 0)throw new Error("LCS matrix row is undefined");let u=a[s-1];if(u===void 0)throw new Error("LCS matrix value is undefined");let h=i[t-1];if(h===void 0)throw new Error("LCS matrix row is undefined");let c=h[s];if(c===void 0)throw new Error("LCS matrix value is undefined");u>c?--s:--t}}return o},Te=(i,e,r,n)=>{let t=n||{},s=xe(i,e,r||Ae,t);return Ie(s,i,e,t)},Re={get:Te};var N=3;function $e(i,e,r,n){for(let t=0;t"u"&&(t.hashCache1[r]=a=d(s,r)),typeof a>"u")return!1;t.hashCache2=t.hashCache2||[];let u=t.hashCache2[n];return typeof u>"u"&&(t.hashCache2[n]=u=d(o,n)),typeof u>"u"?!1:a===u}var 
W=function(e){var r,n,t,s,o;if(!e.leftIsArray)return;let d={objectHash:(r=e.options)===null||r===void 0?void 0:r.objectHash,matchByPosition:(n=e.options)===null||n===void 0?void 0:n.matchByPosition},a=0,u=0,h,c,f,l=e.left,p=e.right,g=l.length,w=p.length,F;for(g>0&&w>0&&!d.objectHash&&typeof d.matchByPosition!="boolean"&&(d.matchByPosition=!$e(l,p,g,w));a0)for(let T=0;Te[i]-r[i]}},B=function(e){var r;if(!e.nested)return;let n=e.delta;if(n._t!=="a")return;let t,s,o=n,d=e.left,a=[],u=[],h=[];for(t in o)if(t!=="_t")if(t[0]==="_"){let l=t;if(o[l]!==void 0&&(o[l][2]===0||o[l][2]===N))a.push(Number.parseInt(t.slice(1),10));else throw new Error(`only removal or move can be applied at original array indices, invalid diff type: ${(r=o[l])===null||r===void 0?void 0:r[2]}`)}else{let l=t;o[l].length===1?u.push({index:Number.parseInt(l,10),value:o[l][0]}):h.push({index:Number.parseInt(l,10),delta:o[l]})}for(a=a.sort(be.numerically),t=a.length-1;t>=0;t--){if(s=a[t],s===void 0)continue;let l=o[`_${s}`],p=d.splice(s,1)[0];l?.[2]===N&&u.push({index:l[1],value:p})}u=u.sort(be.numericallyBy("index"));let c=u.length;for(t=0;t0)for(t=0;t{if(typeof e=="string"&&e[0]==="_")return Number.parseInt(e.substring(1),10);if(Array.isArray(r)&&r[2]===0)return`_${e}`;let n=+e;for(let t in i){let s=i[t];if(Array.isArray(s))if(s[2]===N){let o=Number.parseInt(t.substring(1),10),d=s[1];if(d===+e)return o;o<=n&&d>n?n++:o>=n&&d{if(!i||!i.children)return;let e=i.delta;if(e._t!=="a")return;let r=e,n=i.children.length,t={_t:"a"};for(let s=0;s"u"){if(o.childName===void 0)throw new Error("child.childName is undefined");d=je(r,o.childName,o.result)}t[d]!==o.result&&(t[d]=o.result)}i.setResult(t).exit()};X.filterName="arraysCollectChildren";var U=function(e){e.left instanceof Date?(e.right instanceof Date?e.left.getTime()!==e.right.getTime()?e.setResult([e.left,e.right]):e.setResult(void 0):e.setResult([e.left,e.right]),e.exit()):e.right instanceof Date&&e.setResult([e.left,e.right]).exit()};U.filterName="dates";var G=i=>{if(!i||!i.children)return;let e=i.children.length,r=i.result;for(let n=0;n"u")){if(r=r||{},t.childName===void 0)throw new Error("diff child.childName is undefined");r[t.childName]=t.result}}r&&i.leftIsArray&&(r._t="a"),i.setResult(r).exit()};G.filterName="collectChildren";var J=i=>{var e;if(i.leftIsArray||i.leftType!=="object")return;let r=i.left,n=i.right,t=(e=i.options)===null||e===void 0?void 0:e.propertyFilter;for(let s in r){if(!Object.prototype.hasOwnProperty.call(r,s)||t&&!t(s,i))continue;let o=new R(r[s],n[s]);i.push(o,s)}for(let s in n)if(Object.prototype.hasOwnProperty.call(n,s)&&!(t&&!t(s,i))&&typeof r[s]>"u"){let o=new R(void 0,n[s]);i.push(o,s)}if(!i.children||i.children.length===0){i.setResult(void 0).exit();return}i.exit()};J.filterName="objects";var Y=function(e){if(!e.nested)return;let r=e.delta;if(r._t)return;let n=r;for(let t in n){let s=new A(e.left[t],n[t]);e.push(s,t)}e.exit()};Y.filterName="objects";var Z=function(e){if(!e||!e.children||e.delta._t)return;let n=e.left,t=e.children.length;for(let s=0;s{if(!i||!i.children||i.delta._t)return;let r=i.children.length,n={};for(let t=0;tn.patch_toText(n.patch_make(t,s)),patch:(t,s)=>{let o=n.patch_apply(n.patch_fromText(s),t);for(let d of o[1])if(!d){let a=new Error("text patch failed");throw a.textPatchFailed=!0,a}return o[0]}}}return Q}var ee=function(e){var r,n;if(e.leftType!=="string")return;let t=e.left,s=e.right,o=((n=(r=e.options)===null||r===void 0?void 0:r.textDiff)===null||n===void 0?void 0:n.minLength)||60;if(t.length{var e,r,n;let t=/^@@ 
+-(\d+),(\d+) +\+(\d+),(\d+) +@@$/,s=i.split(` +`);for(let o=0;o"u"){if(typeof e.right=="function")throw new Error("functions are not supported");e.setResult([e.right]).exit();return}if(typeof e.right>"u"){e.setResult([e.left,0,0]).exit();return}if(typeof e.left=="function"||typeof e.right=="function")throw new Error("functions are not supported");if(e.leftType=e.left===null?"null":typeof e.left,e.rightType=e.right===null?"null":typeof e.right,e.leftType!==e.rightType){e.setResult([e.left,e.right]).exit();return}if(e.leftType==="boolean"||e.leftType==="number"){e.setResult([e.left,e.right]).exit();return}if(e.leftType==="object"&&(e.leftIsArray=Array.isArray(e.left)),e.rightType==="object"&&(e.rightIsArray=Array.isArray(e.right)),e.leftIsArray!==e.rightIsArray){e.setResult([e.left,e.right]).exit();return}e.left instanceof RegExp&&(e.right instanceof RegExp?e.setResult([e.left.toString(),e.right.toString()]).exit():e.setResult([e.left,e.right]).exit())};ie.filterName="trivial";var ne=function(e){if(typeof e.delta>"u"){e.setResult(e.left).exit();return}if(e.nested=!Array.isArray(e.delta),e.nested)return;let r=e.delta;if(r.length===1){e.setResult(r[0]).exit();return}if(r.length===2){if(e.left instanceof RegExp){let n=/^\/(.*)\/([gimyu]+)$/.exec(r[1]);if(n?.[1]){e.setResult(new RegExp(n[1],n[2])).exit();return}}e.setResult(r[1]).exit();return}r.length===3&&r[2]===0&&e.setResult(void 0).exit()};ne.filterName="trivial";var se=function(e){if(typeof e.delta>"u"){e.setResult(e.delta).exit();return}if(e.nested=!Array.isArray(e.delta),e.nested)return;let r=e.delta;if(r.length===1){e.setResult([r[0],0,0]).exit();return}if(r.length===2){e.setResult([r[1],r[0]]).exit();return}r.length===3&&r[2]===0&&e.setResult([r[0]]).exit()};se.filterName="trivial";var oe=class{constructor(e){this.processor=new we(e),this.processor.pipe(new j("diff").append(G,ie,U,ee,J,W).shouldHaveResult()),this.processor.pipe(new j("patch").append(Z,V,ne,te,Y,B).shouldHaveResult()),this.processor.pipe(new j("reverse").append(K,X,se,re,z,q).shouldHaveResult())}options(e){return this.processor.options(e)}diff(e,r){return this.processor.process(new R(e,r))}patch(e,r){return this.processor.process(new A(e,r))}reverse(e){return this.processor.process(new x(e))}unpatch(e,r){return this.patch(e,this.reverse(r))}clone(e){return D(e)}},C=oe;function Se(i){return new C(i)}var y;function We(i,e){return y||(y=new C),y.diff(i,e)}function Be(i,e){return y||(y=new C),y.patch(i,e)}function Ve(i,e){return y||(y=new C),y.unpatch(i,e)}function qe(i){return y||(y=new C),y.reverse(i)}function Xe(i){return y||(y=new C),y.clone(i)}var _e=i=>{let e=[],r=[...i],n=0;for(;r.length>0;){let{next:t,extra:s}=Ue(r);if(t.from!==t.to){e.push({from:t.from,to:t.to});for(let o of r){if(t.from===o.from)throw new Error("trying to move the same item twice");t.from100)throw new Error("failed to apply all array moves");r.push(s)}}return e},Ue=i=>{if(!ye(i))throw new Error("no more moves to make");if(!ve(i))return{next:i.shift()};let e=i[0],r=-1,n=i[0],t=-1;for(let f=0;fn.to)&&(n=l,t=f))}let s=i[0],o=-1,d=i[0],a=-1;for(let f=0;fd.from)&&(d=l,a=f))}if(o<0||e.tod.from||n.to>n.from&&n.to===d.from){let f=i.splice(t,1)[0];if(!f)throw new Error("failed to get next move");return{next:f}}let u=i.splice(o,1)[0];if(!u)throw new Error("failed to get next move");let h=i.reduce((f,l)=>f+((l.to0;){let t=n.pop();if(t===void 
0||!t.delta)break;if(Array.isArray(t.delta)){if(t.delta.length===1&&r.push({op:E.add,path:t.path,value:t.delta[0]}),t.delta.length===2&&r.push({op:E.replace,path:t.path,value:t.delta[1]}),t.delta[2]===0&&r.push({op:E.remove,path:t.path}),t.delta[2]===2)throw new Error("JSONPatch (RFC 6902) doesn't support text diffs, disable textDiff option")}else if(t.delta._t==="a"){let s=t.delta,o=[],d=[],a=[],u=[];for(let c of Object.keys(s))if(c!=="_t")if(c.substring(0,1)==="_"){let f=Number.parseInt(c.substring(1)),l=s[c];if(!l)continue;Array.isArray(l)?l.length===3&&(l[2]===3?d.push({from:f,to:l[1]}):l[2]===0&&o.push(f)):u.push({to:f,delta:l})}else{let f=s[c],l=Number.parseInt(c);if(f){if(!Array.isArray(f))u.push({to:l,delta:f});else if(f.length===1)a.push({to:l,value:f[0]});else if(f.length===2)u.push({to:l,delta:f});else if(f.length===3&&f[2]===3)throw new Error("JSONPatch (RFC 6902) doesn't support text diffs, disable textDiff option")}}a.sort((c,f)=>c.to-f.to),o.sort((c,f)=>f-c);for(let c of o)if(r.push({op:E.remove,path:`${t.path}/${c}`}),d.length>0)for(let f of d)c0){let c=[...a].reverse();for(let l of c)for(let p of d)l.to0&&n.push(...h.reverse())}else for(let s of Object.keys(t.delta).reverse()){let o=t.delta[s];n.push({path:`${t.path}/${Ge(s)}`,delta:o})}}return r}},Ce=fe;var Ge=i=>typeof i!="string"?i.toString():i.indexOf("/")===-1&&i.indexOf("~")===-1?i:i.replace(/~/g,"~0").replace(/\//g,"~1");window.jsondiffpatch=le;window.jsondiffpatch.formatters={jsonpatch:Ce};})(); diff --git a/services/static-webserver/client/source/resource/jsondiffpatch/jsondiffpatch.min.js b/services/static-webserver/client/source/resource/jsondiffpatch/jsondiffpatch.min.js deleted file mode 100644 index c7297e6fff8d..000000000000 --- a/services/static-webserver/client/source/resource/jsondiffpatch/jsondiffpatch.min.js +++ /dev/null @@ -1,36 +0,0 @@ -!function(e){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=e();else if("function"==typeof define&&define.amd)define([],e);else{var f;"undefined"!=typeof window?f=window:"undefined"!=typeof global?f=global:"undefined"!=typeof self&&(f=self),f.jsondiffpatch=e()}}(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;oa;a++)for(var n=e[a],l=0;i>l;l++){var s=t[l];if(a!==l&&n===s)return!0}}function matchItems(e,t,r,i,a){var n=e[r],l=t[i];if(n===l)return!0;if("object"!=typeof n||"object"!=typeof l)return!1;var s=a.objectHash;if(!s)return a.matchByPosition&&r===i;var o,f;return"number"==typeof r?(a.hashCache1=a.hashCache1||[],o=a.hashCache1[r],"undefined"==typeof o&&(a.hashCache1[r]=o=s(n,r))):o=s(n),"undefined"==typeof o?!1:("number"==typeof i?(a.hashCache2=a.hashCache2||[],f=a.hashCache2[i],"undefined"==typeof f&&(a.hashCache2[i]=f=s(l,i))):f=s(l),"undefined"==typeof f?!1:o===f)}var DiffContext=require("../contexts/diff").DiffContext,PatchContext=require("../contexts/patch").PatchContext,ReverseContext=require("../contexts/reverse").ReverseContext,lcs=require("./lcs"),ARRAY_MOVE=3,isArray="function"==typeof Array.isArray?Array.isArray:function(e){return e instanceof Array},arrayIndexOf="function"==typeof Array.prototype.indexOf?function(e,t){return 
e.indexOf(t)}:function(e,t){for(var r=e.length,i=0;r>i;i++)if(e[i]===t)return i;return-1},diffFilter=function(e){if(e.leftIsArray){var t,r,i,a,n={objectHash:e.options&&e.options.objectHash,matchByPosition:e.options&&e.options.matchByPosition},l=0,s=0,o=e.left,f=e.right,c=o.length,h=f.length;for(c>0&&h>0&&!n.objectHash&&"boolean"!=typeof n.matchByPosition&&(n.matchByPosition=!arraysHaveMatchByRef(o,f,c,h));c>l&&h>l&&matchItems(o,f,l,l,n);)t=l,a=new DiffContext(e.left[t],e.right[t]),e.push(a,t),l++;for(;c>s+l&&h>s+l&&matchItems(o,f,c-1-s,h-1-s,n);)r=c-1-s,i=h-1-s,a=new DiffContext(e.left[r],e.right[i]),e.push(a,i),s++;var u;if(l+s===c){if(c===h)return void e.setResult(void 0).exit();for(u=u||{_t:"a"},t=l;h-s>t;t++)u[t]=[f[t]];return void e.setResult(u).exit()}if(l+s===h){for(u=u||{_t:"a"},t=l;c-s>t;t++)u["_"+t]=[o[t],0,0];return void e.setResult(u).exit()}delete n.hashCache1,delete n.hashCache2;var d=o.slice(l,c-s),v=f.slice(l,h-s),p=lcs.get(d,v,matchItems,n),y=[];for(u=u||{_t:"a"},t=l;c-s>t;t++)arrayIndexOf(p.indices1,t-l)<0&&(u["_"+t]=[o[t],0,0],y.push(t));var x=!0;e.options&&e.options.arrays&&e.options.arrays.detectMove===!1&&(x=!1);var m=!1;e.options&&e.options.arrays&&e.options.arrays.includeValueOnMove&&(m=!0);var C=y.length;for(t=l;h-s>t;t++){var R=arrayIndexOf(p.indices2,t-l);if(0>R){var A=!1;if(x&&C>0)for(var _=0;C>_;_++)if(r=y[_],matchItems(d,v,r-l,t-l,n)){u["_"+r].splice(1,2,t,ARRAY_MOVE),m||(u["_"+r][0]=""),i=t,a=new DiffContext(e.left[r],e.right[i]),e.push(a,i),y.splice(_,1),A=!0;break}A||(u[t]=[f[t]])}else r=p.indices1[R]+l,i=p.indices2[R]+l,a=new DiffContext(e.left[r],e.right[i]),e.push(a,i)}e.setResult(u).exit()}};diffFilter.filterName="arrays";var compare={numerically:function(e,t){return e-t},numericallyBy:function(e){return function(t,r){return t[e]-r[e]}}},patchFilter=function(e){if(e.nested&&"a"===e.delta._t){var t,r,i=e.delta,a=e.left,n=[],l=[],s=[];for(t in i)if("_t"!==t)if("_"===t[0]){if(0!==i[t][2]&&i[t][2]!==ARRAY_MOVE)throw new Error("only removal or move can be applied at original array indices, invalid diff type: "+i[t][2]);n.push(parseInt(t.slice(1),10))}else 1===i[t].length?l.push({index:parseInt(t,10),value:i[t][0]}):s.push({index:parseInt(t,10),delta:i[t]});for(n=n.sort(compare.numerically),t=n.length-1;t>=0;t--){r=n[t];var o=i["_"+r],f=a.splice(r,1)[0];o[2]===ARRAY_MOVE&&l.push({index:o[1],value:f})}l=l.sort(compare.numericallyBy("index"));var c=l.length;for(t=0;c>t;t++){var h=l[t];a.splice(h.index,0,h.value)}var u,d=s.length;if(d>0)for(t=0;d>t;t++){var v=s[t];u=new PatchContext(e.left[v.index],v.delta),e.push(u,v.index)}return e.children?void e.exit():void e.setResult(e.left).exit()}};patchFilter.filterName="arrays";var collectChildrenPatchFilter=function(e){if(e&&e.children&&"a"===e.delta._t){for(var t,r=e.children.length,i=0;r>i;i++)t=e.children[i],e.left[t.childName]=t.result;e.setResult(e.left).exit()}};collectChildrenPatchFilter.filterName="arraysCollectChildren";var reverseFilter=function(e){if(!e.nested)return void(e.delta[2]===ARRAY_MOVE&&(e.newName="_"+e.delta[1],e.setResult([e.delta[0],parseInt(e.childName.substr(1),10),ARRAY_MOVE]).exit()));if("a"===e.delta._t){var t,r;for(t in e.delta)"_t"!==t&&(r=new ReverseContext(e.delta[t]),e.push(r,t));e.exit()}};reverseFilter.filterName="arrays";var reverseArrayDeltaIndex=function(e,t,r){if("string"==typeof t&&"_"===t[0])return parseInt(t.substr(1),10);if(isArray(r)&&0===r[2])return"_"+t;var i=+t;for(var a in e){var n=e[a];if(isArray(n))if(n[2]===ARRAY_MOVE){var 
l=parseInt(a.substr(1),10),s=n[1];if(s===+t)return l;i>=l&&s>i?i++:l>=i&&i>s&&i--}else if(0===n[2]){var o=parseInt(a.substr(1),10);i>=o&&i++}else 1===n.length&&i>=a&&i--}return i},collectChildrenReverseFilter=function(e){if(e&&e.children&&"a"===e.delta._t){for(var t,r=e.children.length,i={_t:"a"},a=0;r>a;a++){t=e.children[a];var n=t.newName;"undefined"==typeof n&&(n=reverseArrayDeltaIndex(e.delta,t.childName,t.result)),i[n]!==t.result&&(i[n]=t.result)}e.setResult(i).exit()}};collectChildrenReverseFilter.filterName="arraysCollectChildren",exports.diffFilter=diffFilter,exports.patchFilter=patchFilter,exports.collectChildrenPatchFilter=collectChildrenPatchFilter,exports.reverseFilter=reverseFilter,exports.collectChildrenReverseFilter=collectChildrenReverseFilter; -},{"../contexts/diff":4,"../contexts/patch":5,"../contexts/reverse":6,"./lcs":12}],11:[function(require,module,exports){ -var diffFilter=function(t){t.left instanceof Date?(t.right instanceof Date?t.left.getTime()!==t.right.getTime()?t.setResult([t.left,t.right]):t.setResult(void 0):t.setResult([t.left,t.right]),t.exit()):t.right instanceof Date&&t.setResult([t.left,t.right]).exit()};diffFilter.filterName="dates",exports.diffFilter=diffFilter; -},{}],12:[function(require,module,exports){ -var defaultMatch=function(t,e,n,r){return t[n]===e[r]},lengthMatrix=function(t,e,n,r){var c,a,i=t.length,u=e.length,f=[i+1];for(c=0;i+1>c;c++)for(f[c]=[u+1],a=0;u+1>a;a++)f[c][a]=0;for(f.match=n,c=1;i+1>c;c++)for(a=1;u+1>a;a++)n(t,e,c-1,a-1,r)?f[c][a]=f[c-1][a-1]+1:f[c][a]=Math.max(f[c-1][a],f[c][a-1]);return f},backtrack=function(t,e,n,r,c,a){if(0===r||0===c)return{sequence:[],indices1:[],indices2:[]};if(t.match(e,n,r-1,c-1,a)){var i=backtrack(t,e,n,r-1,c-1,a);return i.sequence.push(e[r-1]),i.indices1.push(r-1),i.indices2.push(c-1),i}return t[r][c-1]>t[r-1][c]?backtrack(t,e,n,r,c-1,a):backtrack(t,e,n,r-1,c,a)},get=function(t,e,n,r){r=r||{};var c=lengthMatrix(t,e,n||defaultMatch,r),a=backtrack(c,t,e,t.length,e.length,r);return"string"==typeof t&&"string"==typeof e&&(a.sequence=a.sequence.join("")),a};exports.get=get; -},{}],13:[function(require,module,exports){ -var DiffContext=require("../contexts/diff").DiffContext,PatchContext=require("../contexts/patch").PatchContext,ReverseContext=require("../contexts/reverse").ReverseContext,collectChildrenDiffFilter=function(e){if(e&&e.children){for(var t,l=e.children.length,r=e.result,i=0;l>i;i++)t=e.children[i],"undefined"!=typeof t.result&&(r=r||{},r[t.childName]=t.result);r&&e.leftIsArray&&(r._t="a"),e.setResult(r).exit()}};collectChildrenDiffFilter.filterName="collectChildren";var objectsDiffFilter=function(e){if(!e.leftIsArray&&"object"===e.leftType){var t,l,r=e.options.propertyFilter;for(t in e.left)Object.prototype.hasOwnProperty.call(e.left,t)&&(r&&!r(t,e)||(l=new DiffContext(e.left[t],e.right[t]),e.push(l,t)));for(t in e.right)Object.prototype.hasOwnProperty.call(e.right,t)&&(r&&!r(t,e)||"undefined"==typeof e.left[t]&&(l=new DiffContext(void 0,e.right[t]),e.push(l,t)));return e.children&&0!==e.children.length?void e.exit():void e.setResult(void 0).exit()}};objectsDiffFilter.filterName="objects";var patchFilter=function(e){if(e.nested&&!e.delta._t){var t,l;for(t in e.delta)l=new PatchContext(e.left[t],e.delta[t]),e.push(l,t);e.exit()}};patchFilter.filterName="objects";var collectChildrenPatchFilter=function(e){if(e&&e.children&&!e.delta._t){for(var t,l=e.children.length,r=0;l>r;r++)t=e.children[r],Object.prototype.hasOwnProperty.call(e.left,t.childName)&&void 0===t.result?delete 
e.left[t.childName]:e.left[t.childName]!==t.result&&(e.left[t.childName]=t.result);e.setResult(e.left).exit()}};collectChildrenPatchFilter.filterName="collectChildren";var reverseFilter=function(e){if(e.nested&&!e.delta._t){var t,l;for(t in e.delta)l=new ReverseContext(e.delta[t]),e.push(l,t);e.exit()}};reverseFilter.filterName="objects";var collectChildrenReverseFilter=function(e){if(e&&e.children&&!e.delta._t){for(var t,l=e.children.length,r={},i=0;l>i;i++)t=e.children[i],r[t.childName]!==t.result&&(r[t.childName]=t.result);e.setResult(r).exit()}};collectChildrenReverseFilter.filterName="collectChildren",exports.collectChildrenDiffFilter=collectChildrenDiffFilter,exports.objectsDiffFilter=objectsDiffFilter,exports.patchFilter=patchFilter,exports.collectChildrenPatchFilter=collectChildrenPatchFilter,exports.reverseFilter=reverseFilter,exports.collectChildrenReverseFilter=collectChildrenReverseFilter; -},{"../contexts/diff":4,"../contexts/patch":5,"../contexts/reverse":6}],14:[function(require,module,exports){ -var TEXT_DIFF=2,DEFAULT_MIN_LENGTH=60,cachedDiffPatch=null,getDiffMatchPatch=function(t){if(!cachedDiffPatch){var e;if("undefined"!=typeof diff_match_patch)e="function"==typeof diff_match_patch?new diff_match_patch:new diff_match_patch.diff_match_patch;else if("function"==typeof require)try{var i="diff_match_patch_uncompressed",f=require("../../public/external/"+i);e=new f.diff_match_patch}catch(r){e=null}if(!e){if(!t)return null;var a=new Error("text diff_match_patch library not found");throw a.diff_match_patch_not_found=!0,a}cachedDiffPatch={diff:function(t,i){return e.patch_toText(e.patch_make(t,i))},patch:function(t,i){for(var f=e.patch_apply(e.patch_fromText(i),t),r=0;re;e++){r=f[e];var o=r.slice(0,1);"@"===o?(h=d.exec(r),c=e,l=null,n=null,f[c]="@@ -"+h[3]+","+h[4]+" +"+h[1]+","+h[2]+" @@"):"+"===o?(l=e,f[e]="-"+f[e].slice(1),"+"===f[e-1].slice(0,1)&&(a=f[e],f[e]=f[e-1],f[e-1]=a)):"-"===o&&(n=e,f[e]="+"+f[e].slice(1))}return f.join("\n")},reverseFilter=function(t){t.nested||t.delta[2]===TEXT_DIFF&&t.setResult([textDeltaReverse(t.delta[0]),0,TEXT_DIFF]).exit()};reverseFilter.filterName="texts",exports.diffFilter=diffFilter,exports.patchFilter=patchFilter,exports.reverseFilter=reverseFilter; -},{}],15:[function(require,module,exports){ -var isArray="function"==typeof Array.isArray?Array.isArray:function(e){return e instanceof Array},diffFilter=function(e){if(e.left===e.right)return void e.setResult(void 0).exit();if("undefined"==typeof e.left){if("function"==typeof e.right)throw new Error("functions are not supported");return void e.setResult([e.right]).exit()}if("undefined"==typeof e.right)return void e.setResult([e.left,0,0]).exit();if("function"==typeof e.left||"function"==typeof e.right)throw new Error("functions are not supported");if(e.leftType=null===e.left?"null":typeof e.left,e.rightType=null===e.right?"null":typeof e.right,e.leftType!==e.rightType)return void e.setResult([e.left,e.right]).exit();if("boolean"===e.leftType||"number"===e.leftType)return void e.setResult([e.left,e.right]).exit();if("object"===e.leftType&&(e.leftIsArray=isArray(e.left)),"object"===e.rightType&&(e.rightIsArray=isArray(e.right)),e.leftIsArray!==e.rightIsArray)return void e.setResult([e.left,e.right]).exit();if(e.left instanceof RegExp){if(!(e.right instanceof RegExp))return void e.setResult([e.left,e.right]).exit();e.setResult([e.left.toString(),e.right.toString()]).exit()}};diffFilter.filterName="trivial";var patchFilter=function(e){if("undefined"==typeof e.delta)return void 
e.setResult(e.left).exit();if(e.nested=!isArray(e.delta),!e.nested){if(1===e.delta.length)return void e.setResult(e.delta[0]).exit();if(2===e.delta.length){if(e.left instanceof RegExp){var t=/^\/(.*)\/([gimyu]+)$/.exec(e.delta[1]);if(t)return void e.setResult(new RegExp(t[1],t[2])).exit()}return void e.setResult(e.delta[1]).exit()}return 3===e.delta.length&&0===e.delta[2]?void e.setResult(void 0).exit():void 0}};patchFilter.filterName="trivial";var reverseFilter=function(e){return"undefined"==typeof e.delta?void e.setResult(e.delta).exit():(e.nested=!isArray(e.delta),e.nested?void 0:1===e.delta.length?void e.setResult([e.delta[0],0,0]).exit():2===e.delta.length?void e.setResult([e.delta[1],e.delta[0]]).exit():3===e.delta.length&&0===e.delta[2]?void e.setResult([e.delta[0]]).exit():void 0)};reverseFilter.filterName="trivial",exports.diffFilter=diffFilter,exports.patchFilter=patchFilter,exports.reverseFilter=reverseFilter; -},{}],16:[function(require,module,exports){ -var Pipe=function(t){this.name=t,this.filters=[]};Pipe.prototype.process=function(t){if(!this.processor)throw new Error("add this pipe to a processor before using it");for(var e=this.debug,r=this.filters.length,i=t,s=0;r>s;s++){var o=this.filters[s];if(e&&this.log("filter: "+o.filterName),o(i),"object"==typeof i&&i.exiting){i.exiting=!1;break}}!i.next&&this.resultCheck&&this.resultCheck(i)},Pipe.prototype.log=function(t){console.log("[jsondiffpatch] "+this.name+" pipe, "+t)},Pipe.prototype.append=function(){return this.filters.push.apply(this.filters,arguments),this},Pipe.prototype.prepend=function(){return this.filters.unshift.apply(this.filters,arguments),this},Pipe.prototype.indexOf=function(t){if(!t)throw new Error("a filter name is required");for(var e=0;e diff --git a/services/static-webserver/client/source/resource/osparc/icons/circle-info-text.svg b/services/static-webserver/client/source/resource/osparc/icons/circle-info-white.svg similarity index 100% rename from services/static-webserver/client/source/resource/osparc/icons/circle-info-text.svg rename to services/static-webserver/client/source/resource/osparc/icons/circle-info-white.svg diff --git a/services/static-webserver/client/source/resource/osparc/icons/circle-xmark-black.svg b/services/static-webserver/client/source/resource/osparc/icons/circle-xmark-black.svg new file mode 100644 index 000000000000..152dda4fdc43 --- /dev/null +++ b/services/static-webserver/client/source/resource/osparc/icons/circle-xmark-black.svg @@ -0,0 +1 @@ + diff --git a/services/static-webserver/client/source/resource/osparc/icons/circle-xmark-solid.svg b/services/static-webserver/client/source/resource/osparc/icons/circle-xmark-red.svg similarity index 100% rename from services/static-webserver/client/source/resource/osparc/icons/circle-xmark-solid.svg rename to services/static-webserver/client/source/resource/osparc/icons/circle-xmark-red.svg diff --git a/services/static-webserver/client/source/resource/osparc/icons/circle-xmark-text.svg b/services/static-webserver/client/source/resource/osparc/icons/circle-xmark-white.svg similarity index 100% rename from services/static-webserver/client/source/resource/osparc/icons/circle-xmark-text.svg rename to services/static-webserver/client/source/resource/osparc/icons/circle-xmark-white.svg diff --git a/services/static-webserver/client/source/resource/osparc/icons/file-download-black.svg b/services/static-webserver/client/source/resource/osparc/icons/file-download-black.svg new file mode 100644 index 000000000000..edf3807838f1 --- /dev/null 
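The vendored jsondiffpatch build above is swapped from the old UMD bundle to the minified 0.7.3 build, which still attaches the library to `window.jsondiffpatch` (and adds a `formatters.jsonpatch` helper). A small usage sketch of that global, assuming the bundle has already been loaded by the page:

```js
// Assumes jsondiffpatch-0.7.3.min.js has been loaded and set window.jsondiffpatch.
const jdp = window.jsondiffpatch;

const before = { name: "study", nodes: ["a", "b"] };
const after = { name: "project", nodes: ["a", "b", "c"] };

// Compute a delta describing the change from `before` to `after`.
const delta = jdp.diff(before, after);

// Apply the delta to a copy of the original to reproduce the new state...
const patched = jdp.patch(jdp.clone(before), delta);

// ...or revert it again from the patched object.
const reverted = jdp.unpatch(jdp.clone(patched), delta);

console.log(JSON.stringify(patched));  // matches `after`
console.log(JSON.stringify(reverted)); // matches `before`
```

The `patch`/`unpatch` calls mutate their first argument, which is why the sketch clones before applying.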
+++ b/services/static-webserver/client/source/resource/osparc/icons/file-download-black.svg @@ -0,0 +1 @@ + diff --git a/services/static-webserver/client/source/resource/osparc/icons/file-download-text.svg b/services/static-webserver/client/source/resource/osparc/icons/file-download-white.svg similarity index 100% rename from services/static-webserver/client/source/resource/osparc/icons/file-download-text.svg rename to services/static-webserver/client/source/resource/osparc/icons/file-download-white.svg diff --git a/services/static-webserver/client/source/resource/osparc/tours/osparc_tours.json b/services/static-webserver/client/source/resource/osparc/tours/osparc_tours.json index d320f6b74102..128877ec1d36 100644 --- a/services/static-webserver/client/source/resource/osparc/tours/osparc_tours.json +++ b/services/static-webserver/client/source/resource/osparc/tours/osparc_tours.json @@ -1,8 +1,8 @@ { - "studies": { - "id": "studies", - "name": "Studies", - "description": "All you need to know about Study handling", + "projects": { + "id": "projects", + "name": "Projects", + "description": "All you need to know about Project handling", "context": "osparc-test-id=newPlusBtn", "steps": [{ "beforeClick": { @@ -10,13 +10,13 @@ "action": "open" }, "anchorEl": "osparc-test-id=newPlusMenu", - "title": "Create Studies", - "text": "Clicking on the (+) New button, allows you to create new Studies or new Folders to organize the studies", + "title": "Create Projects", + "text": "Clicking on the (+) New button, allows you to create new Projects, start your favourite Apps or create new Folders to organize the projects", "placement": "right" }, { "anchorEl": "osparc-test-id=searchBarFilter-textField-study", "title": "Filter and Search", - "text": "This tool allows you to search Studies, Tutorials and Services.
You can search and filter by:
- Title, description, owner, id...
- Tags
- Shared with", + "text": "This tool allows you to search Projects, Templates or Public Projects.
You can search and filter by:
- Title, description, owner, id...
- Tags
- Shared with", "placement": "bottom" }, { "beforeClick": { @@ -25,13 +25,29 @@ }, "anchorEl": "osparc-test-id=studyItemMenuMenu", "title": "More options button", - "text": "On the Study card, you can use the three dots button to access more information and operation on the Study.", + "text": "On the Project card, you can use the three dots button to access more information and operation on the Project.", "placement": "left" }, { "anchorEl": "osparc-test-id=updateStudyBtn", "title": "Update Services", - "text": "On the Study card, you can use the Update button to update the corresponding service to the latest version.", + "text": "On the Project card, you can use the Update button to update the corresponding service to the latest version.", "placement": "bottom" + }, { + "beforeClick": { + "selector": "osparc-test-id=templatesFilterItem" + }, + "anchorEl": "osparc-test-id=templatesFilterItem", + "title": "Templates", + "text": "Templates let you freeze a project and reuse it anytime. When you create a template, it saves a snapshot of your project. Later, you can click on it to start a fresh, independent copy.
Templates shared with you by others will also appear here, so you can build on their work too.", + "placement": "right" + }, { + "beforeClick": { + "selector": "osparc-test-id=publicProjectsFilterItem" + }, + "anchorEl": "osparc-test-id=publicProjectsFilterItem", + "title": "Public Projects", + "text": "Just like templates, Public Projects are frozen snapshots—but they’re shared with the entire scientific community. Anyone can browse them and create their own copy and build upon.", + "placement": "right" }] }, "dashboard": { @@ -42,35 +58,35 @@ "steps": [{ "anchorEl": "osparc-test-id=dashboardTabs", "title": "Dashboard Menu", - "text": "The menu tabs give you quick access to a set of core elements of the platform, namely Studies, Templates, Services and Data.", + "text": "The menu tabs give you quick access to a set of core elements of the platform, namely Projects, Tutorials, Apps and Data.", "placement": "bottom" }, { "beforeClick": { "selector": "osparc-test-id=studiesTabBtn" }, "anchorEl": "osparc-test-id=studiesTabBtn", - "text": "Any Study is accessible via the Dashboard. The Studies, which belong to or are shared with you, can be found here. You can also create Folders to help you organize the Studies", + "text": "Any Project is accessible via the Dashboard. The Projects, which belong to or are shared with you, can be found here. You can also create Folders to help you organize the Projects. Also, the Templates you have access to and Public Projects can be found in their own sections.", "placement": "bottom" }, { "beforeClick": { "selector": "osparc-test-id=tutorialsTabBtn" }, "anchorEl": "osparc-test-id=tutorialsTabBtn", - "text": "Clicking on a Template will create a copy of that Study, which will appear in your own Studies tab with the same name as the Template. Any changes you make to this copy will not affect the original Template.", + "text": "Clicking on a Tutorial will create a copy of that Project, which will appear in your own Projects tab with the same name as the Tutorial. Any changes you make to this copy will not affect the original Tutorial.", "placement": "bottom" }, { "beforeClick": { "selector": "osparc-test-id=appsTabBtn" }, "anchorEl": "osparc-test-id=appsTabBtn", - "text": "Every Study in oSparc is composed of so-called Services.
These are building blocks for Studies and can provide data/files, visualize results (2D, 3D), implement code in Jupyter notebooks or perform computations to execute simulations within a Study.", + "text": "Every Project in oSparc is composed of so-called Apps.
These are building blocks for Projects and can provide data/files, visualize results (2D, 3D), implement code in Jupyter notebooks or perform computations to execute simulations within a Project.", "placement": "bottom" }, { "beforeClick": { "selector": "osparc-test-id=dataTabBtn" }, "anchorEl": "osparc-test-id=dataTabBtn", - "text": "All the Data of the Studies you have access to can bre explored here.", + "text": "All the Data of the Projects you have access to can be explored here.", "placement": "bottom" }] }, @@ -80,20 +96,25 @@ "description": "Introduction to the Navigation Bar", "context": "osparc-test-id=navigationBar", "steps": [{ + "anchorEl": "osparc-test-id=jobsButton", + "title": "Activity Center", + "text": "Here you can track all your running jobs, recent activity, and completed tasks. If you want to check the results once these are ready, you will find more details in the Activity Overview on the Project card.", + "placement": "bottom" + }, { "beforeClick": { "selector": "osparc-test-id=notificationsButton", "event": "tap" }, "anchorEl": "osparc-test-id=notificationsContainer", - "text": "By clicking on the Bell, you will you see notifications about which Studies, Templates and Organizations have been shared with you.", - "placement": "bottom" + "text": "By clicking on the Bell, you will see notifications about which Projects, Templates and Organizations have been shared with you.", + "placement": "left" }, { "beforeClick": { "selector": "osparc-test-id=helpNavigationBtn", "action": "open" }, "anchorEl": "osparc-test-id=helpNavigationMenu", - "text": "Under the question mark, you will find Manuals, Support and ways to give us Feedback. It also provides quick access to other Guided Tours.", + "text": "Under the question mark, you can access Help & Support to assist you whenever you need it.", "placement": "left" }, { "beforeClick": { diff --git a/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json b/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json index b72a26b4fdfe..62750b5f0595 100644 --- a/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json +++ b/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json @@ -11,12 +11,12 @@ }, "anchorEl": "osparc-test-id=newPlusMenu", "title": "Start Sim4Life and more", - "text": "Clicking on the (+) New button, allows you to create new Sim4Life projects or new Folders to organize the projects", + "text": "Clicking on the (+) New button, allows you to create new Sim4Life projects, start other featured Apps or create new Folders to organize the projects", "placement": "right" }, { "anchorEl": "osparc-test-id=searchBarFilter-textField-study", "title": "Filter and Search", - "text": "This tool allows you to search Projects, Tutorials and Services.
You can search and filter by:
- Title, description, owner, id...
- Tags
- Shared with", + "text": "This tool allows you to search Projects, Templates or Public Projects.
You can search and filter by:
- Title, description, owner, id...
- Tags
- Shared with", "placement": "bottom" }, { "beforeClick": { @@ -32,6 +32,22 @@ "title": "Update Services", "text": "On the Project card, you can use the Update button to update the corresponding service to the latest version.", "placement": "bottom" + }, { + "beforeClick": { + "selector": "osparc-test-id=templatesFilterItem" + }, + "anchorEl": "osparc-test-id=templatesFilterItem", + "title": "Templates", + "text": "Templates let you freeze a project and reuse it anytime. When you create a template, it saves a snapshot of your project. Later, you can click on it to start a fresh, independent copy.
Templates shared with you by others will also appear here, so you can build on their work too.", + "placement": "right" + }, { + "beforeClick": { + "selector": "osparc-test-id=publicProjectsFilterItem" + }, + "anchorEl": "osparc-test-id=publicProjectsFilterItem", + "title": "Public Projects", + "text": "Just like templates, Public Projects are frozen snapshots—but they’re shared with the entire scientific community. Anyone can browse them and create their own copy and build upon.", + "placement": "right" }] }, "dashboard": { @@ -42,14 +58,14 @@ "steps": [{ "anchorEl": "osparc-test-id=dashboardTabs", "title": "Dashboard Menu", - "text": "The menu tabs give you quick access to a set of core elements of the platform, namely Projects, Tutorials and Services.", + "text": "The menu tabs give you quick access to a set of core elements of the platform, namely Projects, Tutorials and Apps.", "placement": "bottom" }, { "beforeClick": { "selector": "osparc-test-id=studiesTabBtn" }, "anchorEl": "osparc-test-id=studiesTabBtn", - "text": "Any Project is accessible via the Dashboard. The Projects, which belong to or are shared with you, can be found here.", + "text": "Any Project is accessible via the Dashboard. The Projects, which belong to or are shared with you, can be found here. You can also create Folders to help you organize the Projects. Also, the Templates you have access to and Public Projects can be found in their own sections.", "placement": "bottom" }, { "beforeClick": { @@ -63,7 +79,7 @@ "selector": "osparc-test-id=appsTabBtn" }, "anchorEl": "osparc-test-id=appsTabBtn", - "text": "Every Project in Sim4Life is composed of at least one so-called Service.
Services are building blocks for Projects and can provide data/files, visualize results (2D, 3D), implement code in Jupyter notebooks or perform computations to execute simulations within a Project.", + "text": "Every Project in Sim4Life is composed of at least one so-called App.<br>
Apps are building blocks for Projects and can provide data/files, visualize results (2D, 3D), implement code in Jupyter notebooks or perform computations to execute simulations within a Project.", "placement": "bottom" }] }, @@ -73,12 +89,17 @@ "description": "Introduction to the Navigation Bar", "context": "osparc-test-id=navigationBar", "steps": [{ + "anchorEl": "osparc-test-id=jobsButton", + "title": "Activity Center", + "text": "Here you can track all your running jobs, recent activity, and completed tasks. If you want to check the results once these are ready, you will find more details in the Activity Overview on the Project card.", + "placement": "bottom" + }, { "beforeClick": { "selector": "osparc-test-id=notificationsButton", "event": "tap" }, "anchorEl": "osparc-test-id=notificationsContainer", - "text": "By clicking on the Bell, you will you see notifications about which Projects, Credits and Organizations have been shared with you.", + "text": "By clicking on the Bell, you will see notifications about which Projects, Credits and Organizations have been shared with you.", "placement": "bottom" }, { "beforeClick": { @@ -86,7 +107,7 @@ "action": "open" }, "anchorEl": "osparc-test-id=helpNavigationMenu", - "text": "Under the question mark, you find Manuals, Support and ways to give us Feedback. It also provides quick access to other Guided Tours.", + "text": "Under the question mark, you can access Help & Support to assist you whenever you need it.", "placement": "left" }, { "beforeClick": { diff --git a/services/static-webserver/client/source/resource/osparc/tours/s4llite_tours.json b/services/static-webserver/client/source/resource/osparc/tours/s4llite_tours.json index ddb0adf8ea60..f43a0c0c43b1 100644 --- a/services/static-webserver/client/source/resource/osparc/tours/s4llite_tours.json +++ b/services/static-webserver/client/source/resource/osparc/tours/s4llite_tours.json @@ -16,7 +16,7 @@ }, { "anchorEl": "osparc-test-id=searchBarFilter-textField-study", "title": "Filter and Search", - "text": "This tool allows you to filter Projects and Tutorials.
You can search and filter by:
- Title, description, owner, id...
- Tags
- Shared with", + "text": "This tool allows you to filter Projects, Templates or Public Projects.
You can search and filter by:
- Title, description, owner, id...
- Tags
- Shared with", "placement": "bottom" }, { "beforeClick": { @@ -32,6 +32,22 @@ "title": "Update Services", "text": "On the Project card, you can use the Update button to update the corresponding service to the latest version.", "placement": "bottom" + }, { + "beforeClick": { + "selector": "osparc-test-id=templatesFilterItem" + }, + "anchorEl": "osparc-test-id=templatesFilterItem", + "title": "Templates", + "text": "Templates let you freeze a project and reuse it anytime. When you create a template, it saves a snapshot of your project. Later, you can click on it to start a fresh, independent copy.
Templates shared with you by others will also appear here, so you can build on their work too.", + "placement": "right" + }, { + "beforeClick": { + "selector": "osparc-test-id=publicProjectsFilterItem" + }, + "anchorEl": "osparc-test-id=publicProjectsFilterItem", + "title": "Public Projects", + "text": "Just like templates, Public Projects are frozen snapshots—but they’re shared with the entire scientific community. Anyone can browse them and create their own copy and build upon.", + "placement": "right" }] }, "dashboard": { @@ -68,7 +84,7 @@ "event": "tap" }, "anchorEl": "osparc-test-id=notificationsContainer", - "text": "By clicking on the Bell, you will you see notifications about which Projects and Organizations have been shared with you.", + "text": "By clicking on the Bell, you will see notifications about which Projects and Organizations have been shared with you.", "placement": "bottom" }, { "beforeClick": { diff --git a/services/static-webserver/client/source/resource/osparc/tours/tiplite_tours.json b/services/static-webserver/client/source/resource/osparc/tours/tiplite_tours.json index bdc28947d878..a68276ba0811 100644 --- a/services/static-webserver/client/source/resource/osparc/tours/tiplite_tours.json +++ b/services/static-webserver/client/source/resource/osparc/tours/tiplite_tours.json @@ -10,7 +10,7 @@ "event": "tap" }, "anchorEl": "osparc-test-id=notificationsContainer", - "text": "By clicking on the Bell, you will you see notifications about which Studies and Organizations have been shared with you.", + "text": "By clicking on the Bell, you will see notifications about which Studies and Organizations have been shared with you.", "placement": "bottom" }, { "beforeClick": { @@ -18,7 +18,7 @@ "action": "open" }, "anchorEl": "osparc-test-id=helpNavigationMenu", - "text": "Under the question mark, you will find Manuals, Support and ways to give us Feedback. It also provides quick access to other Guided Tours.", + "text": "Under the question mark, you can access Help & Support to assist you whenever you need it.", "placement": "left" }, { "beforeClick": { @@ -68,7 +68,7 @@ }, { "anchorEl": "osparc-test-id=searchBarFilter-textField-study", "title": "Filters and Search", - "text": "This tool allows you to search and filter Plans, Tutorials and Services.
You can search and filter by:
- Title, description, owner, id...
- Tags
- Shared with", + "text": "This tool allows you to search and filter Plans.
You can search and filter by:
- Title, description, owner, id...
- Tags
- Shared with", "placement": "bottom" }, { "beforeClick": { diff --git a/services/static-webserver/client/source/resource/osparc/tours/tis_tours.json b/services/static-webserver/client/source/resource/osparc/tours/tis_tours.json index 579de9a68883..71a2d51b2500 100644 --- a/services/static-webserver/client/source/resource/osparc/tours/tis_tours.json +++ b/services/static-webserver/client/source/resource/osparc/tours/tis_tours.json @@ -5,12 +5,17 @@ "description": "Introduction to the Navigation Bar", "context": "osparc-test-id=navigationBar", "steps": [{ + "anchorEl": "osparc-test-id=jobsButton", + "title": "Activity Center", + "text": "Here you can track all your running simulations, recent activity, and completed tasks. If you want to check the results once these are ready, you will find more details in the Activity Overview on the Project card.", + "placement": "bottom" + },{ "beforeClick": { "selector": "osparc-test-id=notificationsButton", "event": "tap" }, "anchorEl": "osparc-test-id=notificationsContainer", - "text": "By clicking on the Bell, you will you see notifications about which Studies, Credits and Organizations have been shared with you.", + "text": "By clicking on the Bell, you will see notifications about which Projects, Credits and Organizations have been shared with you.", "placement": "bottom" }, { "beforeClick": { @@ -18,7 +23,7 @@ "action": "open" }, "anchorEl": "osparc-test-id=helpNavigationMenu", - "text": "Under the question mark, you will find Manuals, Support and ways to give us Feedback. It also provides quick access to other Guided Tours.", + "text": "Under the question mark, you can access Help & Support to assist you whenever you need it.", "placement": "left" }, { "beforeClick": { @@ -73,7 +78,7 @@ }, { "anchorEl": "osparc-test-id=searchBarFilter-textField-study", "title": "Filters and Search", - "text": "This tool allows you to search and filter Plans, Tutorials and Services.
You can search and filter by:
- Title, description, owner, id...
- Tags
- Shared with", + "text": "This tool allows you to search and filter Plans.
You can search and filter by:
- Title, description, owner, id...
- Tags
- Shared with", "placement": "bottom" }, { "beforeClick": { @@ -106,7 +111,7 @@ }, "anchorEl": "osparc-test-id=creditsSummary", "title": "Credits Indicator", - "text": "By clicking on the Credits indicator, you will you see the number of available Credits. Click the three dots to access the Billing Center.", + "text": "By clicking on the Credits indicator, you will see the number of available Credits. Click the three dots to access the Billing Center.", "placement": "left" }, { "beforeClick": { diff --git a/services/static-webserver/client/source/resource/osparc/ui_config.json b/services/static-webserver/client/source/resource/osparc/ui_config.json index 655adf60f869..02e493d537e6 100644 --- a/services/static-webserver/client/source/resource/osparc/ui_config.json +++ b/services/static-webserver/client/source/resource/osparc/ui_config.json @@ -120,6 +120,32 @@ "newStudyLabel": "New Pipeline", "idToWidget": "emptyStudyBtn" }] + }, + "groupedServices": { + "categories": [{ + "id": "featured", + "title": "Featured", + "color": "product-color" + }], + "services": [{ + "serviceKey": "simcore/services/dynamic/s4l-ui", + "category": "featured" + }, { + "serviceKey": "simcore/services/dynamic/s4l-jupyter", + "category": "featured" + }, { + "serviceKey": "simcore/services/dynamic/iseg-web", + "category": "featured" + }, { + "serviceKey": "simcore/services/dynamic/s4l-ui-framework", + "category": "featured" + }, { + "serviceKey": "simcore/services/comp/s4l-python-runner", + "category": "featured" + }, { + "serviceKey": "simcore/services/comp/s4l-python-runner-gpu", + "category": "featured" + }] } }, "s4lacad": { @@ -146,6 +172,32 @@ "newStudyLabel": "New Pipeline", "idToWidget": "emptyStudyBtn" }] + }, + "groupedServices": { + "categories": [{ + "id": "featured", + "title": "Featured", + "color": "product-color" + }], + "services": [{ + "serviceKey": "simcore/services/dynamic/s4l-ui", + "category": "featured" + }, { + "serviceKey": "simcore/services/dynamic/s4l-jupyter", + "category": "featured" + }, { + "serviceKey": "simcore/services/dynamic/iseg-web", + "category": "featured" + }, { + "serviceKey": "simcore/services/dynamic/s4l-ui-framework", + "category": "featured" + }, { + "serviceKey": "simcore/services/comp/s4l-python-runner", + "category": "featured" + }, { + "serviceKey": "simcore/services/comp/s4l-python-runner-gpu", + "category": "featured" + }] } }, "s4llite": { diff --git a/services/static-webserver/client/source/resource/rocketPreview/osparc-bridge.js b/services/static-webserver/client/source/resource/rocketPreview/osparc-bridge.js new file mode 100644 index 000000000000..ef849c756e41 --- /dev/null +++ b/services/static-webserver/client/source/resource/rocketPreview/osparc-bridge.js @@ -0,0 +1,79 @@ +// rocket-bridge.js +(function () { + const parentWin = window.parent; + + const handlers = { + setTreeData: async (payload) => { + console.log("[rocketPreview] setTreeData", payload); + // TODO + return { + ok: true + }; + }, + setExtraData: async (payload) => { + console.log("[rocketPreview] setExtraData", payload); + // TODO + return { + ok: true + }; + }, + setImage: async (payload) => { + console.log("[rocketPreview] setImage", payload); + // TODO + return { + ok: true + }; + }, + getState: async () => { + return { + status: "ready" + }; + }, + ping: async (payload) => { + return { + pong: true, + t: payload?.t + }; + } + }; + + function reply(id, ok, resultOrError) { + parentWin.postMessage({ + type: "osparc:rpc:result", + id, + ok, + result: ok ? 
resultOrError : undefined, + error: ok ? undefined : String(resultOrError) + }, "*"); + } + + window.addEventListener("message", async (ev) => { + const data = ev.data; + if (!data || data.type !== "osparc:rpc") { + return; + } + const { id, action, payload, expectReply } = data; + try { + const fn = handlers[action]; + if (typeof fn !== "function") { + throw new Error(`Unknown action '${action}'`); + } + const result = await fn(payload); + if (expectReply) { + reply(id, true, result); + } + } catch (err) { + if (expectReply) { + reply(id, false, err); + } + } + }); + + // Tell osparc we’re ready + window.addEventListener("DOMContentLoaded", () => { + parentWin.postMessage({ + type: "osparc:ready", + version: "1.0.0" + }, "*"); + }); +})(); diff --git a/services/static-webserver/client/source/resource/schemas/product-ui.json b/services/static-webserver/client/source/resource/schemas/product-ui.json index a1454b8e6dd0..8fe7d31c54cf 100644 --- a/services/static-webserver/client/source/resource/schemas/product-ui.json +++ b/services/static-webserver/client/source/resource/schemas/product-ui.json @@ -7,6 +7,9 @@ }, "newStudies": { "$ref": "#/definitions/buttonConfig" + }, + "groupedServices": { + "$ref": "#/definitions/groupedServicesConfig" } }, "additionalProperties": false, @@ -92,6 +95,34 @@ } }, "additionalProperties": false + }, + "groupedServicesConfig": { + "type": "object", + "properties": { + "categories": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { "type": "string" }, + "title": { "type": "string" }, + "description": { "type": "string" } + }, + "required": ["id", "title"] + } + }, + "services": { + "type": "array", + "items": { + "type": "object", + "properties": { + "serviceKey": { "type": "string" }, + "category": { "type": "string" } + }, + "required": ["serviceKey", "category"] + } + } + } } } } diff --git a/services/static-webserver/client/tools/docker-compose.yml b/services/static-webserver/client/tools/docker-compose.yml index c0533894c612..7911d59ceff5 100644 --- a/services/static-webserver/client/tools/docker-compose.yml +++ b/services/static-webserver/client/tools/docker-compose.yml @@ -9,6 +9,8 @@ services: ports: - '8080:8080' working_dir: /project + environment: + NODE_OPTIONS: "--max-old-space-size=5120" command: > qx serve --set qx.allowUrlSettings=true --set-env osparc.vcsOriginUrl="${VCS_URL}" diff --git a/services/static-webserver/client/tools/qooxdoo-kit/builder/Dockerfile b/services/static-webserver/client/tools/qooxdoo-kit/builder/Dockerfile index d5ec65a25922..079f6dda84b0 100644 --- a/services/static-webserver/client/tools/qooxdoo-kit/builder/Dockerfile +++ b/services/static-webserver/client/tools/qooxdoo-kit/builder/Dockerfile @@ -4,7 +4,7 @@ # # Note: context at osparc-simcore/services/static-webserver/client expected # -ARG tag +ARG tag=latest FROM itisfoundation/qooxdoo-kit:${tag} AS touch WORKDIR /project @@ -84,7 +84,7 @@ FROM server-base AS production COPY --from=build-client \ --chown=${SC_USER_NAME}:${SC_USER_NAME} \ /project/build-output "/static-content" -ENV SC_BUILD_TARGET production +ENV SC_BUILD_TARGET=production FROM server-base AS development -ENV SC_BUILD_TARGET development +ENV SC_BUILD_TARGET=development diff --git a/services/storage/Dockerfile b/services/storage/Dockerfile index c0305fd8d7a0..fa76987a70b5 100644 --- a/services/storage/Dockerfile +++ b/services/storage/Dockerfile @@ -2,7 +2,7 @@ # Define arguments in the global scope ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG 
UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build FROM python:${PYTHON_VERSION}-slim-bookworm AS base-arm64 @@ -30,6 +30,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=private \ set -eux && \ apt-get update && \ apt-get install -y --no-install-recommends \ + fd-find \ gosu \ && apt-get clean -y \ && rm -rf /var/lib/apt/lists/* \ @@ -64,8 +65,8 @@ ENV PYTHONDONTWRITEBYTECODE=1 \ ENV PATH="${VIRTUAL_ENV}/bin:$PATH" -ENV SC_BUILD_TARGET base -ENV SC_BOOT_MODE default +ENV SC_BUILD_TARGET=base +ENV SC_BOOT_MODE=default EXPOSE 8080 # -------------------------- ------------------------------- @@ -80,7 +81,7 @@ EXPOSE 8080 FROM base AS build -ENV SC_BUILD_TARGET build +ENV SC_BUILD_TARGET=build RUN --mount=type=cache,target=/var/cache/apt,sharing=private \ set -eux \ @@ -98,10 +99,7 @@ RUN uv venv "${VIRTUAL_ENV}" -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools + WORKDIR /build @@ -121,6 +119,9 @@ WORKDIR /build FROM build AS prod-only-deps ENV SC_BUILD_TARGET=prod-only-deps +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy # 2nd party packages WORKDIR /build/services/storage @@ -147,8 +148,6 @@ ENV SC_BUILD_TARGET=production \ SC_HEALTHCHECK_RETRY=3 ENV PYTHONOPTIMIZE=TRUE -# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode -ENV UV_COMPILE_BYTECODE=1 WORKDIR /home/scu diff --git a/services/storage/Makefile b/services/storage/Makefile index ef350cae0917..3627250ce981 100644 --- a/services/storage/Makefile +++ b/services/storage/Makefile @@ -12,7 +12,7 @@ openapi.json: .env @set -o allexport; \ source $<; \ set +o allexport; \ - python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app.openapi(), indent=2) )" > $@ + python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app_factory().openapi(), indent=2) )" > $@ # validates OAS file: $@ $(call validate_openapi_specs,$@) diff --git a/services/storage/VERSION b/services/storage/VERSION index faef31a4357c..39e898a4f952 100644 --- a/services/storage/VERSION +++ b/services/storage/VERSION @@ -1 +1 @@ -0.7.0 +0.7.1 diff --git a/services/storage/docker/boot.sh b/services/storage/docker/boot.sh index 2b817f133214..6dd4e72f8e6c 100755 --- a/services/storage/docker/boot.sh +++ b/services/storage/docker/boot.sh @@ -24,7 +24,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/storage - uv pip --quiet sync requirements/dev.txt + uv pip --quiet sync --link-mode=copy requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -33,7 +33,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy if command -v uv >/dev/null 2>&1; then - uv pip install debugpy + uv pip install --link-mode=copy debugpy else pip install debugpy fi @@ -59,6 +59,7 @@ if [ "${STORAGE_WORKER_MODE}" = "true" ]; then worker --pool=threads \ --loglevel="${SERVER_LOG_LEVEL}" \ --concurrency="${CELERY_CONCURRENCY}" \ + --hostname="${STORAGE_WORKER_NAME}" \ --queues="${CELERY_QUEUES:-default}" else exec celery \ @@ -66,26 +67,30 @@ if [ "${STORAGE_WORKER_MODE}" = "true" ]; then worker --pool=threads \ --loglevel="${SERVER_LOG_LEVEL}" \ --concurrency="${CELERY_CONCURRENCY}" \ + --hostname="${STORAGE_WORKER_NAME}" \ --queues="${CELERY_QUEUES:-default}" fi else if [ "${SC_BOOT_MODE}" = "debug" ]; then - reload_dir_packages=$(find /devel/packages -maxdepth 3 -type d -path 
"*/src/*" ! -path "*.*" -exec echo '--reload-dir {} \' \;) + reload_dir_packages=$(fdfind src /devel/packages --exec echo '--reload-dir {} ' | tr '\n' ' ') exec sh -c " cd services/storage/src/simcore_service_storage && \ - python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${STORAGE_REMOTE_DEBUGGING_PORT} -m uvicorn main:app \ + python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:${STORAGE_REMOTE_DEBUGGING_PORT} -m \ + uvicorn \ + --factory main:app_factory \ --host 0.0.0.0 \ --port ${STORAGE_PORT} \ --reload \ - $reload_dir_packages + $reload_dir_packages \ --reload-dir . \ --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_storage.main:app \ + exec uvicorn \ + --factory simcore_service_storage.main:app_factory \ --host 0.0.0.0 \ - --port ${STORAGE_PORT} \ + --port "${STORAGE_PORT}" \ --log-level "${SERVER_LOG_LEVEL}" fi fi diff --git a/services/storage/docker/entrypoint.sh b/services/storage/docker/entrypoint.sh index 11684a3a5911..ad8718d5716a 100755 --- a/services/storage/docker/entrypoint.sh +++ b/services/storage/docker/entrypoint.sh @@ -27,6 +27,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" +echo "$INFO" "UV : $(command -v uv)" if [ "${SC_BUILD_TARGET}" = "development" ]; then echo "$INFO" "development mode detected..." @@ -61,10 +62,9 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; - # change user property of files already around + fdfind --owner ":$SC_USER_ID" --exclude proc --exec-batch chgrp --no-dereference "$CONT_GROUPNAME" . '/' echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fdfind --owner "$SC_USER_ID:" --exclude proc --exec-batch chown --no-dereference "$SC_USER_NAME" . 
'/' fi fi diff --git a/services/storage/docker/healthcheck.py b/services/storage/docker/healthcheck.py index d938c860dabf..f56b1bf57e2b 100755 --- a/services/storage/docker/healthcheck.py +++ b/services/storage/docker/healthcheck.py @@ -20,7 +20,6 @@ import os -import socket import subprocess import sys from urllib.request import urlopen @@ -37,6 +36,7 @@ app_settings = ApplicationSettings.create_from_envs() + def _is_celery_worker_healthy(): assert app_settings.STORAGE_CELERY broker_url = app_settings.STORAGE_CELERY.CELERY_RABBIT_BROKER.dsn @@ -50,7 +50,7 @@ def _is_celery_worker_healthy(): "inspect", "ping", "--destination", - "celery@" + socket.gethostname(), + "celery@" + os.getenv("STORAGE_WORKER_NAME", "worker"), ], capture_output=True, text=True, diff --git a/services/storage/openapi.json b/services/storage/openapi.json index 6f73e78bce7d..3307a7c9aed2 100644 --- a/services/storage/openapi.json +++ b/services/storage/openapi.json @@ -1,9 +1,9 @@ { "openapi": "3.1.0", "info": { - "title": "simcore_service_storage", + "title": "simcore-service-storage", "description": "Service that manages osparc storage backend", - "version": "0.7.0" + "version": "0.7.1" }, "paths": { "/v0/": { @@ -1374,6 +1374,7 @@ "description": "Application's version" }, "services": { + "additionalProperties": true, "type": "object", "title": "Services", "description": "Other backend services connected from this service", @@ -1382,6 +1383,7 @@ "sessions": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -1440,14 +1442,14 @@ "total": { "anyOf": [ { - "type": "integer" + "type": "integer", + "minimum": 0 }, { "type": "null" } ], - "title": "Total", - "description": "Total items" + "title": "Total" }, "current_page": { "anyOf": [ @@ -1812,6 +1814,7 @@ "data": { "anyOf": [ { + "additionalProperties": true, "type": "object" }, { @@ -2511,7 +2514,6 @@ "title": "S3 Secret Key" } }, - "additionalProperties": false, "type": "object", "required": [ "S3_ACCESS_KEY", diff --git a/services/storage/requirements/_base.in b/services/storage/requirements/_base.in index e2ea1f7474f6..ca1b44e47d28 100644 --- a/services/storage/requirements/_base.in +++ b/services/storage/requirements/_base.in @@ -6,6 +6,7 @@ --requirement ../../../packages/aws-library/requirements/_base.in +--requirement ../../../packages/celery-library/requirements/_base.in --requirement ../../../packages/common-library/requirements/_base.in --requirement ../../../packages/models-library/requirements/_base.in --requirement ../../../packages/postgres-database/requirements/_base.in @@ -19,12 +20,13 @@ aiofiles # i/o asgi_lifespan asyncpg # database celery[redis] +fastapi-pagination +httpx opentelemetry-instrumentation-celery opentelemetry-instrumentation-botocore -packaging -fastapi-pagination orjson -pydantic[dotenv] +packaging +pydantic tenacity typer types-aiobotocore[s3] # s3 storage diff --git a/services/storage/requirements/_base.txt b/services/storage/requirements/_base.txt index 826a4f01de4c..ae7add7d7fd9 100644 --- a/services/storage/requirements/_base.txt +++ b/services/storage/requirements/_base.txt @@ -1,6 +1,7 @@ aio-pika==9.5.4 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in aioboto3==14.3.0 # via @@ -12,24 +13,28 @@ aiocache==0.12.3 # via # -r 
requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in aiodebug==2.3.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in aiodocker==0.24.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in aiofiles==24.1.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_base.in # aioboto3 aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -43,6 +48,18 @@ aiohttp==3.11.18 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -84,11 +101,16 @@ arrow==1.3.0 # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in asgi-lifespan==2.1.0 - # via -r requirements/_base.in + # via + # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # -r requirements/_base.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi asyncpg==0.30.0 @@ -111,8 +133,10 @@ botocore==1.37.3 # s3transfer botocore-stubs==1.36.17 # via types-aiobotocore -celery==5.4.0 - # via -r requirements/_base.in +celery==5.5.2 + # via + # -r requirements/../../../packages/celery-library/requirements/_base.in + # -r requirements/_base.in certifi==2025.1.31 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -127,6 +151,18 @@ certifi==2025.1.31 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -144,9 +180,10 @@ certifi==2025.1.31 # httpcore # httpx # requests + # sentry-sdk charset-normalizer==3.4.1 # via requests -click==8.1.8 +click==8.2.1 # via # celery # click-didyoumean @@ -161,12 +198,6 @@ click-plugins==1.1.1 # via celery click-repl==0.3.0 # via celery -deprecated==1.2.18 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions dnspython==2.7.0 # via email-validator email-validator==2.2.0 @@ -177,25 +208,29 @@ exceptiongroup==1.2.2 # via aio-pika fast-depends==2.4.12 # via faststream -fastapi==0.115.12 +fastapi==0.116.1 # via # -r requirements/../../../packages/service-library/requirements/_fastapi.in # fastapi-lifespan-manager -fastapi-cli==0.0.7 + # fastapi-pagination +fastapi-cli==0.0.8 # via fastapi +fastapi-cloud-cli==0.1.5 + # via fastapi-cli fastapi-lifespan-manager==0.1.4 # via -r 
requirements/../../../packages/service-library/requirements/_fastapi.in -fastapi-pagination==0.12.34 +fastapi-pagination==0.14.0 # via -r requirements/_base.in faststream==0.5.34 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.66.0 +googleapis-common-protos==1.70.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -203,7 +238,7 @@ greenlet==3.1.1 # via sqlalchemy grpcio==1.70.0 # via opentelemetry-exporter-otlp-proto-grpc -h11==0.14.0 +h11==0.16.0 # via # httpcore # uvicorn @@ -211,7 +246,7 @@ h2==4.2.0 # via httpx hpack==4.1.0 # via h2 -httpcore==1.0.7 +httpcore==1.0.9 # via httpx httptools==0.6.4 # via uvicorn @@ -229,6 +264,18 @@ httpx==0.28.1 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -244,7 +291,9 @@ httpx==0.28.1 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/service-library/requirements/_fastapi.in + # -r requirements/_base.in # fastapi + # fastapi-cloud-cli hyperframe==6.1.0 # via h2 idna==3.10 @@ -256,7 +305,7 @@ idna==3.10 # yarl importlib-metadata==8.5.0 # via opentelemetry-api -jinja2==3.1.5 +jinja2==3.1.6 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -270,6 +319,18 @@ jinja2==3.1.5 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -290,17 +351,27 @@ jmespath==1.0.1 # aiobotocore # boto3 # botocore +jsonref==1.1.0 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.23.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema-specifications==2024.10.1 # via jsonschema -kombu==5.4.2 +kombu==5.5.3 # via celery -mako==1.3.9 +mako==1.3.10 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -314,6 +385,18 @@ mako==1.3.9 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -342,9 +425,10 @@ multidict==6.1.0 # aiobotocore # aiohttp # yarl -opentelemetry-api==1.30.0 +opentelemetry-api==1.34.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -362,19 +446,20 @@ opentelemetry-api==1.30.0 # opentelemetry-propagator-aws-xray # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.30.0 +opentelemetry-exporter-otlp==1.34.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r 
requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.30.0 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.30.0 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.30.0 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.51b0 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asgi @@ -386,50 +471,58 @@ opentelemetry-instrumentation==0.51b0 # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.51b0 +opentelemetry-instrumentation-aio-pika==0.55b1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-asgi==0.51b0 +opentelemetry-instrumentation-asgi==0.55b1 # via opentelemetry-instrumentation-fastapi -opentelemetry-instrumentation-asyncpg==0.51b0 - # via -r requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-instrumentation-botocore==0.51b0 +opentelemetry-instrumentation-asyncpg==0.55b1 + # via + # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-botocore==0.55b1 # via # -r requirements/../../../packages/aws-library/requirements/_base.in # -r requirements/_base.in -opentelemetry-instrumentation-celery==0.51b0 +opentelemetry-instrumentation-celery==0.55b1 # via -r requirements/_base.in -opentelemetry-instrumentation-fastapi==0.51b0 +opentelemetry-instrumentation-fastapi==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-httpx==0.51b0 +opentelemetry-instrumentation-httpx==0.55b1 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in -opentelemetry-instrumentation-logging==0.51b0 +opentelemetry-instrumentation-logging==0.55b1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.51b0 +opentelemetry-instrumentation-redis==0.55b1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.51b0 +opentelemetry-instrumentation-requests==0.55b1 # via # -r 
requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in opentelemetry-propagator-aws-xray==1.0.2 # via opentelemetry-instrumentation-botocore -opentelemetry-proto==1.30.0 +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.30.0 +opentelemetry-sdk==1.34.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.51b0 +opentelemetry-semantic-conventions==0.55b1 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi @@ -441,7 +534,7 @@ opentelemetry-semantic-conventions==0.51b0 # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.51b0 +opentelemetry-util-http==0.55b1 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi @@ -461,6 +554,18 @@ orjson==3.10.15 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -483,6 +588,14 @@ orjson==3.10.15 # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in @@ -507,19 +620,20 @@ propcache==0.2.1 # via # aiohttp # yarl -protobuf==5.29.3 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto -psutil==6.1.1 +psutil==7.0.0 # via # -r 
requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in psycopg2-binary==2.9.10 # via sqlalchemy pycryptodome==3.21.0 # via stream-zip -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -533,6 +647,18 @@ pydantic==2.10.6 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -559,6 +685,17 @@ pydantic==2.10.6 # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in @@ -575,12 +712,13 @@ pydantic==2.10.6 # -r requirements/_base.in # fast-depends # fastapi + # fastapi-cloud-cli # fastapi-pagination # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.2 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -590,6 +728,14 @@ pydantic-extra-types==2.10.2 # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r 
requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in @@ -613,6 +759,18 @@ pydantic-settings==2.7.0 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -631,6 +789,10 @@ pydantic-settings==2.7.0 # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -640,6 +802,7 @@ pygments==2.19.1 pyinstrument==5.0.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in python-dateutil==2.9.0.post0 # via @@ -667,6 +830,18 @@ pyyaml==6.0.2 # -c 
requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -682,6 +857,7 @@ pyyaml==6.0.2 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # uvicorn redis==5.2.1 @@ -698,6 +874,18 @@ redis==5.2.1 # -c 
requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -713,6 +901,7 @@ redis==5.2.1 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # celery referencing==0.35.1 @@ -729,6 +918,18 @@ referencing==0.35.1 # -c 
requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -745,24 +946,32 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via opentelemetry-exporter-otlp-proto-http -rich==13.9.4 +rich==14.1.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r 
requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # rich-toolkit # typer -rich-toolkit==0.13.2 - # via fastapi-cli +rich-toolkit==0.15.0 + # via + # fastapi-cli + # fastapi-cloud-cli +rignore==0.6.4 + # via fastapi-cloud-cli rpds-py==0.22.3 # via # jsonschema # referencing s3transfer==0.11.3 # via boto3 +sentry-sdk==2.35.0 + # via fastapi-cloud-cli sh==2.2.1 # via -r requirements/../../../packages/aws-library/requirements/_base.in shellingham==1.5.4 @@ -787,6 +996,18 @@ sqlalchemy==1.4.54 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c 
requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -803,7 +1024,7 @@ sqlalchemy==1.4.54 # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/postgres-database/requirements/_base.in # alembic -starlette==0.45.3 +starlette==0.47.2 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -817,6 +1038,18 @@ starlette==0.45.3 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -835,28 +1068,35 @@ starlette==0.45.3 stream-zip==0.0.83 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in tenacity==9.0.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_base.in toolz==1.0.0 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in tqdm==4.67.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in -typer==0.15.1 +typer==0.16.1 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/_base.in # fastapi-cli + # fastapi-cloud-cli types-aiobotocore==2.19.0 # via # -r requirements/../../../packages/aws-library/requirements/_base.in @@ -871,7 +1111,7 @@ types-awscrt==0.23.10 # via botocore-stubs types-python-dateutil==2.9.0.20241206 # via arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # aiodebug # alembic @@ -879,21 +1119,27 @@ typing-extensions==4.12.2 # fastapi # fastapi-pagination # faststream + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pydantic # pydantic-core # pydantic-extra-types # rich-toolkit + # starlette # typer # types-aiobotocore # types-aiobotocore-ec2 # types-aiobotocore-s3 # types-aiobotocore-ssm -tzdata==2025.1 - # via - # celery - # kombu -urllib3==2.3.0 + # typing-inspection 
+typing-inspection==0.4.1 + # via pydantic +tzdata==2025.2 + # via kombu +urllib3==2.5.0 # via # -c requirements/../../../packages/aws-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -907,6 +1153,18 @@ urllib3==2.3.0 # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/aws-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/celery-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -923,10 +1181,12 @@ urllib3==2.3.0 # -c requirements/../../../requirements/constraints.txt # botocore # 
requests + # sentry-sdk uvicorn==0.34.2 # via # fastapi # fastapi-cli + # fastapi-cloud-cli uvloop==0.21.0 # via uvicorn vine==5.1.0 @@ -945,7 +1205,6 @@ websockets==14.2 wrapt==1.17.2 # via # aiobotocore - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-httpx @@ -953,6 +1212,7 @@ wrapt==1.17.2 yarl==1.18.3 # via # -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/celery-library/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/postgres-database/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # aio-pika diff --git a/services/storage/requirements/_test.txt b/services/storage/requirements/_test.txt index 1e1830575bf6..f49db41e2801 100644 --- a/services/storage/requirements/_test.txt +++ b/services/storage/requirements/_test.txt @@ -2,7 +2,7 @@ aiohappyeyeballs==2.6.1 # via # -c requirements/_base.txt # aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -59,7 +59,7 @@ botocore==1.37.3 # boto3 # moto # s3transfer -celery==5.4.0 +celery==5.5.2 # via # -c requirements/_base.txt # pytest-celery @@ -79,7 +79,7 @@ charset-normalizer==3.4.1 # via # -c requirements/_base.txt # requests -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # celery @@ -126,7 +126,7 @@ flask==3.1.0 # via # flask-cors # moto -flask-cors==5.0.1 +flask-cors==6.0.1 # via moto frozenlist==1.5.0 # via @@ -139,11 +139,11 @@ greenlet==3.1.1 # via # -c requirements/_base.txt # sqlalchemy -h11==0.14.0 +h11==0.16.0 # via # -c requirements/_base.txt # httpcore -httpcore==1.0.7 +httpcore==1.0.9 # via # -c requirements/_base.txt # httpx @@ -165,7 +165,7 @@ iniconfig==2.0.0 # via pytest itsdangerous==2.2.0 # via flask -jinja2==3.1.5 +jinja2==3.1.6 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt @@ -185,7 +185,9 @@ jsonpath-ng==1.7.0 jsonpointer==3.0.0 # via jsonpatch jsonref==1.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in jsonschema==4.23.0 # via # -c requirements/_base.txt @@ -199,10 +201,11 @@ jsonschema-specifications==2024.10.1 # -c requirements/_base.txt # jsonschema # openapi-schema-validator -kombu==5.4.2 +kombu==5.5.3 # via # -c requirements/_base.txt # celery + # pytest-celery lazy-object-proxy==1.10.0 # via openapi-spec-validator lupa==2.4 @@ -221,9 +224,9 @@ multidict==6.1.0 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.15.0 +mypy==1.16.1 # via sqlalchemy -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via mypy networkx==3.4.2 # via cfn-lint @@ -242,8 +245,12 @@ pandas==2.2.3 # via -r requirements/_test.in pathable==0.4.4 # via jsonschema-path +pathspec==0.12.1 + # via mypy pluggy==1.5.0 - # via pytest + # via + # pytest + # pytest-cov ply==3.11 # via jsonpath-ng pprintpp==0.4.0 @@ -257,7 +264,7 @@ propcache==0.2.1 # -c requirements/_base.txt # aiohttp # yarl -psutil==6.1.1 +psutil==7.0.0 # via # -c requirements/_base.txt # pytest-celery @@ -265,18 +272,22 @@ py-partiql-parser==0.6.1 # via moto pycparser==2.22 # via cffi -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt # aws-sam-translator -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via # -c requirements/_base.txt # 
pydantic +pygments==2.19.1 + # via + # -c requirements/_base.txt + # pytest pyparsing==3.2.1 # via moto -pytest==8.3.5 +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio @@ -286,19 +297,19 @@ pytest==8.3.5 # pytest-instafail # pytest-mock # pytest-sugar -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in -pytest-celery==1.1.3 +pytest-celery==1.2.0 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in -pytest-docker-tools==3.1.3 +pytest-docker-tools==3.1.9 # via pytest-celery pytest-icdiff==0.9 # via -r requirements/_test.in pytest-instafail==0.5.0 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -340,7 +351,7 @@ referencing==0.35.1 # jsonschema-specifications regex==2024.11.6 # via cfn-lint -requests==2.32.3 +requests==2.32.4 # via # -c requirements/_base.txt # docker @@ -362,7 +373,7 @@ s3transfer==0.11.3 # via # -c requirements/_base.txt # boto3 -setuptools==75.8.2 +setuptools==80.9.0 # via # moto # pytest-celery @@ -397,7 +408,7 @@ termcolor==2.5.0 # via pytest-sugar types-aiofiles==24.1.0.20241221 # via -r requirements/_test.in -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # anyio @@ -408,14 +419,18 @@ typing-extensions==4.12.2 # pydantic # pydantic-core # sqlalchemy2-stubs -tzdata==2025.1 + # typing-inspection +typing-inspection==0.4.1 + # via + # -c requirements/_base.txt + # pydantic +tzdata==2025.2 # via # -c requirements/_base.txt - # celery # faker # kombu # pandas -urllib3==2.3.0 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # -c requirements/_base.txt diff --git a/services/storage/requirements/_tools.txt b/services/storage/requirements/_tools.txt index d57a2d475d50..85cf1f19e648 100644 --- a/services/storage/requirements/_tools.txt +++ b/services/storage/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -28,11 +28,11 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via # -c requirements/_test.txt # -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # -c requirements/_test.txt # black @@ -46,7 +46,10 @@ packaging==24.2 # black # build pathspec==0.12.1 - # via black + # via + # -c requirements/_test.txt + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -73,13 +76,13 @@ pyyaml==6.0.2 # watchdog ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.2 +setuptools==80.9.0 # via # -c requirements/_test.txt # pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/services/storage/requirements/ci.txt b/services/storage/requirements/ci.txt index 31b66afbe90d..bb397a78f5e3 100644 --- a/services/storage/requirements/ci.txt +++ b/services/storage/requirements/ci.txt @@ -13,6 +13,7 @@ # installs this repo's packages simcore-aws-library @ ../../packages/aws-library/ +simcore-celery-library @ ../../packages/celery-library/ simcore-common-library @ ../../packages/common-library/ simcore-models-library @ ../../packages/models-library/ simcore-postgres-database @ ../../packages/postgres-database/ diff --git 
a/services/storage/requirements/dev.txt b/services/storage/requirements/dev.txt index 253cec8dbcb7..b428181c8c57 100644 --- a/services/storage/requirements/dev.txt +++ b/services/storage/requirements/dev.txt @@ -13,6 +13,7 @@ # installs this repo's packages --editable ../../packages/aws-library/ +--editable ../../packages/celery-library/ --editable ../../packages/common-library --editable ../../packages/models-library --editable ../../packages/postgres-database/ diff --git a/services/storage/requirements/prod.txt b/services/storage/requirements/prod.txt index dc4a2da4805b..40e94e3f4360 100644 --- a/services/storage/requirements/prod.txt +++ b/services/storage/requirements/prod.txt @@ -11,6 +11,7 @@ # installs this repo's packages simcore-aws-library @ ../../packages/aws-library/ +simcore-celery-library @ ../../packages/celery-library/ simcore-common-library @ ../../packages/common-library/ simcore-models-library @ ../../packages/models-library/ simcore-postgres-database @ ../../packages/postgres-database/ diff --git a/services/storage/setup.cfg b/services/storage/setup.cfg index 6c4b25275d5c..34668cb65b91 100644 --- a/services/storage/setup.cfg +++ b/services/storage/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.7.0 +current_version = 0.7.1 commit = True message = services/storage api version: {current_version} → {new_version} tag = False @@ -10,13 +10,13 @@ commit_args = --no-verify [tool:pytest] asyncio_mode = auto asyncio_default_fixture_loop_scope = function -markers = +markers = slow: marks tests as slow (deselect with '-m "not slow"') acceptance_test: "marks tests as 'acceptance tests' i.e. does the system do what the user expects? Typically those are workflows." testit: "marks test to run during development" heavy_load: "mark tests that require large amount of data" [mypy] -plugins = +plugins = pydantic.mypy sqlalchemy.ext.mypy.plugin diff --git a/services/storage/src/simcore_service_storage/_meta.py b/services/storage/src/simcore_service_storage/_meta.py index 7f5013dd0c31..6aa44c96808d 100644 --- a/services/storage/src/simcore_service_storage/_meta.py +++ b/services/storage/src/simcore_service_storage/_meta.py @@ -13,7 +13,7 @@ API_VERSION: Final[VersionStr] = info.__version__ API_VTAG: Final[str] = info.api_prefix_path_tag SUMMARY: Final[str] = info.get_summary() -APP_NAME: Final[str] = __name__.split(".")[0] +APP_NAME: Final[str] = info.app_name ## https://patorjk.com/software/taag/#p=display&f=Standard&t=Storage APP_STARTED_BANNER_MSG = r""" diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py index 871e8a7bcbcc..56a0343fdaed 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_files.py @@ -1,24 +1,24 @@ import logging from celery import Task # type: ignore[import-untyped] +from celery_library.utils import get_app_server from models_library.api_schemas_storage.storage_schemas import ( FileUploadCompletionBody, ) from models_library.projects_nodes_io import LocationID, StorageFileID from models_library.users import UserID +from servicelib.celery.models import TaskID from servicelib.logging_utils import log_context from ...dsm import get_dsm_provider from ...models import FileMetaData -from ...modules.celery.models import TaskId -from ...modules.celery.utils import get_fastapi_app _logger = logging.getLogger(__name__) async def complete_upload_file( task: 
Task, - task_id: TaskId, + task_id: TaskID, user_id: UserID, location_id: LocationID, file_id: StorageFileID, @@ -30,7 +30,7 @@ async def complete_upload_file( logging.INFO, msg=f"completing upload of file {user_id=}, {location_id=}, {file_id=}", ): - dsm = get_dsm_provider(get_fastapi_app(task.app)).get(location_id) + dsm = get_dsm_provider(get_app_server(task.app).app).get(location_id) # NOTE: completing a multipart upload on AWS can take up to several minutes # if it returns slow we return a 202 - Accepted, the client will have to check later # for completeness diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py index 2f3d05da5471..142c0f3968b2 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_paths.py @@ -2,22 +2,22 @@ from pathlib import Path from celery import Task # type: ignore[import-untyped] +from celery_library.utils import get_app_server from models_library.projects_nodes_io import LocationID, StorageFileID from models_library.users import UserID from pydantic import ByteSize, TypeAdapter +from servicelib.celery.models import TaskID from servicelib.logging_utils import log_context from servicelib.utils import limited_gather from ...constants import MAX_CONCURRENT_S3_TASKS from ...dsm import get_dsm_provider -from ...modules.celery.models import TaskId -from ...modules.celery.utils import get_fastapi_app _logger = logging.getLogger(__name__) async def compute_path_size( - task: Task, task_id: TaskId, user_id: UserID, location_id: LocationID, path: Path + task: Task, task_id: TaskID, user_id: UserID, location_id: LocationID, path: Path ) -> ByteSize: assert task_id # nosec with log_context( @@ -25,13 +25,13 @@ async def compute_path_size( logging.INFO, msg=f"computing path size {user_id=}, {location_id=}, {path=}", ): - dsm = get_dsm_provider(get_fastapi_app(task.app)).get(location_id) + dsm = get_dsm_provider(get_app_server(task.app).app).get(location_id) return await dsm.compute_path_size(user_id, path=Path(path)) async def delete_paths( task: Task, - task_id: TaskId, + task_id: TaskID, user_id: UserID, location_id: LocationID, paths: set[Path], @@ -42,7 +42,7 @@ async def delete_paths( logging.INFO, msg=f"delete {paths=} in {location_id=} for {user_id=}", ): - dsm = get_dsm_provider(get_fastapi_app(task.app)).get(location_id) + dsm = get_dsm_provider(get_app_server(task.app).app).get(location_id) files_ids: set[StorageFileID] = { TypeAdapter(StorageFileID).validate_python(f"{path}") for path in paths } diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py index 8a9f0a941ccb..dffd423fe253 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/_simcore_s3.py @@ -4,27 +4,31 @@ from aws_library.s3._models import S3ObjectKey from celery import Task # type: ignore[import-untyped] -from models_library.api_schemas_storage.storage_schemas import FoldersBody +from celery_library.utils import get_app_server +from models_library.api_schemas_storage.storage_schemas import ( + FoldersBody, + LinkType, + PresignedLink, +) from models_library.api_schemas_webserver.storage import PathToExport from models_library.progress_bar import ProgressReport from 
models_library.projects_nodes_io import StorageFileID from models_library.users import UserID from pydantic import TypeAdapter +from servicelib.celery.models import TaskID from servicelib.logging_utils import log_context from servicelib.progress_bar import ProgressBarData from ...dsm import get_dsm_provider -from ...modules.celery.models import TaskID, TaskId -from ...modules.celery.utils import get_celery_worker, get_fastapi_app from ...simcore_s3_dsm import SimcoreS3DataManager _logger = logging.getLogger(__name__) async def _task_progress_cb( - task: Task, task_id: TaskId, report: ProgressReport + task: Task, task_id: TaskID, report: ProgressReport ) -> None: - worker = get_celery_worker(task.app) + worker = get_app_server(task.app).task_manager assert task.name # nosec await worker.set_task_progress( task_id=task_id, @@ -33,14 +37,14 @@ async def _task_progress_cb( async def deep_copy_files_from_project( - task: Task, task_id: TaskId, user_id: UserID, body: FoldersBody + task: Task, task_id: TaskID, user_id: UserID, body: FoldersBody ) -> dict[str, Any]: with log_context( _logger, logging.INFO, msg=f"copying {body.source['uuid']} -> {body.destination['uuid']} with {task.request.id}", ): - dsm = get_dsm_provider(get_fastapi_app(task.app)).get( + dsm = get_dsm_provider(get_app_server(task.app).app).get( SimcoreS3DataManager.get_location_id() ) assert isinstance(dsm, SimcoreS3DataManager) # nosec @@ -75,7 +79,7 @@ async def export_data( logging.INFO, f"'{task_id}' export data (for {user_id=}) fom selection: {paths_to_export}", ): - dsm = get_dsm_provider(get_fastapi_app(task.app)).get( + dsm = get_dsm_provider(get_app_server(task.app).app).get( SimcoreS3DataManager.get_location_id() ) assert isinstance(dsm, SimcoreS3DataManager) # nosec @@ -87,7 +91,9 @@ async def export_data( async def _progress_cb(report: ProgressReport) -> None: assert task.name # nosec - await get_celery_worker(task.app).set_task_progress(task_id, report) + await get_app_server(task.app).task_manager.set_task_progress( + task_id, report + ) _logger.debug("'%s' progress %s", task_id, report.percent_value) async with ProgressBarData( @@ -98,3 +104,27 @@ async def _progress_cb(report: ProgressReport) -> None: return await dsm.create_s3_export( user_id, object_keys, progress_bar=progress_bar ) + + +async def export_data_as_download_link( + task: Task, + task_id: TaskID, + *, + user_id: UserID, + paths_to_export: list[PathToExport], +) -> PresignedLink: + """ + AccessRightError: in case user can't access project + """ + s3_object = await export_data( + task=task, task_id=task_id, user_id=user_id, paths_to_export=paths_to_export + ) + + dsm = get_dsm_provider(get_app_server(task.app).app).get( + SimcoreS3DataManager.get_location_id() + ) + + download_link = await dsm.create_file_download_link( + user_id=user_id, file_id=s3_object, link_type=LinkType.PRESIGNED + ) + return PresignedLink(link=download_link) diff --git a/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py b/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py index 6c74cd9792df..66de98ec2d5f 100644 --- a/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py +++ b/services/storage/src/simcore_service_storage/api/_worker_tasks/tasks.py @@ -1,22 +1,44 @@ import logging from celery import Celery # type: ignore[import-untyped] +from celery_library.task import register_task +from celery_library.types import register_celery_types, register_pydantic_types from 
models_library.api_schemas_storage.export_data_async_jobs import AccessRightError +from models_library.api_schemas_storage.storage_schemas import ( + FileUploadCompletionBody, + FoldersBody, + PresignedLink, +) +from servicelib.celery.models import OwnerMetadata from servicelib.logging_utils import log_context -from ...modules.celery._celery_types import register_celery_types -from ...modules.celery._task import register_task +from ...models import FileMetaData from ._files import complete_upload_file from ._paths import compute_path_size, delete_paths -from ._simcore_s3 import deep_copy_files_from_project, export_data +from ._simcore_s3 import ( + deep_copy_files_from_project, + export_data, + export_data_as_download_link, +) _logger = logging.getLogger(__name__) def setup_worker_tasks(app: Celery) -> None: register_celery_types() + register_pydantic_types( + FileUploadCompletionBody, + FileMetaData, + FoldersBody, + PresignedLink, + OwnerMetadata, + ) + with log_context(_logger, logging.INFO, msg="worker task registration"): register_task(app, export_data, dont_autoretry_for=(AccessRightError,)) + register_task( + app, export_data_as_download_link, dont_autoretry_for=(AccessRightError,) + ) register_task(app, compute_path_size) register_task(app, complete_upload_file) register_task(app, delete_paths) diff --git a/services/storage/src/simcore_service_storage/api/rest/_files.py b/services/storage/src/simcore_service_storage/api/rest/_files.py index f47818415700..8bb3e8ff7562 100644 --- a/services/storage/src/simcore_service_storage/api/rest/_files.py +++ b/services/storage/src/simcore_service_storage/api/rest/_files.py @@ -3,7 +3,6 @@ from urllib.parse import quote from fastapi import APIRouter, Depends, Header, Request -from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobNameData from models_library.api_schemas_storage.storage_schemas import ( FileMetaDataGet, FileMetaDataGetv010, @@ -18,10 +17,15 @@ ) from models_library.generics import Envelope from models_library.projects_nodes_io import LocationID, StorageFileID +from models_library.users import UserID from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.aiohttp import status +from servicelib.celery.models import ExecutionMetadata, OwnerMetadata, TaskUUID +from servicelib.celery.task_manager import TaskManager +from servicelib.logging_utils import log_context from yarl import URL +from ..._meta import APP_NAME from ...dsm import get_dsm_provider from ...exceptions.errors import FileMetaDataNotFoundError from ...models import ( @@ -34,11 +38,18 @@ StorageQueryParamsBase, UploadLinks, ) -from ...modules.celery.client import CeleryTaskClient -from ...modules.celery.models import TaskMetadata, TaskUUID from ...simcore_s3_dsm import SimcoreS3DataManager from .._worker_tasks._files import complete_upload_file as remote_complete_upload_file -from .dependencies.celery import get_celery_client +from .dependencies.celery import get_task_manager + + +def _get_owner_metadata(*, user_id: UserID) -> OwnerMetadata: + _data = { + "owner": APP_NAME, + "user_id": user_id, + } + return OwnerMetadata.model_validate(_data) + _logger = logging.getLogger(__name__) @@ -185,14 +196,17 @@ async def upload_file( """ # NOTE: Used by legacy dynamic services with single presigned link -> MUST BE BACKWARDS COMPATIBLE dsm = get_dsm_provider(request.app).get(location_id) - links: UploadLinks = await dsm.create_file_upload_links( - user_id=query_params.user_id, - file_id=file_id, - link_type=query_params.link_type, - 
file_size_bytes=query_params.file_size or ByteSize(0), - is_directory=query_params.is_directory, - sha256_checksum=query_params.sha256_checksum, - ) + with log_context( + logger=_logger, level=logging.DEBUG, msg=f"Creating upload links for {file_id=}" + ): + links: UploadLinks = await dsm.create_file_upload_links( + user_id=query_params.user_id, + file_id=file_id, + link_type=query_params.link_type, + file_size_bytes=query_params.file_size or ByteSize(0), + is_directory=query_params.is_directory, + sha256_checksum=query_params.sha256_checksum, + ) if query_params.is_v1_upload: # return v1 response assert len(links.urls) == 1 # nosec @@ -270,7 +284,7 @@ async def abort_upload_file( status_code=status.HTTP_202_ACCEPTED, ) async def complete_upload_file( - celery_client: Annotated[CeleryTaskClient, Depends(get_celery_client)], + task_manager: Annotated[TaskManager, Depends(get_task_manager)], query_params: Annotated[StorageQueryParamsBase, Depends()], location_id: LocationID, file_id: StorageFileID, @@ -280,16 +294,14 @@ async def complete_upload_file( # NOTE: completing a multipart upload on AWS can take up to several minutes # if it returns slow we return a 202 - Accepted, the client will have to check later # for completeness - async_job_name_data = AsyncJobNameData( - user_id=query_params.user_id, - product_name=_UNDEFINED_PRODUCT_NAME_FOR_WORKER_TASKS, # NOTE: I would need to change the API here - ) - task_uuid = await celery_client.submit_task( - TaskMetadata( + + owner_metadata = _get_owner_metadata(user_id=query_params.user_id) + task_uuid = await task_manager.submit_task( + ExecutionMetadata( name=remote_complete_upload_file.__name__, ), - task_context=async_job_name_data.model_dump(), - user_id=async_job_name_data.user_id, + owner_metadata=owner_metadata, + user_id=query_params.user_id, location_id=location_id, file_id=file_id, body=body, @@ -326,7 +338,7 @@ async def complete_upload_file( response_model=Envelope[FileUploadCompleteFutureResponse], ) async def is_completed_upload_file( - celery_client: Annotated[CeleryTaskClient, Depends(get_celery_client)], + task_manager: Annotated[TaskManager, Depends(get_task_manager)], query_params: Annotated[StorageQueryParamsBase, Depends()], location_id: LocationID, file_id: StorageFileID, @@ -336,24 +348,24 @@ async def is_completed_upload_file( # therefore we wait a bit to see if it completes fast and return a 204 # if it returns slow we return a 202 - Accepted, the client will have to check later # for completeness - async_job_name_data = AsyncJobNameData( - user_id=query_params.user_id, - product_name=_UNDEFINED_PRODUCT_NAME_FOR_WORKER_TASKS, # NOTE: I would need to change the API here - ) - task_status = await celery_client.get_task_status( - task_context=async_job_name_data.model_dump(), task_uuid=TaskUUID(future_id) + owner_metadata = _get_owner_metadata(user_id=query_params.user_id) + task_status = await task_manager.get_task_status( + owner_metadata=owner_metadata, task_uuid=TaskUUID(future_id) ) # first check if the task is in the app if task_status.is_done: - task_result = await celery_client.get_task_result( - task_context=async_job_name_data.model_dump(), task_uuid=TaskUUID(future_id) + task_result = TypeAdapter(FileMetaData).validate_python( + await task_manager.get_task_result( + owner_metadata=owner_metadata, + task_uuid=TaskUUID(future_id), + ) ) - assert isinstance(task_result, FileMetaData), f"{task_result=}" # nosec new_fmd = task_result assert new_fmd.location_id == location_id # nosec assert new_fmd.file_id == file_id # 
nosec response = FileUploadCompleteFutureResponse( - state=FileUploadCompleteState.OK, e_tag=new_fmd.entity_tag + state=FileUploadCompleteState.OK, + e_tag=new_fmd.entity_tag, ) else: # the task is still running diff --git a/services/storage/src/simcore_service_storage/api/rest/dependencies/celery.py b/services/storage/src/simcore_service_storage/api/rest/dependencies/celery.py index 58413bba852f..1c6cce503aa7 100644 --- a/services/storage/src/simcore_service_storage/api/rest/dependencies/celery.py +++ b/services/storage/src/simcore_service_storage/api/rest/dependencies/celery.py @@ -1,13 +1,13 @@ from typing import Annotated +from celery_library.task_manager import CeleryTaskManager from fastapi import Depends, FastAPI from servicelib.fastapi.dependencies import get_app -from ....modules.celery import get_celery_client as _get_celery_client_from_app -from ....modules.celery.client import CeleryTaskClient +from ....modules.celery import get_task_manager_from_app -def get_celery_client( +def get_task_manager( app: Annotated[FastAPI, Depends(get_app)], -) -> CeleryTaskClient: - return _get_celery_client_from_app(app) +) -> CeleryTaskManager: + return get_task_manager_from_app(app) diff --git a/services/storage/src/simcore_service_storage/api/rpc/_paths.py b/services/storage/src/simcore_service_storage/api/rpc/_paths.py index db0e69af38d7..31aaa1fdc67e 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/_paths.py +++ b/services/storage/src/simcore_service_storage/api/rpc/_paths.py @@ -1,16 +1,15 @@ import logging from pathlib import Path -from fastapi import FastAPI from models_library.api_schemas_rpc_async_jobs.async_jobs import ( AsyncJobGet, - AsyncJobNameData, ) from models_library.projects_nodes_io import LocationID +from models_library.users import UserID +from servicelib.celery.models import ExecutionMetadata, OwnerMetadata +from servicelib.celery.task_manager import TaskManager from servicelib.rabbitmq import RPCRouter -from ...modules.celery import get_celery_client -from ...modules.celery.models import TaskMetadata from .._worker_tasks._paths import compute_path_size as remote_compute_path_size from .._worker_tasks._paths import delete_paths as remote_delete_paths @@ -20,18 +19,19 @@ @router.expose(reraise_if_error_type=None) async def compute_path_size( - app: FastAPI, - job_id_data: AsyncJobNameData, + task_manager: TaskManager, + owner_metadata: OwnerMetadata, location_id: LocationID, path: Path, + user_id: UserID, ) -> AsyncJobGet: task_name = remote_compute_path_size.__name__ - task_uuid = await get_celery_client(app).submit_task( - task_metadata=TaskMetadata( + task_uuid = await task_manager.submit_task( + execution_metadata=ExecutionMetadata( name=task_name, ), - task_context=job_id_data.model_dump(), - user_id=job_id_data.user_id, + owner_metadata=owner_metadata, + user_id=user_id, location_id=location_id, path=path, ) @@ -41,18 +41,19 @@ async def compute_path_size( @router.expose(reraise_if_error_type=None) async def delete_paths( - app: FastAPI, - job_id_data: AsyncJobNameData, + task_manager: TaskManager, + owner_metadata: OwnerMetadata, location_id: LocationID, paths: set[Path], + user_id: UserID, ) -> AsyncJobGet: task_name = remote_delete_paths.__name__ - task_uuid = await get_celery_client(app).submit_task( - task_metadata=TaskMetadata( + task_uuid = await task_manager.submit_task( + execution_metadata=ExecutionMetadata( name=task_name, ), - task_context=job_id_data.model_dump(), - user_id=job_id_data.user_id, + owner_metadata=owner_metadata, + 
user_id=user_id, location_id=location_id, paths=paths, ) diff --git a/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py b/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py index ba3830c03298..e4b2e4cee76c 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py +++ b/services/storage/src/simcore_service_storage/api/rpc/_simcore_s3.py @@ -1,32 +1,42 @@ -from fastapi import FastAPI +from typing import Literal + from models_library.api_schemas_rpc_async_jobs.async_jobs import ( AsyncJobGet, - AsyncJobNameData, ) from models_library.api_schemas_storage.storage_schemas import FoldersBody from models_library.api_schemas_webserver.storage import PathToExport +from models_library.users import UserID +from servicelib.celery.models import ( + ExecutionMetadata, + OwnerMetadata, + TasksQueue, +) +from servicelib.celery.task_manager import TaskManager from servicelib.rabbitmq import RPCRouter -from ...modules.celery import get_celery_client -from ...modules.celery.models import TaskMetadata, TasksQueue -from .._worker_tasks._simcore_s3 import deep_copy_files_from_project, export_data +from .._worker_tasks._simcore_s3 import ( + deep_copy_files_from_project, + export_data, + export_data_as_download_link, +) router = RPCRouter() @router.expose(reraise_if_error_type=None) async def copy_folders_from_project( - app: FastAPI, - job_id_data: AsyncJobNameData, + task_manager: TaskManager, + owner_metadata: OwnerMetadata, body: FoldersBody, + user_id: UserID, ) -> AsyncJobGet: task_name = deep_copy_files_from_project.__name__ - task_uuid = await get_celery_client(app).submit_task( - task_metadata=TaskMetadata( + task_uuid = await task_manager.submit_task( + execution_metadata=ExecutionMetadata( name=task_name, ), - task_context=job_id_data.model_dump(), - user_id=job_id_data.user_id, + owner_metadata=owner_metadata, + user_id=user_id, body=body, ) @@ -35,17 +45,26 @@ async def copy_folders_from_project( @router.expose() async def start_export_data( - app: FastAPI, job_id_data: AsyncJobNameData, paths_to_export: list[PathToExport] + task_manager: TaskManager, + owner_metadata: OwnerMetadata, + paths_to_export: list[PathToExport], + export_as: Literal["path", "download_link"], + user_id: UserID, ) -> AsyncJobGet: - task_name = export_data.__name__ - task_uuid = await get_celery_client(app).submit_task( - task_metadata=TaskMetadata( + if export_as == "path": + task_name = export_data.__name__ + elif export_as == "download_link": + task_name = export_data_as_download_link.__name__ + else: + raise ValueError(f"Invalid export_as value: {export_as}") + task_uuid = await task_manager.submit_task( + execution_metadata=ExecutionMetadata( name=task_name, ephemeral=False, queue=TasksQueue.CPU_BOUND, ), - task_context=job_id_data.model_dump(), - user_id=job_id_data.user_id, + owner_metadata=owner_metadata, + user_id=user_id, paths_to_export=paths_to_export, ) return AsyncJobGet(job_id=task_uuid, job_name=task_name) diff --git a/services/storage/src/simcore_service_storage/api/rpc/routes.py b/services/storage/src/simcore_service_storage/api/rpc/routes.py index db6469ed380e..ebf1ba604112 100644 --- a/services/storage/src/simcore_service_storage/api/rpc/routes.py +++ b/services/storage/src/simcore_service_storage/api/rpc/routes.py @@ -1,12 +1,14 @@ import logging +from celery_library.rpc import _async_jobs from fastapi import FastAPI from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE from servicelib.logging_utils import log_context from 
servicelib.rabbitmq import RPCRouter +from simcore_service_storage.modules.celery import get_task_manager_from_app from ...modules.rabbitmq import get_rabbitmq_rpc_server -from . import _async_jobs, _paths, _simcore_s3 +from . import _paths, _simcore_s3 _logger = logging.getLogger(__name__) @@ -18,7 +20,7 @@ ] -def setup_rpc_api_routes(app: FastAPI) -> None: +def setup_rpc_routes(app: FastAPI) -> None: async def startup() -> None: with log_context( _logger, @@ -26,7 +28,10 @@ async def startup() -> None: msg="Storage startup RPC API Routes", ): rpc_server = get_rabbitmq_rpc_server(app) + task_manager = get_task_manager_from_app(app) for router in ROUTERS: - await rpc_server.register_router(router, STORAGE_RPC_NAMESPACE, app) + await rpc_server.register_router( + router, STORAGE_RPC_NAMESPACE, task_manager=task_manager + ) app.add_event_handler("startup", startup) diff --git a/services/storage/src/simcore_service_storage/cli.py b/services/storage/src/simcore_service_storage/cli.py index bcf4086f4aa4..f81e7cd8a38f 100644 --- a/services/storage/src/simcore_service_storage/cli.py +++ b/services/storage/src/simcore_service_storage/cli.py @@ -31,7 +31,7 @@ def run(): """Runs application""" typer.secho("Sorry, this entrypoint is intentionally disabled. Use instead") typer.secho( - f"$ uvicorn {PROJECT_NAME}.main:the_app", + f"$ uvicorn --factory {PROJECT_NAME}.main:app_factory", fg=typer.colors.BLUE, ) diff --git a/services/storage/src/simcore_service_storage/core/application.py b/services/storage/src/simcore_service_storage/core/application.py index d3d2e0c14ff4..cf3bb4546fcc 100644 --- a/services/storage/src/simcore_service_storage/core/application.py +++ b/services/storage/src/simcore_service_storage/core/application.py @@ -32,41 +32,21 @@ APP_WORKER_STARTED_BANNER_MSG, ) from ..api.rest.routes import setup_rest_api_routes -from ..api.rpc.routes import setup_rpc_api_routes +from ..api.rpc.routes import setup_rpc_routes from ..dsm import setup_dsm from ..dsm_cleaner import setup_dsm_cleaner from ..exceptions.handlers import set_exception_handlers -from ..modules.celery import setup_celery_client +from ..modules.celery import setup_task_manager from ..modules.db import setup_db -from ..modules.long_running_tasks import setup_rest_api_long_running_tasks_for_uploads from ..modules.rabbitmq import setup as setup_rabbitmq from ..modules.redis import setup as setup_redis from ..modules.s3 import setup_s3 from .settings import ApplicationSettings -_LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR -_NOISY_LOGGERS = ( - "aio_pika", - "aiobotocore", - "aiormq", - "botocore", - "httpcore", - "urllib3", - "werkzeug", -) _logger = logging.getLogger(__name__) def create_app(settings: ApplicationSettings) -> FastAPI: # noqa: C901 - # keep mostly quiet noisy loggers - quiet_level: int = max( - min(logging.root.level + _LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING - ) - for name in _NOISY_LOGGERS: - logging.getLogger(name).setLevel(quiet_level) - - _logger.info("app settings: %s", settings.model_dump_json(indent=1)) - app = FastAPI( debug=settings.SC_BOOT_MODE in [BootModeEnum.DEBUG, BootModeEnum.DEVELOPMENT, BootModeEnum.LOCAL], @@ -88,13 +68,15 @@ def create_app(settings: ApplicationSettings) -> FastAPI: # noqa: C901 setup_db(app) setup_s3(app) - setup_client_session(app) + setup_client_session(app, tracing_settings=settings.STORAGE_TRACING) + + if settings.STORAGE_CELERY: + setup_task_manager(app, settings=settings.STORAGE_CELERY) if not settings.STORAGE_WORKER_MODE: setup_rabbitmq(app) - 
setup_rpc_api_routes(app) - setup_celery_client(app) - setup_rest_api_long_running_tasks_for_uploads(app) + setup_rpc_routes(app) + setup_rest_api_routes(app, API_VTAG) set_exception_handlers(app) diff --git a/services/storage/src/simcore_service_storage/core/settings.py b/services/storage/src/simcore_service_storage/core/settings.py index 4d246a89eeb0..a15061179b5a 100644 --- a/services/storage/src/simcore_service_storage/core/settings.py +++ b/services/storage/src/simcore_service_storage/core/settings.py @@ -1,8 +1,8 @@ from typing import Annotated, Self +from common_library.logging.logging_utils_filtering import LoggerName, MessageSubstring from fastapi import FastAPI from pydantic import AliasChoices, Field, PositiveInt, field_validator, model_validator -from servicelib.logging_utils_filtering import LoggerName, MessageSubstring from settings_library.application import BaseApplicationSettings from settings_library.basic_types import LogLevel, PortInt from settings_library.celery import CelerySettings @@ -36,7 +36,7 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ] STORAGE_REDIS: Annotated[ - RedisSettings | None, Field(json_schema_extra={"auto_default_from_env": True}) + RedisSettings, Field(json_schema_extra={"auto_default_from_env": True}) ] STORAGE_S3: Annotated[ @@ -75,10 +75,7 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ] STORAGE_RABBITMQ: Annotated[ - RabbitSettings | None, - Field( - json_schema_extra={"auto_default_from_env": True}, - ), + RabbitSettings, Field(json_schema_extra={"auto_default_from_env": True}) ] STORAGE_S3_CLIENT_MAX_TRANSFER_CONCURRENCY: Annotated[ diff --git a/services/storage/src/simcore_service_storage/dsm_cleaner.py b/services/storage/src/simcore_service_storage/dsm_cleaner.py index d09c83e4f5d7..6194d61a8353 100644 --- a/services/storage/src/simcore_service_storage/dsm_cleaner.py +++ b/services/storage/src/simcore_service_storage/dsm_cleaner.py @@ -23,8 +23,8 @@ from datetime import timedelta from typing import cast +from common_library.async_tools import cancel_wait_task from fastapi import FastAPI -from servicelib.async_utils import cancel_wait_task from servicelib.background_task_utils import exclusive_periodic from servicelib.logging_utils import log_context diff --git a/services/storage/src/simcore_service_storage/main.py b/services/storage/src/simcore_service_storage/main.py index a37ead2cefc7..f2282decaa59 100644 --- a/services/storage/src/simcore_service_storage/main.py +++ b/services/storage/src/simcore_service_storage/main.py @@ -1,20 +1,41 @@ """Main application to be deployed in for example uvicorn.""" import logging +from typing import Final -from servicelib.logging_utils import config_all_loggers +from common_library.json_serialization import json_dumps +from fastapi import FastAPI +from servicelib.fastapi.logging_lifespan import create_logging_shutdown_event from simcore_service_storage.core.application import create_app from simcore_service_storage.core.settings import ApplicationSettings -_settings = ApplicationSettings.create_from_envs() +_logger = logging.getLogger(__name__) -# SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 -logging.basicConfig(level=_settings.log_level) # NOSONAR -logging.root.setLevel(_settings.log_level) -config_all_loggers( - log_format_local_dev_enabled=_settings.STORAGE_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=_settings.STORAGE_LOG_FILTER_MAPPING, - tracing_settings=_settings.STORAGE_TRACING, +_NOISY_LOGGERS: 
Final[tuple[str, ...]] = ( + "aio_pika", + "aiobotocore", + "aiormq", + "botocore", + "httpcore", + "urllib3", + "werkzeug", ) -app = create_app(_settings) + +def app_factory() -> FastAPI: + app_settings = ApplicationSettings.create_from_envs() + logging_shutdown_event = create_logging_shutdown_event( + log_format_local_dev_enabled=app_settings.STORAGE_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.STORAGE_LOG_FILTER_MAPPING, + tracing_settings=app_settings.STORAGE_TRACING, + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + ) + + _logger.info( + "Application settings: %s", + json_dumps(app_settings, indent=2, sort_keys=True), + ) + app = create_app(settings=app_settings) + app.add_event_handler("shutdown", logging_shutdown_event) + return app diff --git a/services/storage/src/simcore_service_storage/modules/celery/__init__.py b/services/storage/src/simcore_service_storage/modules/celery/__init__.py index cbf60fda44e8..0dcb3a2ea5e6 100644 --- a/services/storage/src/simcore_service_storage/modules/celery/__init__.py +++ b/services/storage/src/simcore_service_storage/modules/celery/__init__.py @@ -1,56 +1,59 @@ import logging -from asyncio import AbstractEventLoop +from celery_library.backends.redis import RedisTaskInfoStore +from celery_library.common import create_app +from celery_library.task_manager import CeleryTaskManager +from celery_library.types import register_celery_types, register_pydantic_types from fastapi import FastAPI -from servicelib.redis._client import RedisClientSDK +from models_library.api_schemas_storage.storage_schemas import ( + FileUploadCompletionBody, + FoldersBody, +) +from servicelib.logging_utils import log_context +from servicelib.redis import RedisClientSDK +from settings_library.celery import CelerySettings from settings_library.redis import RedisDatabase -from ..._meta import APP_NAME -from ...core.settings import get_application_settings -from ._celery_types import register_celery_types -from ._common import create_app -from .backends._redis import RedisTaskInfoStore -from .client import CeleryTaskClient +from ...models import FileMetaData _logger = logging.getLogger(__name__) -def setup_celery_client(app: FastAPI) -> None: +def setup_task_manager(app: FastAPI, settings: CelerySettings) -> None: async def on_startup() -> None: - application_settings = get_application_settings(app) - celery_settings = application_settings.STORAGE_CELERY - assert celery_settings # nosec - celery_app = create_app(celery_settings) - redis_client_sdk = RedisClientSDK( - celery_settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( - RedisDatabase.CELERY_TASKS - ), - client_name=f"{APP_NAME}.celery_tasks", - ) - - app.state.celery_client = CeleryTaskClient( - celery_app, - celery_settings, - RedisTaskInfoStore(redis_client_sdk), - ) - - register_celery_types() + with log_context(_logger, logging.INFO, "Setting up Celery"): + redis_client_sdk = RedisClientSDK( + settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( + RedisDatabase.CELERY_TASKS + ), + client_name="storage_celery_tasks", + ) + app.state.celery_tasks_redis_client_sdk = redis_client_sdk + await redis_client_sdk.setup() + + app.state.task_manager = CeleryTaskManager( + create_app(settings), + settings, + RedisTaskInfoStore(redis_client_sdk), + ) + + register_celery_types() + register_pydantic_types(FileUploadCompletionBody, FileMetaData, FoldersBody) + + async def on_shutdown() -> None: + with log_context(_logger, logging.INFO, "Shutting down Celery"): + redis_client_sdk: 
RedisClientSDK | None = ( + app.state.celery_tasks_redis_client_sdk + ) + if redis_client_sdk: + await redis_client_sdk.shutdown() app.add_event_handler("startup", on_startup) + app.add_event_handler("shutdown", on_shutdown) -def get_celery_client(app: FastAPI) -> CeleryTaskClient: - assert hasattr(app.state, "celery_client") # nosec - celery_client = app.state.celery_client - assert isinstance(celery_client, CeleryTaskClient) - return celery_client - - -def get_event_loop(app: FastAPI) -> AbstractEventLoop: - event_loop = app.state.event_loop - assert isinstance(event_loop, AbstractEventLoop) - return event_loop - - -def set_event_loop(app: FastAPI, event_loop: AbstractEventLoop) -> None: - app.state.event_loop = event_loop +def get_task_manager_from_app(app: FastAPI) -> CeleryTaskManager: + assert hasattr(app.state, "task_manager") # nosec + task_manager = app.state.task_manager + assert isinstance(task_manager, CeleryTaskManager) # nosec + return task_manager diff --git a/services/storage/src/simcore_service_storage/modules/celery/client.py b/services/storage/src/simcore_service_storage/modules/celery/client.py deleted file mode 100644 index f68baf558fe2..000000000000 --- a/services/storage/src/simcore_service_storage/modules/celery/client.py +++ /dev/null @@ -1,157 +0,0 @@ -import logging -from dataclasses import dataclass -from typing import Any -from uuid import uuid4 - -from celery import Celery # type: ignore[import-untyped] -from celery.contrib.abortable import ( # type: ignore[import-untyped] - AbortableAsyncResult, -) -from common_library.async_tools import make_async -from models_library.progress_bar import ProgressReport -from servicelib.logging_utils import log_context -from settings_library.celery import CelerySettings - -from .models import ( - Task, - TaskContext, - TaskID, - TaskInfoStore, - TaskMetadata, - TaskState, - TaskStatus, - TaskUUID, - build_task_id, -) - -_logger = logging.getLogger(__name__) - - -_MIN_PROGRESS_VALUE = 0.0 -_MAX_PROGRESS_VALUE = 1.0 - - -@dataclass -class CeleryTaskClient: - _celery_app: Celery - _celery_settings: CelerySettings - _task_info_store: TaskInfoStore - - async def submit_task( - self, - task_metadata: TaskMetadata, - *, - task_context: TaskContext, - **task_params, - ) -> TaskUUID: - with log_context( - _logger, - logging.DEBUG, - msg=f"Submit {task_metadata.name=}: {task_context=} {task_params=}", - ): - task_uuid = uuid4() - task_id = build_task_id(task_context, task_uuid) - self._celery_app.send_task( - task_metadata.name, - task_id=task_id, - kwargs=task_params, - queue=task_metadata.queue.value, - ) - - expiry = ( - self._celery_settings.CELERY_EPHEMERAL_RESULT_EXPIRES - if task_metadata.ephemeral - else self._celery_settings.CELERY_RESULT_EXPIRES - ) - await self._task_info_store.create_task( - task_id, task_metadata, expiry=expiry - ) - return task_uuid - - @make_async() - def _abort_task(self, task_id: TaskID) -> None: - AbortableAsyncResult(task_id, app=self._celery_app).abort() - - async def cancel_task(self, task_context: TaskContext, task_uuid: TaskUUID) -> None: - with log_context( - _logger, - logging.DEBUG, - msg=f"task cancellation: {task_context=} {task_uuid=}", - ): - task_id = build_task_id(task_context, task_uuid) - if not (await self.get_task_status(task_context, task_uuid)).is_done: - await self._abort_task(task_id) - await self._task_info_store.remove_task(task_id) - - @make_async() - def _forget_task(self, task_id: TaskID) -> None: - AbortableAsyncResult(task_id, app=self._celery_app).forget() - - async def 
get_task_result( - self, task_context: TaskContext, task_uuid: TaskUUID - ) -> Any: - with log_context( - _logger, - logging.DEBUG, - msg=f"Get task result: {task_context=} {task_uuid=}", - ): - task_id = build_task_id(task_context, task_uuid) - async_result = self._celery_app.AsyncResult(task_id) - result = async_result.result - if async_result.ready(): - task_metadata = await self._task_info_store.get_task_metadata(task_id) - if task_metadata is not None and task_metadata.ephemeral: - await self._forget_task(task_id) - await self._task_info_store.remove_task(task_id) - return result - - async def _get_task_progress_report( - self, task_context: TaskContext, task_uuid: TaskUUID, task_state: TaskState - ) -> ProgressReport: - if task_state in (TaskState.STARTED, TaskState.RETRY, TaskState.ABORTED): - task_id = build_task_id(task_context, task_uuid) - progress = await self._task_info_store.get_task_progress(task_id) - if progress is not None: - return progress - if task_state in ( - TaskState.SUCCESS, - TaskState.FAILURE, - ): - return ProgressReport( - actual_value=_MAX_PROGRESS_VALUE, total=_MAX_PROGRESS_VALUE - ) - - # task is pending - return ProgressReport( - actual_value=_MIN_PROGRESS_VALUE, total=_MAX_PROGRESS_VALUE - ) - - @make_async() - def _get_task_celery_state(self, task_id: TaskID) -> TaskState: - return TaskState(self._celery_app.AsyncResult(task_id).state) - - async def get_task_status( - self, task_context: TaskContext, task_uuid: TaskUUID - ) -> TaskStatus: - with log_context( - _logger, - logging.DEBUG, - msg=f"Getting task status: {task_context=} {task_uuid=}", - ): - task_id = build_task_id(task_context, task_uuid) - task_state = await self._get_task_celery_state(task_id) - return TaskStatus( - task_uuid=task_uuid, - task_state=task_state, - progress_report=await self._get_task_progress_report( - task_context, task_uuid, task_state - ), - ) - - async def list_tasks(self, task_context: TaskContext) -> list[Task]: - with log_context( - _logger, - logging.DEBUG, - msg=f"Listing tasks: {task_context=}", - ): - return await self._task_info_store.list_tasks(task_context) diff --git a/services/storage/src/simcore_service_storage/modules/celery/models.py b/services/storage/src/simcore_service_storage/modules/celery/models.py deleted file mode 100644 index 8b19d124ff17..000000000000 --- a/services/storage/src/simcore_service_storage/modules/celery/models.py +++ /dev/null @@ -1,92 +0,0 @@ -from datetime import timedelta -from enum import StrEnum -from typing import Annotated, Any, Final, Protocol, TypeAlias -from uuid import UUID - -from models_library.progress_bar import ProgressReport -from pydantic import BaseModel, StringConstraints - -TaskContext: TypeAlias = dict[str, Any] -TaskID: TypeAlias = str -TaskName: TypeAlias = Annotated[ - str, StringConstraints(strip_whitespace=True, min_length=1) -] -TaskUUID: TypeAlias = UUID - -_CELERY_TASK_ID_KEY_SEPARATOR: Final[str] = ":" - - -def build_task_id_prefix(task_context: TaskContext) -> str: - return _CELERY_TASK_ID_KEY_SEPARATOR.join( - [f"{task_context[key]}" for key in sorted(task_context)] - ) - - -def build_task_id(task_context: TaskContext, task_uuid: TaskUUID) -> TaskID: - return _CELERY_TASK_ID_KEY_SEPARATOR.join( - [build_task_id_prefix(task_context), f"{task_uuid}"] - ) - - -class TaskState(StrEnum): - PENDING = "PENDING" - STARTED = "STARTED" - RETRY = "RETRY" - SUCCESS = "SUCCESS" - FAILURE = "FAILURE" - ABORTED = "ABORTED" - - -class TasksQueue(StrEnum): - CPU_BOUND = "cpu_bound" - DEFAULT = "default" - - -class 
TaskMetadata(BaseModel): - name: TaskName - ephemeral: bool = True - queue: TasksQueue = TasksQueue.DEFAULT - - -class Task(BaseModel): - uuid: TaskUUID - metadata: TaskMetadata - - -_TASK_DONE = {TaskState.SUCCESS, TaskState.FAILURE, TaskState.ABORTED} - - -class TaskInfoStore(Protocol): - async def create_task( - self, - task_id: TaskID, - task_metadata: TaskMetadata, - expiry: timedelta, - ) -> None: ... - - async def exists_task(self, task_id: TaskID) -> bool: ... - - async def get_task_metadata(self, task_id: TaskID) -> TaskMetadata | None: ... - - async def get_task_progress(self, task_id: TaskID) -> ProgressReport | None: ... - - async def list_tasks(self, task_context: TaskContext) -> list[Task]: ... - - async def remove_task(self, task_id: TaskID) -> None: ... - - async def set_task_progress( - self, task_id: TaskID, report: ProgressReport - ) -> None: ... - - -class TaskStatus(BaseModel): - task_uuid: TaskUUID - task_state: TaskState - progress_report: ProgressReport - - @property - def is_done(self) -> bool: - return self.task_state in _TASK_DONE - - -TaskId: TypeAlias = str diff --git a/services/storage/src/simcore_service_storage/modules/celery/signals.py b/services/storage/src/simcore_service_storage/modules/celery/signals.py deleted file mode 100644 index 113d26c3566b..000000000000 --- a/services/storage/src/simcore_service_storage/modules/celery/signals.py +++ /dev/null @@ -1,99 +0,0 @@ -import asyncio -import datetime -import logging -import threading -from typing import Final - -from asgi_lifespan import LifespanManager -from celery import Celery # type: ignore[import-untyped] -from fastapi import FastAPI -from servicelib.logging_utils import log_context -from servicelib.redis._client import RedisClientSDK -from settings_library.redis import RedisDatabase -from simcore_service_storage._meta import APP_NAME - -from ...core.application import create_app -from ...core.settings import ApplicationSettings -from . 
import set_event_loop -from .backends._redis import RedisTaskInfoStore -from .utils import ( - get_fastapi_app, - set_celery_worker, - set_fastapi_app, -) -from .worker import CeleryTaskWorker - -_logger = logging.getLogger(__name__) - -_SHUTDOWN_TIMEOUT: Final[float] = datetime.timedelta(seconds=10).total_seconds() -_STARTUP_TIMEOUT: Final[float] = datetime.timedelta(minutes=1).total_seconds() - - -def on_worker_init(sender, **_kwargs) -> None: - startup_complete_event = threading.Event() - - def _init(startup_complete_event: threading.Event) -> None: - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - shutdown_event = asyncio.Event() - - app_settings = ApplicationSettings.create_from_envs() - fastapi_app = create_app(app_settings) - - assert app_settings.STORAGE_CELERY - celery_settings = app_settings.STORAGE_CELERY - - async def setup_task_worker(): - redis_client_sdk = RedisClientSDK( - celery_settings.CELERY_REDIS_RESULT_BACKEND.build_redis_dsn( - RedisDatabase.CELERY_TASKS - ), - client_name=f"{APP_NAME}.celery_tasks", - ) - - set_celery_worker( - sender.app, CeleryTaskWorker(RedisTaskInfoStore(redis_client_sdk)) - ) - - async def fastapi_lifespan( - startup_complete_event: threading.Event, shutdown_event: asyncio.Event - ) -> None: - async with LifespanManager( - fastapi_app, - startup_timeout=_STARTUP_TIMEOUT, - shutdown_timeout=_SHUTDOWN_TIMEOUT, - ): - try: - _logger.info("fastapi APP started!") - startup_complete_event.set() - await shutdown_event.wait() - except asyncio.CancelledError: - _logger.warning("Lifespan task cancelled") - - fastapi_app.state.shutdown_event = shutdown_event - set_event_loop(fastapi_app, loop) - - set_fastapi_app(sender.app, fastapi_app) - loop.run_until_complete(setup_task_worker()) - loop.run_until_complete( - fastapi_lifespan(startup_complete_event, shutdown_event) - ) - - thread = threading.Thread( - group=None, - target=_init, - name="fastapi_app", - args=(startup_complete_event,), - daemon=True, - ) - thread.start() - # ensure the fastapi app is ready before going on - startup_complete_event.wait(_STARTUP_TIMEOUT * 1.1) - - -def on_worker_shutdown(sender, **_kwargs) -> None: - with log_context(_logger, logging.INFO, "Worker Shuts-down"): - assert isinstance(sender.app, Celery) - fastapi_app = get_fastapi_app(sender.app) - assert isinstance(fastapi_app, FastAPI) - fastapi_app.state.shutdown_event.set() diff --git a/services/storage/src/simcore_service_storage/modules/celery/utils.py b/services/storage/src/simcore_service_storage/modules/celery/utils.py deleted file mode 100644 index d09c1a1ce410..000000000000 --- a/services/storage/src/simcore_service_storage/modules/celery/utils.py +++ /dev/null @@ -1,27 +0,0 @@ -from celery import Celery # type: ignore[import-untyped] -from fastapi import FastAPI - -from .worker import CeleryTaskWorker - -_WORKER_KEY = "celery_worker" -_FASTAPI_APP_KEY = "fastapi_app" - - -def set_celery_worker(celery_app: Celery, worker: CeleryTaskWorker) -> None: - celery_app.conf[_WORKER_KEY] = worker - - -def get_celery_worker(celery_app: Celery) -> CeleryTaskWorker: - worker = celery_app.conf[_WORKER_KEY] - assert isinstance(worker, CeleryTaskWorker) - return worker - - -def set_fastapi_app(celery_app: Celery, fastapi_app: FastAPI) -> None: - celery_app.conf[_FASTAPI_APP_KEY] = fastapi_app - - -def get_fastapi_app(celery_app: Celery) -> FastAPI: - fastapi_app = celery_app.conf[_FASTAPI_APP_KEY] - assert isinstance(fastapi_app, FastAPI) - return fastapi_app diff --git 
a/services/storage/src/simcore_service_storage/modules/celery/worker.py b/services/storage/src/simcore_service_storage/modules/celery/worker.py deleted file mode 100644 index a5e98ac09df7..000000000000 --- a/services/storage/src/simcore_service_storage/modules/celery/worker.py +++ /dev/null @@ -1,19 +0,0 @@ -import logging -from dataclasses import dataclass - -from models_library.progress_bar import ProgressReport - -from ..celery.models import TaskID, TaskInfoStore - -_logger = logging.getLogger(__name__) - - -@dataclass -class CeleryTaskWorker: - _task_info_store: TaskInfoStore - - async def set_task_progress(self, task_id: TaskID, report: ProgressReport) -> None: - await self._task_info_store.set_task_progress( - task_id=task_id, - report=report, - ) diff --git a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py index 58febcb61f68..f2e90e900244 100644 --- a/services/storage/src/simcore_service_storage/modules/celery/worker_main.py +++ b/services/storage/src/simcore_service_storage/modules/celery/worker_main.py @@ -1,32 +1,40 @@ """Main application to be deployed in for example uvicorn.""" -import logging - from celery.signals import worker_init, worker_shutdown # type: ignore[import-untyped] -from servicelib.logging_utils import config_all_loggers -from simcore_service_storage.api._worker_tasks.tasks import setup_worker_tasks - -from ...core.settings import ApplicationSettings -from ._common import create_app as create_celery_app -from .signals import ( +from celery_library.common import create_app as create_celery_app +from celery_library.signals import ( on_worker_init, on_worker_shutdown, ) +from servicelib.fastapi.celery.app_server import FastAPIAppServer +from servicelib.logging_utils import setup_loggers + +from ...api._worker_tasks.tasks import setup_worker_tasks +from ...core.application import create_app +from ...core.settings import ApplicationSettings _settings = ApplicationSettings.create_from_envs() -logging.basicConfig(level=_settings.log_level) # NOSONAR -logging.root.setLevel(_settings.log_level) -config_all_loggers( +setup_loggers( log_format_local_dev_enabled=_settings.STORAGE_LOG_FORMAT_LOCAL_DEV_ENABLED, logger_filter_mapping=_settings.STORAGE_LOG_FILTER_MAPPING, tracing_settings=_settings.STORAGE_TRACING, + log_base_level=_settings.log_level, + noisy_loggers=None, ) -assert _settings.STORAGE_CELERY +assert _settings.STORAGE_CELERY # nosec app = create_celery_app(_settings.STORAGE_CELERY) -worker_init.connect(on_worker_init) + +app_server = FastAPIAppServer(app=create_app(_settings)) + + +def worker_init_wrapper(sender, **kwargs): + return on_worker_init(sender, app_server, **kwargs) + + +worker_init.connect(worker_init_wrapper) worker_shutdown.connect(on_worker_shutdown) diff --git a/services/storage/src/simcore_service_storage/modules/db/__init__.py b/services/storage/src/simcore_service_storage/modules/db/__init__.py index 4fb5dacb2a7c..1b29632d6ed3 100644 --- a/services/storage/src/simcore_service_storage/modules/db/__init__.py +++ b/services/storage/src/simcore_service_storage/modules/db/__init__.py @@ -6,6 +6,7 @@ from sqlalchemy.ext.asyncio import AsyncEngine from tenacity import retry +from ..._meta import APP_NAME from ...core.settings import get_application_settings _logger = logging.getLogger(__name__) @@ -16,7 +17,9 @@ def setup_db(app: FastAPI) -> None: async def _on_startup() -> None: app_settings = get_application_settings(app) assert 
app_settings.STORAGE_POSTGRES is not None # nosec - await connect_to_db(app, app_settings.STORAGE_POSTGRES) + await connect_to_db( + app, app_settings.STORAGE_POSTGRES, application_name=APP_NAME + ) async def _on_shutdown() -> None: await close_db_connection(app) diff --git a/services/storage/src/simcore_service_storage/modules/long_running_tasks.py b/services/storage/src/simcore_service_storage/modules/long_running_tasks.py deleted file mode 100644 index 229c1bd3fefe..000000000000 --- a/services/storage/src/simcore_service_storage/modules/long_running_tasks.py +++ /dev/null @@ -1,11 +0,0 @@ -from fastapi import FastAPI -from servicelib.fastapi.long_running_tasks._server import setup - -from .._meta import API_VTAG - - -def setup_rest_api_long_running_tasks_for_uploads(app: FastAPI) -> None: - setup( - app, - router_prefix=f"/{API_VTAG}/futures", - ) diff --git a/services/storage/src/simcore_service_storage/modules/redis.py b/services/storage/src/simcore_service_storage/modules/redis.py index 6b2c15476ec8..8b28eb58264e 100644 --- a/services/storage/src/simcore_service_storage/modules/redis.py +++ b/services/storage/src/simcore_service_storage/modules/redis.py @@ -15,11 +15,11 @@ def setup(app: FastAPI) -> None: async def on_startup() -> None: app.state.redis_client_sdk = None redis_settings = get_application_settings(app).STORAGE_REDIS - assert redis_settings # nosec redis_locks_dsn = redis_settings.build_redis_dsn(RedisDatabase.LOCKS) app.state.redis_client_sdk = RedisClientSDK( redis_locks_dsn, client_name=APP_NAME ) + await app.state.redis_client_sdk.setup() async def on_shutdown() -> None: redis_client_sdk = app.state.redis_client_sdk diff --git a/services/storage/src/simcore_service_storage/modules/s3.py b/services/storage/src/simcore_service_storage/modules/s3.py index 7fb81b299733..5345888032d3 100644 --- a/services/storage/src/simcore_service_storage/modules/s3.py +++ b/services/storage/src/simcore_service_storage/modules/s3.py @@ -7,19 +7,42 @@ from common_library.json_serialization import json_dumps from fastapi import FastAPI from pydantic import TypeAdapter -from servicelib.logging_utils import log_context +from tenacity import retry, wait_random_exponential from tenacity.asyncio import AsyncRetrying from tenacity.before_sleep import before_sleep_log from tenacity.wait import wait_fixed from types_aiobotocore_s3.literals import BucketLocationConstraintType from ..constants import RETRY_WAIT_SECS -from ..core.settings import get_application_settings +from ..core.settings import ApplicationSettings, get_application_settings from ..exceptions.errors import ConfigurationError _logger = logging.getLogger(__name__) +@retry( + wait=wait_random_exponential(), + before_sleep=before_sleep_log(_logger, logging.WARNING), + reraise=True, +) +async def _ensure_s3_bucket( + client: SimcoreS3API, settings: ApplicationSettings +) -> None: + assert settings.STORAGE_S3 # nosec + if await client.bucket_exists(bucket=settings.STORAGE_S3.S3_BUCKET_NAME): + _logger.info( + "S3 bucket %s exists already, skipping creation", + settings.STORAGE_S3.S3_BUCKET_NAME, + ) + return + await client.create_bucket( + bucket=settings.STORAGE_S3.S3_BUCKET_NAME, + region=TypeAdapter( + BucketLocationConstraintType | Literal["us-east-1"] + ).validate_python(settings.STORAGE_S3.S3_REGION), + ) + + def setup_s3(app: FastAPI) -> None: async def _on_startup() -> None: app.state.s3_client = None @@ -44,14 +67,7 @@ async def _on_startup() -> None: assert client # nosec app.state.s3_client = client - with 
log_context(_logger, logging.DEBUG, msg="setup.s3_bucket.cleanup_ctx"): - assert settings.STORAGE_S3 # nosec - await client.create_bucket( - bucket=settings.STORAGE_S3.S3_BUCKET_NAME, - region=TypeAdapter( - BucketLocationConstraintType | Literal["us-east-1"] - ).validate_python(settings.STORAGE_S3.S3_REGION), - ) + await _ensure_s3_bucket(client, settings) async def _on_shutdown() -> None: if app.state.s3_client: diff --git a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py index 5187e2345511..7e2dbdc5baf3 100644 --- a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py +++ b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py @@ -7,7 +7,7 @@ from contextlib import suppress from dataclasses import dataclass from pathlib import Path -from typing import Any, Final, cast +from typing import Any, Final from aws_library.s3 import ( CopiedBytesTransferredCallback, @@ -489,18 +489,23 @@ async def create_file_upload_links( file_size_bytes ): # create multipart links - assert file_size_bytes # nosec - multipart_presigned_links = await get_s3_client( - self.app - ).create_multipart_upload_links( - bucket=fmd.bucket_name, - object_key=fmd.file_id, - file_size=file_size_bytes, - expiration_secs=get_application_settings( + with log_context( + logger=_logger, + level=logging.DEBUG, + msg=f"Creating multipart upload links for {file_id=}", + ): + assert file_size_bytes # nosec + multipart_presigned_links = await get_s3_client( self.app - ).STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS, - sha256_checksum=fmd.sha256_checksum, - ) + ).create_multipart_upload_links( + bucket=fmd.bucket_name, + object_key=fmd.file_id, + file_size=file_size_bytes, + expiration_secs=get_application_settings( + self.app + ).STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS, + sha256_checksum=fmd.sha256_checksum, + ) # update the database so we keep the upload id fmd.upload_id = multipart_presigned_links.upload_id await FileMetaDataRepository.instance(get_db_engine(self.app)).upsert( @@ -512,25 +517,35 @@ async def create_file_upload_links( ) if link_type == LinkType.PRESIGNED: # create single presigned link - single_presigned_link = await get_s3_client( - self.app - ).create_single_presigned_upload_link( - bucket=self.simcore_bucket_name, - object_key=fmd.file_id, - expiration_secs=get_application_settings( + with log_context( + logger=_logger, + level=logging.DEBUG, + msg=f"Creating single presigned upload link for {file_id=}", + ): + single_presigned_link = await get_s3_client( self.app - ).STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS, - ) + ).create_single_presigned_upload_link( + bucket=self.simcore_bucket_name, + object_key=fmd.file_id, + expiration_secs=get_application_settings( + self.app + ).STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS, + ) return UploadLinks( [single_presigned_link], file_size_bytes or MAX_LINK_CHUNK_BYTE_SIZE[link_type], ) # user wants just the s3 link - s3_link = get_s3_client(self.app).compute_s3_url( - bucket=self.simcore_bucket_name, - object_key=TypeAdapter(SimcoreS3FileID).validate_python(file_id), - ) + with log_context( + logger=_logger, + level=logging.DEBUG, + msg=f"Compute S3 link for file_id={file_id}", + ): + s3_link = get_s3_client(self.app).compute_s3_url( + bucket=self.simcore_bucket_name, + object_key=TypeAdapter(SimcoreS3FileID).validate_python(file_id), + ) return UploadLinks( [s3_link], file_size_bytes or MAX_LINK_CHUNK_BYTE_SIZE[link_type] ) @@ -619,7 +634,7 @@ async 
def create_file_download_link( 4. Raises FileAccessRightError if the user does not have access to the file """ directory_file_id = await get_directory_file_id( - get_db_engine(self.app), cast(SimcoreS3FileID, file_id) + get_db_engine(self.app), file_id ) can = await AccessLayerRepository.instance( get_db_engine(self.app) @@ -687,45 +702,51 @@ async def delete_file( # Only use this in those circumstances where a collaborator requires to delete a file (the current # permissions model will not allow him to do so, even though this is a legitimate action) # SEE https://github.com/ITISFoundation/osparc-simcore/issues/5159 + with log_context( + logger=_logger, level=logging.DEBUG, msg=f"Deleting file {file_id=}" + ): + if enforce_access_rights: + can = await AccessLayerRepository.instance( + get_db_engine(self.app) + ).get_file_access_rights(user_id=user_id, file_id=file_id) + if not can.delete: + raise FileAccessRightError(access_right="delete", file_id=file_id) - if enforce_access_rights: - can = await AccessLayerRepository.instance( - get_db_engine(self.app) - ).get_file_access_rights(user_id=user_id, file_id=file_id) - if not can.delete: - raise FileAccessRightError(access_right="delete", file_id=file_id) - - try: - await get_s3_client(self.app).delete_objects_recursively( - bucket=self.simcore_bucket_name, - prefix=file_id, - ) - except S3KeyNotFoundError: - _logger.warning("File %s not found in S3", file_id) - # we still need to clean up the database entry (it exists) - # and to invalidate the size of the parent directory + try: + await get_s3_client(self.app).delete_objects_recursively( + bucket=self.simcore_bucket_name, + prefix=file_id, + ) + except S3KeyNotFoundError: + _logger.warning("File %s not found in S3", file_id) + # we still need to clean up the database entry (it exists) + # and to invalidate the size of the parent directory - async with transaction_context(get_db_engine(self.app)) as connection: - file_meta_data_repo = FileMetaDataRepository.instance( - get_db_engine(self.app) - ) - await file_meta_data_repo.delete(connection=connection, file_ids=[file_id]) - - if parent_dir_fmds := await file_meta_data_repo.list_filter_with_partial_file_id( - connection=connection, - user_or_project_filter=UserOrProjectFilter( - user_id=user_id, project_ids=[] - ), - file_id_prefix=compute_file_id_prefix(file_id, 2), - partial_file_id=None, - is_directory=True, - sha256_checksum=None, - ): - parent_dir_fmd = max(parent_dir_fmds, key=lambda fmd: len(fmd.file_id)) - parent_dir_fmd.file_size = UNDEFINED_SIZE - await file_meta_data_repo.upsert( - connection=connection, fmd=parent_dir_fmd + async with transaction_context(get_db_engine(self.app)) as connection: + file_meta_data_repo = FileMetaDataRepository.instance( + get_db_engine(self.app) ) + await file_meta_data_repo.delete( + connection=connection, file_ids=[file_id] + ) + + if parent_dir_fmds := await file_meta_data_repo.list_filter_with_partial_file_id( + connection=connection, + user_or_project_filter=UserOrProjectFilter( + user_id=user_id, project_ids=[] + ), + file_id_prefix=compute_file_id_prefix(file_id, 2), + partial_file_id=None, + is_directory=True, + sha256_checksum=None, + ): + parent_dir_fmd = max( + parent_dir_fmds, key=lambda fmd: len(fmd.file_id) + ) + parent_dir_fmd.file_size = UNDEFINED_SIZE + await file_meta_data_repo.upsert( + connection=connection, fmd=parent_dir_fmd + ) async def delete_project_simcore_s3( self, user_id: UserID, project_id: ProjectID, node_id: NodeID | None = None @@ -956,17 +977,22 @@ async def 
create_soft_link( return convert_db_to_model(await file_meta_data_repo.insert(fmd=target)) async def _clean_pending_upload(self, file_id: SimcoreS3FileID) -> None: - with suppress(FileMetaDataNotFoundError): - fmd = await FileMetaDataRepository.instance(get_db_engine(self.app)).get( - file_id=file_id - ) - if is_valid_managed_multipart_upload(fmd.upload_id): - assert fmd.upload_id # nosec - await get_s3_client(self.app).abort_multipart_upload( - bucket=self.simcore_bucket_name, - object_key=file_id, - upload_id=fmd.upload_id, - ) + with log_context( + logger=_logger, + level=logging.DEBUG, + msg=f"Cleaning pending uploads for {file_id=}", + ): + with suppress(FileMetaDataNotFoundError): + fmd = await FileMetaDataRepository.instance( + get_db_engine(self.app) + ).get(file_id=file_id) + if is_valid_managed_multipart_upload(fmd.upload_id): + assert fmd.upload_id # nosec + await get_s3_client(self.app).abort_multipart_upload( + bucket=self.simcore_bucket_name, + object_key=file_id, + upload_id=fmd.upload_id, + ) async def _clean_expired_uploads(self) -> None: """this method will check for all incomplete updates by checking diff --git a/services/storage/src/simcore_service_storage/utils/s3_utils.py b/services/storage/src/simcore_service_storage/utils/s3_utils.py index 3fcb17d0c452..0ebec26a6bbd 100644 --- a/services/storage/src/simcore_service_storage/utils/s3_utils.py +++ b/services/storage/src/simcore_service_storage/utils/s3_utils.py @@ -4,8 +4,8 @@ from collections import defaultdict from dataclasses import dataclass, field +from common_library.async_tools import cancel_wait_task from pydantic import ByteSize, TypeAdapter -from servicelib.async_utils import cancel_wait_task from servicelib.background_task import create_periodic_task from servicelib.progress_bar import ProgressBarData diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 6bbffe9cd197..802c3fab387c 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -24,6 +24,8 @@ from celery import Celery from celery.contrib.testing.worker import TestWorkController, start_worker from celery.signals import worker_init, worker_shutdown +from celery.worker.worker import WorkController +from celery_library.signals import on_worker_init, on_worker_shutdown from faker import Faker from fakeredis.aioredis import FakeRedis from fastapi import FastAPI @@ -43,6 +45,7 @@ from models_library.projects_nodes_io import LocationID, SimcoreS3FileID, StorageFileID from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder +from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter from pydantic import ByteSize, TypeAdapter from pytest_mock import MockerFixture from pytest_simcore.helpers.fastapi import url_from_operation_id @@ -60,6 +63,7 @@ ) from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.aiohttp import status +from servicelib.fastapi.celery.app_server import FastAPIAppServer from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient from servicelib.utils import limited_gather from settings_library.rabbit import RabbitSettings @@ -71,12 +75,6 @@ from simcore_service_storage.datcore_dsm import DatCoreDataManager from simcore_service_storage.dsm import get_dsm_provider from simcore_service_storage.models import FileMetaData, FileMetaDataAtDB, S3BucketName -from simcore_service_storage.modules.celery.signals import ( - on_worker_init, - on_worker_shutdown, -) -from 
simcore_service_storage.modules.celery.utils import get_celery_worker -from simcore_service_storage.modules.celery.worker import CeleryTaskWorker from simcore_service_storage.modules.s3 import get_s3_client from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager from sqlalchemy import literal_column @@ -89,6 +87,7 @@ from yarl import URL pytest_plugins = [ + "pytest_simcore.asyncio_event_loops", "pytest_simcore.aws_s3_service", "pytest_simcore.aws_server", "pytest_simcore.cli_runner", @@ -98,6 +97,7 @@ "pytest_simcore.environment_configs", "pytest_simcore.file_extra", "pytest_simcore.httpbin_service", + "pytest_simcore.logging", "pytest_simcore.openapi_specs", "pytest_simcore.postgres_service", "pytest_simcore.pytest_global_environs", @@ -106,6 +106,7 @@ "pytest_simcore.simcore_storage_data_models", "pytest_simcore.simcore_storage_datcore_adapter", "pytest_simcore.simcore_storage_service", + "pytest_simcore.tracing", ] CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent @@ -189,6 +190,15 @@ def disabled_rabbitmq(app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPa monkeypatch.setenv("STORAGE_RABBITMQ", "null") +@pytest.fixture +def enable_tracing( + app_environment: EnvVarsDict, + monkeypatch: pytest.MonkeyPatch, + setup_tracing_fastapi: InMemorySpanExporter, +): + monkeypatch.setenv("STORAGE_TRACING", "{}") + + @pytest.fixture def enabled_rabbitmq( app_environment: EnvVarsDict, rabbit_service: RabbitSettings @@ -204,6 +214,7 @@ async def mocked_redis_server(mocker: MockerFixture) -> None: @pytest.fixture def app_settings( + enable_tracing, app_environment: EnvVarsDict, enabled_rabbitmq: RabbitSettings, sqlalchemy_async_engine: AsyncEngine, @@ -365,7 +376,7 @@ def upload_file( create_upload_file_link_v2: Callable[..., Awaitable[FileUploadSchema]], create_file_of_size: Callable[[ByteSize, str | None], Path], create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID], - with_storage_celery_worker: CeleryTaskWorker, + with_storage_celery_worker: TestWorkController, ) -> Callable[ [ByteSize, str, SimcoreS3FileID | None], Awaitable[tuple[Path, SimcoreS3FileID]] ]: @@ -480,7 +491,7 @@ async def create_empty_directory( create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID], create_upload_file_link_v2: Callable[..., Awaitable[FileUploadSchema]], client: httpx.AsyncClient, - with_storage_celery_worker: CeleryTaskWorker, + with_storage_celery_worker: TestWorkController, ) -> Callable[[str, ProjectID, NodeID], Awaitable[SimcoreS3FileID]]: async def _directory_creator( dir_name: str, project_id: ProjectID, node_id: NodeID @@ -977,10 +988,7 @@ def celery_config() -> dict[str, Any]: def mock_celery_app(mocker: MockerFixture, celery_config: dict[str, Any]) -> Celery: celery_app = Celery(**celery_config) - for module in ( - "simcore_service_storage.modules.celery._common.create_app", - "simcore_service_storage.modules.celery.create_app", - ): + for module in ("simcore_service_storage.modules.celery.create_app",): mocker.patch(module, return_value=celery_app) return celery_app @@ -996,20 +1004,27 @@ def _(celery_app: Celery) -> None: ... 
@pytest.fixture -async def with_storage_celery_worker_controller( +async def with_storage_celery_worker( app_environment: EnvVarsDict, celery_app: Celery, monkeypatch: pytest.MonkeyPatch, register_celery_tasks: Callable[[Celery], None], ) -> AsyncIterator[TestWorkController]: # Signals must be explicitily connected - worker_init.connect(on_worker_init) + monkeypatch.setenv("STORAGE_WORKER_MODE", "true") + app_settings = ApplicationSettings.create_from_envs() + + app_server = FastAPIAppServer(app=create_app(app_settings)) + + def _on_worker_init_wrapper(sender: WorkController, **_kwargs): + return on_worker_init(sender, app_server, **_kwargs) + + worker_init.connect(_on_worker_init_wrapper) worker_shutdown.connect(on_worker_shutdown) setup_worker_tasks(celery_app) register_celery_tasks(celery_app) - monkeypatch.setenv("STORAGE_WORKER_MODE", "true") with start_worker( celery_app, pool="threads", @@ -1021,14 +1036,6 @@ async def with_storage_celery_worker_controller( yield worker -@pytest.fixture -def with_storage_celery_worker( - with_storage_celery_worker_controller: TestWorkController, -) -> CeleryTaskWorker: - assert isinstance(with_storage_celery_worker_controller.app, Celery) - return get_celery_worker(with_storage_celery_worker_controller.app) - - @pytest.fixture async def storage_rabbitmq_rpc_client( rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], diff --git a/services/storage/tests/unit/test_core_settings.py b/services/storage/tests/unit/test_core_settings.py new file mode 100644 index 000000000000..56b9da04cf4c --- /dev/null +++ b/services/storage/tests/unit/test_core_settings.py @@ -0,0 +1,18 @@ +# pylint: disable=unused-variable +# pylint: disable=unused-argument +# pylint: disable=redefined-outer-name + + +from pytest_simcore.helpers.monkeypatch_envs import ( + EnvVarsDict, +) +from simcore_service_storage.core.settings import ApplicationSettings + + +def test_valid_application_settings(app_environment: EnvVarsDict): + assert app_environment + + settings = ApplicationSettings() # type: ignore + assert settings + + assert settings == ApplicationSettings.create_from_envs() diff --git a/services/storage/tests/unit/test_handlers_files.py b/services/storage/tests/unit/test_handlers_files.py index db66eab9878c..f07b63cdbe92 100644 --- a/services/storage/tests/unit/test_handlers_files.py +++ b/services/storage/tests/unit/test_handlers_files.py @@ -23,6 +23,7 @@ from aiohttp import ClientSession from aws_library.s3 import S3KeyNotFoundError, S3ObjectKey, SimcoreS3API from aws_library.s3._constants import MULTIPART_UPLOADS_MIN_TOTAL_SIZE +from celery.contrib.testing.worker import TestWorkController from faker import Faker from fastapi import FastAPI from models_library.api_schemas_storage.storage_schemas import ( @@ -53,7 +54,6 @@ from servicelib.aiohttp import status from simcore_service_storage.constants import S3_UNDEFINED_OR_EXTERNAL_MULTIPART_ID from simcore_service_storage.models import FileDownloadResponse, S3BucketName, UploadID -from simcore_service_storage.modules.celery.worker import CeleryTaskWorker from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager from sqlalchemy.ext.asyncio import AsyncEngine from tenacity.asyncio import AsyncRetrying @@ -683,7 +683,7 @@ async def test_upload_real_file_with_s3_client( node_id: NodeID, faker: Faker, s3_client: S3Client, - with_storage_celery_worker: CeleryTaskWorker, + with_storage_celery_worker: TestWorkController, ): file_size = TypeAdapter(ByteSize).validate_python("500Mib") file_name = 
faker.file_name() diff --git a/services/storage/tests/unit/test_handlers_paths.py b/services/storage/tests/unit/test_handlers_paths.py index 6997bb5bf7dd..0fac7c5deb2c 100644 --- a/services/storage/tests/unit/test_handlers_paths.py +++ b/services/storage/tests/unit/test_handlers_paths.py @@ -18,7 +18,6 @@ import sqlalchemy as sa from faker import Faker from fastapi import FastAPI, status -from fastapi_pagination.cursor import CursorPage from models_library.api_schemas_storage.storage_schemas import ( PathMetaDataGet, PathTotalSizeCreate, @@ -30,6 +29,7 @@ from pytest_simcore.helpers.fastapi import url_from_operation_id from pytest_simcore.helpers.httpx_assert_checks import assert_status from pytest_simcore.helpers.storage_utils import FileIDDict, ProjectWithFilesParams +from servicelib.fastapi.rest_pagination import CustomizedPathsCursorPage from simcore_postgres_database.models.projects import projects from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager from sqlalchemy.ext.asyncio import AsyncEngine @@ -67,11 +67,12 @@ async def _assert_list_paths( limit: int = 25, expected_paths: list[tuple[Path, _IsFile]], check_total: bool = True, -) -> CursorPage[PathMetaDataGet]: +) -> CustomizedPathsCursorPage[PathMetaDataGet]: offset = 0 total_expected = len(expected_paths) next_cursor = 0 # NOTE: this will initialize total_received = 0 + page_of_files = None while next_cursor is not None: url = url_from_operation_id( client, initialized_app, "list_paths", location_id=f"{location_id}" @@ -89,7 +90,7 @@ async def _assert_list_paths( page_of_files, _ = assert_status( response, status.HTTP_200_OK, - CursorPage[PathMetaDataGet], + CustomizedPathsCursorPage[PathMetaDataGet], # type: ignore expect_envelope=False, ) assert page_of_files @@ -112,6 +113,7 @@ async def _assert_list_paths( total_received += len(page_of_files.items) offset += limit assert total_received == total_expected + assert page_of_files assert page_of_files.next_page is None return page_of_files diff --git a/services/storage/tests/unit/test_modules_celery.py b/services/storage/tests/unit/test_modules_celery.py deleted file mode 100644 index b1819aabb44d..000000000000 --- a/services/storage/tests/unit/test_modules_celery.py +++ /dev/null @@ -1,223 +0,0 @@ -# pylint: disable=protected-access -# pylint: disable=redefined-outer-name -# pylint: disable=too-many-arguments -# pylint: disable=unused-argument -# pylint: disable=unused-variable - -import asyncio -import logging -import time -from collections.abc import Callable -from random import randint - -import pytest -from celery import Celery, Task -from celery.contrib.abortable import AbortableTask -from common_library.errors_classes import OsparcErrorMixin -from fastapi import FastAPI -from models_library.progress_bar import ProgressReport -from servicelib.logging_utils import log_context -from simcore_service_storage.modules.celery import get_celery_client, get_event_loop -from simcore_service_storage.modules.celery._task import ( - AbortableAsyncResult, - register_task, -) -from simcore_service_storage.modules.celery.client import CeleryTaskClient -from simcore_service_storage.modules.celery.errors import TransferrableCeleryError -from simcore_service_storage.modules.celery.models import ( - TaskContext, - TaskID, - TaskMetadata, - TaskState, -) -from simcore_service_storage.modules.celery.utils import ( - get_celery_worker, - get_fastapi_app, -) -from simcore_service_storage.modules.celery.worker import CeleryTaskWorker -from tenacity import Retrying, 
retry_if_exception_type, stop_after_delay, wait_fixed - -_logger = logging.getLogger(__name__) - -pytest_simcore_core_services_selection = ["postgres", "rabbit"] -pytest_simcore_ops_services_selection = [] - - -@pytest.fixture -def celery_client( - initialized_app: FastAPI, - with_storage_celery_worker: CeleryTaskWorker, -) -> CeleryTaskClient: - return get_celery_client(initialized_app) - - -async def _fake_file_processor( - celery_app: Celery, task_name: str, task_id: str, files: list[str] -) -> str: - worker = get_celery_worker(celery_app) - - def sleep_for(seconds: float) -> None: - time.sleep(seconds) - - for n, file in enumerate(files, start=1): - with log_context(_logger, logging.INFO, msg=f"Processing file {file}"): - await worker.set_task_progress( - task_id=task_id, - report=ProgressReport(actual_value=n / len(files)), - ) - await asyncio.get_event_loop().run_in_executor(None, sleep_for, 1) - - return "archive.zip" - - -def fake_file_processor(task: Task, files: list[str]) -> str: - assert task.name - _logger.info("Calling _fake_file_processor") - return asyncio.run_coroutine_threadsafe( - _fake_file_processor(task.app, task.name, task.request.id, files), - get_event_loop(get_fastapi_app(task.app)), - ).result() - - -class MyError(OsparcErrorMixin, Exception): - msg_template = "Something strange happened: {msg}" - - -def failure_task(task: Task): - assert task - msg = "BOOM!" - raise MyError(msg=msg) - - -async def dreamer_task(task: AbortableTask, task_id: TaskID) -> list[int]: - numbers = [] - for _ in range(30): - if AbortableAsyncResult(task_id, app=task.app).is_aborted(): - _logger.warning("Alarm clock") - return numbers - numbers.append(randint(1, 90)) # noqa: S311 - await asyncio.sleep(0.1) - return numbers - - -@pytest.fixture -def register_celery_tasks() -> Callable[[Celery], None]: - def _(celery_app: Celery) -> None: - register_task(celery_app, fake_file_processor) - register_task(celery_app, failure_task) - register_task(celery_app, dreamer_task) - - return _ - - -async def test_submitting_task_calling_async_function_results_with_success_state( - celery_client: CeleryTaskClient, -): - task_context = TaskContext(user_id=42) - - task_uuid = await celery_client.submit_task( - TaskMetadata( - name=fake_file_processor.__name__, - ), - task_context=task_context, - files=[f"file{n}" for n in range(5)], - ) - - for attempt in Retrying( - retry=retry_if_exception_type(AssertionError), - wait=wait_fixed(1), - stop=stop_after_delay(30), - ): - with attempt: - status = await celery_client.get_task_status(task_context, task_uuid) - assert status.task_state == TaskState.SUCCESS - - assert ( - await celery_client.get_task_status(task_context, task_uuid) - ).task_state == TaskState.SUCCESS - assert ( - await celery_client.get_task_result(task_context, task_uuid) - ) == "archive.zip" - - -async def test_submitting_task_with_failure_results_with_error( - celery_client: CeleryTaskClient, -): - task_context = TaskContext(user_id=42) - - task_uuid = await celery_client.submit_task( - TaskMetadata( - name=failure_task.__name__, - ), - task_context=task_context, - ) - - for attempt in Retrying( - retry=retry_if_exception_type(AssertionError), - wait=wait_fixed(1), - stop=stop_after_delay(30), - ): - - with attempt: - raw_result = await celery_client.get_task_result(task_context, task_uuid) - assert isinstance(raw_result, TransferrableCeleryError) - - raw_result = await celery_client.get_task_result(task_context, task_uuid) - assert f"{raw_result}" == "Something strange happened: BOOM!" 
- - -async def test_cancelling_a_running_task_aborts_and_deletes( - celery_client: CeleryTaskClient, -): - task_context = TaskContext(user_id=42) - - task_uuid = await celery_client.submit_task( - TaskMetadata( - name=dreamer_task.__name__, - ), - task_context=task_context, - ) - - await celery_client.cancel_task(task_context, task_uuid) - - for attempt in Retrying( - retry=retry_if_exception_type(AssertionError), - wait=wait_fixed(1), - stop=stop_after_delay(30), - ): - with attempt: - progress = await celery_client.get_task_status(task_context, task_uuid) - assert progress.task_state == TaskState.ABORTED - - assert ( - await celery_client.get_task_status(task_context, task_uuid) - ).task_state == TaskState.ABORTED - - assert task_uuid not in await celery_client.list_tasks(task_context) - - -async def test_listing_task_uuids_contains_submitted_task( - celery_client: CeleryTaskClient, -): - task_context = TaskContext(user_id=42) - - task_uuid = await celery_client.submit_task( - TaskMetadata( - name=dreamer_task.__name__, - ), - task_context=task_context, - ) - - for attempt in Retrying( - retry=retry_if_exception_type(AssertionError), - wait=wait_fixed(0.1), - stop=stop_after_delay(10), - ): - with attempt: - tasks = await celery_client.list_tasks(task_context) - assert len(tasks) == 1 - assert task_uuid == tasks[0].uuid - - tasks = await celery_client.list_tasks(task_context) - assert len(tasks) == 1 - assert task_uuid == tasks[0].uuid diff --git a/services/storage/tests/unit/test_rpc_handlers_paths.py b/services/storage/tests/unit/test_rpc_handlers_paths.py index 98cb75e8cbad..8ea54e4c614d 100644 --- a/services/storage/tests/unit/test_rpc_handlers_paths.py +++ b/services/storage/tests/unit/test_rpc_handlers_paths.py @@ -13,10 +13,10 @@ from typing import Any, TypeAlias import pytest +from celery.contrib.testing.worker import TestWorkController from faker import Faker from fastapi import FastAPI from models_library.api_schemas_rpc_async_jobs.async_jobs import ( - AsyncJobNameData, AsyncJobResult, ) from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE @@ -26,6 +26,7 @@ from models_library.users import UserID from pydantic import ByteSize, TypeAdapter from pytest_simcore.helpers.storage_utils import FileIDDict, ProjectWithFilesParams +from servicelib.celery.models import OwnerMetadata, Wildcard from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient from servicelib.rabbitmq.rpc_interfaces.async_jobs.async_jobs import ( wait_and_get_result, @@ -34,7 +35,6 @@ compute_path_size, delete_paths, ) -from simcore_service_storage.modules.celery.worker import CeleryTaskWorker from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager pytest_simcore_core_services_selection = ["postgres", "rabbit"] @@ -43,6 +43,11 @@ _IsFile: TypeAlias = bool +class TestOwnerMetadata(OwnerMetadata): + user_id: int | Wildcard + product_name: str | Wildcard + + def _filter_and_group_paths_one_level_deeper( paths: list[Path], prefix: Path ) -> list[tuple[Path, _IsFile]]: @@ -71,17 +76,21 @@ async def _assert_compute_path_size( ) -> ByteSize: async_job, async_job_name = await compute_path_size( storage_rpc_client, - product_name=product_name, - user_id=user_id, location_id=location_id, path=path, + owner_metadata=TestOwnerMetadata( + user_id=user_id, product_name=product_name, owner="pytest_client_name" + ), + user_id=user_id, ) async for job_composed_result in wait_and_get_result( storage_rpc_client, rpc_namespace=STORAGE_RPC_NAMESPACE, 
method_name=RPCMethodName(compute_path_size.__name__), job_id=async_job.job_id, - job_id_data=AsyncJobNameData(user_id=user_id, product_name=product_name), + owner_metadata=TestOwnerMetadata( + user_id=user_id, product_name=product_name, owner="pytest_client_name" + ), client_timeout=datetime.timedelta(seconds=120), ): if job_composed_result.done: @@ -105,17 +114,21 @@ async def _assert_delete_paths( ) -> None: async_job, async_job_name = await delete_paths( storage_rpc_client, - product_name=product_name, - user_id=user_id, location_id=location_id, paths=paths, + owner_metadata=TestOwnerMetadata( + user_id=user_id, product_name=product_name, owner="pytest_client_name" + ), + user_id=user_id, ) async for job_composed_result in wait_and_get_result( storage_rpc_client, rpc_namespace=STORAGE_RPC_NAMESPACE, method_name=RPCMethodName(compute_path_size.__name__), job_id=async_job.job_id, - job_id_data=AsyncJobNameData(user_id=user_id, product_name=product_name), + owner_metadata=TestOwnerMetadata( + user_id=user_id, product_name=product_name, owner="pytest_client_name" + ), client_timeout=datetime.timedelta(seconds=120), ): if job_composed_result.done: @@ -265,7 +278,7 @@ async def test_path_compute_size_inexistent_path( mock_celery_app: None, initialized_app: FastAPI, storage_rabbitmq_rpc_client: RabbitMQRPCClient, - with_storage_celery_worker: CeleryTaskWorker, + with_storage_celery_worker: TestWorkController, location_id: LocationID, user_id: UserID, faker: Faker, @@ -294,7 +307,7 @@ async def test_delete_paths_empty_set( user_id: UserID, location_id: LocationID, product_name: ProductName, - with_storage_celery_worker: CeleryTaskWorker, + with_storage_celery_worker: TestWorkController, ): await _assert_delete_paths( storage_rabbitmq_rpc_client, @@ -333,7 +346,7 @@ async def test_delete_paths( ], project_params: ProjectWithFilesParams, product_name: ProductName, - with_storage_celery_worker: CeleryTaskWorker, + with_storage_celery_worker: TestWorkController, ): assert ( len(project_params.allowed_file_sizes) == 1 diff --git a/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py b/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py index 751cdae4f2f8..1ca1c5d3c729 100644 --- a/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py +++ b/services/storage/tests/unit/test_rpc_handlers_simcore_s3.py @@ -13,22 +13,26 @@ from collections.abc import Awaitable, Callable from copy import deepcopy from pathlib import Path -from typing import Any +from typing import Any, Literal from unittest.mock import Mock import httpx import pytest import sqlalchemy as sa from celery.contrib.testing.worker import TestWorkController +from celery_library.task_manager import CeleryTaskManager from faker import Faker from fastapi import FastAPI from fastapi.encoders import jsonable_encoder -from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobResult +from models_library.api_schemas_rpc_async_jobs.async_jobs import ( + AsyncJobResult, +) from models_library.api_schemas_rpc_async_jobs.exceptions import JobError from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE from models_library.api_schemas_storage.storage_schemas import ( FileMetaDataGet, FoldersBody, + PresignedLink, ) from models_library.api_schemas_webserver.storage import PathToExport from models_library.basic_types import SHA256Str @@ -50,14 +54,15 @@ ) from pytest_simcore.helpers.storage_utils_project import clone_project_data from servicelib.aiohttp import status +from servicelib.celery.models import 
OwnerMetadata from servicelib.rabbitmq._client_rpc import RabbitMQRPCClient +from servicelib.rabbitmq._errors import RPCServerError from servicelib.rabbitmq.rpc_interfaces.async_jobs.async_jobs import wait_and_get_result from servicelib.rabbitmq.rpc_interfaces.storage.simcore_s3 import ( copy_folders_from_project, start_export_data, ) from simcore_postgres_database.storage_models import file_meta_data -from simcore_service_storage.modules.celery.worker import CeleryTaskWorker from simcore_service_storage.simcore_s3_dsm import SimcoreS3DataManager from sqlalchemy.ext.asyncio import AsyncEngine from yarl import URL @@ -66,6 +71,12 @@ pytest_simcore_ops_services_selection = ["adminer"] +class _TestOwnerMetadata(OwnerMetadata): + user_id: UserID + product_name: ProductName + owner: str = "PYTEST_CLIENT_NAME" + + async def _request_copy_folders( rpc_client: RabbitMQRPCClient, user_id: UserID, @@ -80,13 +91,17 @@ async def _request_copy_folders( logging.INFO, f"Copying folders from {source_project['uuid']} to {dst_project['uuid']}", ) as ctx: - async_job_get, async_job_name = await copy_folders_from_project( + async_job_get, owner_metadata = await copy_folders_from_project( rpc_client, - user_id=user_id, - product_name=product_name, body=FoldersBody( source=source_project, destination=dst_project, nodes_map=nodes_map ), + owner_metadata=_TestOwnerMetadata( + user_id=user_id, + product_name=product_name, + owner="PYTEST_CLIENT_NAME", + ), + user_id=user_id, ) async for async_job_result in wait_and_get_result( @@ -94,7 +109,7 @@ async def _request_copy_folders( rpc_namespace=STORAGE_RPC_NAMESPACE, method_name=copy_folders_from_project.__name__, job_id=async_job_get.job_id, - job_id_data=async_job_name, + owner_metadata=owner_metadata, client_timeout=client_timeout, ): ctx.logger.info("%s", f"<-- current state is {async_job_result=}") @@ -113,7 +128,7 @@ async def test_copy_folders_from_non_existing_project( product_name: ProductName, create_project: Callable[..., Awaitable[dict[str, Any]]], faker: Faker, - with_storage_celery_worker: CeleryTaskWorker, + with_storage_celery_worker: TestWorkController, ): src_project = await create_project() incorrect_src_project = deepcopy(src_project) @@ -154,7 +169,7 @@ async def test_copy_folders_from_empty_project( product_name: ProductName, create_project: Callable[[], Awaitable[dict[str, Any]]], sqlalchemy_async_engine: AsyncEngine, - with_storage_celery_worker: CeleryTaskWorker, + with_storage_celery_worker: TestWorkController, ): # we will copy from src to dst src_project = await create_project() @@ -514,18 +529,24 @@ async def _request_start_export_data( user_id: UserID, product_name: ProductName, paths_to_export: list[PathToExport], + export_as: Literal["path", "download_link"], *, client_timeout: datetime.timedelta = datetime.timedelta(seconds=60), -) -> dict[str, Any]: +) -> str: with log_context( logging.INFO, f"Data export form {paths_to_export=}", ) as ctx: - async_job_get, async_job_name = await start_export_data( + async_job_get, owner_metadata = await start_export_data( rpc_client, - user_id=user_id, - product_name=product_name, paths_to_export=paths_to_export, + export_as=export_as, + owner_metadata=_TestOwnerMetadata( + user_id=user_id, + product_name=product_name, + owner="PYTEST_CLIENT_NAME", + ), + user_id=user_id, ) async for async_job_result in wait_and_get_result( @@ -533,7 +554,7 @@ async def _request_start_export_data( rpc_namespace=STORAGE_RPC_NAMESPACE, method_name=start_export_data.__name__, job_id=async_job_get.job_id, - 
job_id_data=async_job_name, + owner_metadata=owner_metadata, client_timeout=client_timeout, ): ctx.logger.info("%s", f"<-- current state is {async_job_result=}") @@ -547,7 +568,7 @@ async def _request_start_export_data( @pytest.fixture def task_progress_spy(mocker: MockerFixture) -> Mock: - return mocker.spy(CeleryTaskWorker, "set_task_progress") + return mocker.spy(CeleryTaskManager, "set_task_progress") @pytest.mark.parametrize( @@ -572,10 +593,14 @@ def task_progress_spy(mocker: MockerFixture) -> Mock: ], ids=str, ) +@pytest.mark.parametrize( + "export_as", + ["path", "download_link"], +) async def test_start_export_data( initialized_app: FastAPI, short_dsm_cleaner_interval: int, - with_storage_celery_worker_controller: TestWorkController, + with_storage_celery_worker: TestWorkController, storage_rabbitmq_rpc_client: RabbitMQRPCClient, user_id: UserID, product_name: ProductName, @@ -589,6 +614,7 @@ async def test_start_export_data( ], project_params: ProjectWithFilesParams, task_progress_spy: Mock, + export_as: Literal["path", "download_link"], ): _, src_projects_list = await random_project_with_files(project_params) @@ -606,26 +632,41 @@ async def test_start_export_data( user_id, product_name, paths_to_export=list(nodes_in_project_to_export), + export_as=export_as, ) - assert re.fullmatch( - rf"^exports/{user_id}/[0-9a-fA-F]{{8}}-[0-9a-fA-F]{{4}}-[0-9a-fA-F]{{4}}-[0-9a-fA-F]{{4}}-[0-9a-fA-F]{{12}}\.zip$", - result, - ) + if export_as == "path": + assert re.fullmatch( + rf"^exports/{user_id}/[0-9a-fA-F]{{8}}-[0-9a-fA-F]{{4}}-[0-9a-fA-F]{{4}}-[0-9a-fA-F]{{4}}-[0-9a-fA-F]{{12}}\.zip$", + result, + ) + elif export_as == "download_link": + link = PresignedLink.model_validate(result).link + assert re.search( + rf"exports/{user_id}/[0-9a-fA-F]{{8}}-[0-9a-fA-F]{{4}}-[0-9a-fA-F]{{4}}-[0-9a-fA-F]{{4}}-[0-9a-fA-F]{{12}}\.zip", + f"{link}", + ) + else: + pytest.fail(f"Unexpected export_as value: {export_as}") progress_updates = [x[0][2].actual_value for x in task_progress_spy.call_args_list] assert progress_updates[0] == 0 assert progress_updates[-1] == 1 +@pytest.mark.parametrize( + "export_as", + ["path", "download_link"], +) async def test_start_export_data_access_error( initialized_app: FastAPI, short_dsm_cleaner_interval: int, - with_storage_celery_worker_controller: TestWorkController, + with_storage_celery_worker: TestWorkController, storage_rabbitmq_rpc_client: RabbitMQRPCClient, user_id: UserID, product_name: ProductName, faker: Faker, + export_as: Literal["path", "download_link"], ): path_to_export = TypeAdapter(PathToExport).validate_python( f"{faker.uuid4()}/{faker.uuid4()}/{faker.file_name()}" @@ -637,9 +678,35 @@ async def test_start_export_data_access_error( product_name, paths_to_export=[path_to_export], client_timeout=datetime.timedelta(seconds=60), + export_as=export_as, ) assert isinstance(exc.value, JobError) assert exc.value.exc_type == "AccessRightError" assert f" {user_id} " in f"{exc.value}" assert f" {path_to_export} " in f"{exc.value}" + + +async def test_start_export_invalid_export_format( + initialized_app: FastAPI, + short_dsm_cleaner_interval: int, + with_storage_celery_worker: TestWorkController, + storage_rabbitmq_rpc_client: RabbitMQRPCClient, + user_id: UserID, + product_name: ProductName, + faker: Faker, +): + path_to_export = TypeAdapter(PathToExport).validate_python( + f"{faker.uuid4()}/{faker.uuid4()}/{faker.file_name()}" + ) + with pytest.raises(RPCServerError) as exc: + await _request_start_export_data( + storage_rabbitmq_rpc_client, + user_id, + 
product_name, + paths_to_export=[path_to_export], + client_timeout=datetime.timedelta(seconds=60), + export_as="invalid_format", # type: ignore + ) + + assert exc.value.exc_type == "builtins.ValueError" diff --git a/services/web/Dockerfile b/services/web/Dockerfile index ff208a017fe7..fd6d8f93ad44 100644 --- a/services/web/Dockerfile +++ b/services/web/Dockerfile @@ -2,7 +2,7 @@ # Define arguments in the global scope ARG PYTHON_VERSION="3.11.9" -ARG UV_VERSION="0.6" +ARG UV_VERSION="0.7" FROM ghcr.io/astral-sh/uv:${UV_VERSION} AS uv_build FROM python:${PYTHON_VERSION}-slim-bookworm AS base-arm64 @@ -32,6 +32,7 @@ RUN --mount=type=cache,target=/var/cache/apt,sharing=private \ set -eux && \ apt-get update && \ apt-get install -y --no-install-recommends \ + fd-find \ curl \ gosu \ libmagic1 \ @@ -75,7 +76,7 @@ ENV PATH="${VIRTUAL_ENV}/bin:$PATH" # TODO: eliminate this variable! -ENV IS_CONTAINER_CONTEXT Yes +ENV IS_CONTAINER_CONTEXT=Yes EXPOSE 8080 @@ -92,7 +93,7 @@ EXPOSE 8080 FROM base AS build -ENV SC_BUILD_TARGET build +ENV SC_BUILD_TARGET=build RUN --mount=type=cache,target=/var/cache/apt,sharing=private \ set -eux \ @@ -111,10 +112,7 @@ RUN uv venv "${VIRTUAL_ENV}" -RUN --mount=type=cache,target=/root/.cache/uv \ - uv pip install --upgrade \ - wheel \ - setuptools + WORKDIR /build @@ -129,6 +127,9 @@ WORKDIR /build FROM build AS prod-only-deps ENV SC_BUILD_TARGET=prod-only-deps +# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode +ENV UV_COMPILE_BYTECODE=1 \ + UV_LINK_MODE=copy # 2nd party packages WORKDIR /build/services/web/server @@ -154,8 +155,6 @@ ENV SC_BUILD_TARGET=production \ SC_BOOT_MODE=production ENV PYTHONOPTIMIZE=TRUE -# https://docs.astral.sh/uv/guides/integration/docker/#compiling-bytecode -ENV UV_COMPILE_BYTECODE=1 WORKDIR /home/scu @@ -207,7 +206,7 @@ CMD ["services/web/server/docker/boot.sh"] # FROM build AS development -ENV SC_BUILD_TARGET development +ENV SC_BUILD_TARGET=development WORKDIR /devel diff --git a/services/web/server/VERSION b/services/web/server/VERSION index 4a46fb5b7a11..870aec4a7caa 100644 --- a/services/web/server/VERSION +++ b/services/web/server/VERSION @@ -1 +1 @@ -0.68.0 +0.79.0 diff --git a/services/web/server/docker/boot.sh b/services/web/server/docker/boot.sh index add4415d44d0..1fa26ebbda93 100755 --- a/services/web/server/docker/boot.sh +++ b/services/web/server/docker/boot.sh @@ -19,7 +19,7 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then command -v python | sed 's/^/ /' cd services/web/server - uv pip --quiet sync requirements/dev.txt + uv pip --quiet sync --link-mode=copy requirements/dev.txt cd - echo "$INFO" "PIP :" uv pip list @@ -32,7 +32,7 @@ fi if [ "${SC_BOOT_MODE}" = "debug" ]; then # NOTE: production does NOT pre-installs debugpy if command -v uv >/dev/null 2>&1; then - uv pip install debugpy + uv pip install --link-mode=copy debugpy else pip install debugpy fi diff --git a/services/web/server/docker/entrypoint.sh b/services/web/server/docker/entrypoint.sh index d7e4f13bb7a5..184ea4e00a1e 100755 --- a/services/web/server/docker/entrypoint.sh +++ b/services/web/server/docker/entrypoint.sh @@ -27,9 +27,7 @@ echo "$INFO" "Workdir : $(pwd)" echo "$INFO" "User : $(id scu)" echo "$INFO" "python : $(command -v python)" echo "$INFO" "pip : $(command -v pip)" - -USERNAME=scu -GROUPNAME=scu +echo "$INFO" "UV : $(command -v uv)" if [ "${SC_BUILD_TARGET}" = "development" ]; then echo "$INFO" "development mode detected..." 
@@ -64,10 +62,9 @@ if [ "${SC_BUILD_TARGET}" = "development" ]; then usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME" echo "$INFO" "Changing group properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \; - # change user property of files already around + fdfind --owner ":$SC_USER_ID" --exclude proc --exec-batch chgrp --no-dereference "$CONT_GROUPNAME" . '/' echo "$INFO" "Changing ownership properties of files around from $SC_USER_ID to group $CONT_GROUPNAME" - find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \; + fdfind --owner "$SC_USER_ID:" --exclude proc --exec-batch chown --no-dereference "$SC_USER_NAME" . '/' fi fi diff --git a/services/web/server/requirements/_base.in b/services/web/server/requirements/_base.in index 977b148c6877..2af5fb69ea3d 100644 --- a/services/web/server/requirements/_base.in +++ b/services/web/server/requirements/_base.in @@ -45,6 +45,7 @@ opentelemetry-instrumentation-aiopg orjson # json packaging passlib +phonenumbers pint # units pycountry pydantic[email] # models diff --git a/services/web/server/requirements/_base.txt b/services/web/server/requirements/_base.txt index 2c2029b834d9..a8e2f003e025 100644 --- a/services/web/server/requirements/_base.txt +++ b/services/web/server/requirements/_base.txt @@ -27,7 +27,7 @@ aiofiles==0.8.0 # -r requirements/_base.in aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -173,7 +173,7 @@ cffi==1.17.1 # via cryptography charset-normalizer==2.0.12 # via requests -click==8.1.3 +click==8.2.1 # via typer cryptography==41.0.7 # via @@ -217,12 +217,6 @@ cryptography==41.0.7 # aiohttp-session deepdiff==8.1.1 # via -r requirements/_base.in -deprecated==1.2.14 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions dnspython==2.2.1 # via email-validator email-validator==2.2.0 @@ -246,7 +240,7 @@ frozenlist==1.4.1 # -c requirements/./constraints.txt # aiohttp # aiosignal -googleapis-common-protos==1.65.0 +googleapis-common-protos==1.70.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -256,9 +250,11 @@ grpcio==1.66.0 # via opentelemetry-exporter-otlp-proto-grpc gunicorn==23.0.0 # via -r requirements/_base.in -h11==0.14.0 - # via httpcore -httpcore==1.0.7 +h11==0.16.0 + # via + # httpcore + # wsproto +httpcore==1.0.9 # via httpx httpx==0.28.1 # via @@ -298,6 +294,7 @@ httpx==0.28.1 # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../../requirements/constraints.txt + # -r requirements/../../../../packages/simcore-sdk/requirements/_base.in # -r requirements/_base.in idna==3.3 # via @@ -310,7 +307,7 @@ importlib-metadata==8.0.0 # via opentelemetry-api jinja-app-loader==1.0.2 # via -r requirements/_base.in -jinja2==3.1.2 +jinja2==3.1.6 # via # -c 
requirements/../../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -353,6 +350,13 @@ jinja2==3.1.2 # swagger-ui-py jsondiff==2.0.0 # via -r requirements/_base.in +jsonref==1.1.0 + # via + # -r requirements/../../../../packages/models-library/requirements/_base.in + # -r requirements/../../../../packages/notifications-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema==3.2.0 # via # -r requirements/../../../../packages/models-library/requirements/_base.in @@ -361,7 +365,7 @@ jsonschema==3.2.0 # -r requirements/../../../../packages/service-library/requirements/_aiohttp.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in -mako==1.2.2 +mako==1.3.10 # via # -c requirements/../../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -416,7 +420,7 @@ multidict==6.1.0 # yarl openpyxl==3.0.9 # via -r requirements/_base.in -opentelemetry-api==1.27.0 +opentelemetry-api==1.34.1 # via # -r requirements/../../../../packages/service-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in @@ -434,19 +438,19 @@ opentelemetry-api==1.27.0 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.27.0 +opentelemetry-exporter-otlp==1.34.1 # via # -r requirements/../../../../packages/service-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.27.0 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.27.0 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.27.0 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.48b0 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-aiohttp-client @@ -457,50 +461,50 @@ opentelemetry-instrumentation==0.48b0 # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.48b0 +opentelemetry-instrumentation-aio-pika==0.55b1 # via # -r requirements/../../../../packages/service-library/requirements/_base.in # -r 
requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-aiohttp-client==0.48b0 +opentelemetry-instrumentation-aiohttp-client==0.55b1 # via -r requirements/../../../../packages/service-library/requirements/_aiohttp.in -opentelemetry-instrumentation-aiohttp-server==0.48b0 +opentelemetry-instrumentation-aiohttp-server==0.55b1 # via -r requirements/../../../../packages/service-library/requirements/_aiohttp.in -opentelemetry-instrumentation-aiopg==0.48b0 +opentelemetry-instrumentation-aiopg==0.55b1 # via # -r requirements/../../../../packages/service-library/requirements/_aiohttp.in # -r requirements/_base.in -opentelemetry-instrumentation-asyncpg==0.48b0 +opentelemetry-instrumentation-asyncpg==0.55b1 # via - # -r requirements/../../../../packages/notifications-library/requirements/../../../packages/postgres-database/requirements/_base.in - # -r requirements/../../../../packages/postgres-database/requirements/_base.in - # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-instrumentation-dbapi==0.48b0 + # -r requirements/../../../../packages/service-library/requirements/_base.in + # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-dbapi==0.55b1 # via opentelemetry-instrumentation-aiopg -opentelemetry-instrumentation-logging==0.48b0 +opentelemetry-instrumentation-logging==0.55b1 # via # -r requirements/../../../../packages/service-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.48b0 +opentelemetry-instrumentation-redis==0.55b1 # via # -r requirements/../../../../packages/service-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.48b0 +opentelemetry-instrumentation-requests==0.55b1 # via # -r requirements/../../../../packages/service-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.27.0 +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.27.0 +opentelemetry-sdk==1.34.1 # via # -r requirements/../../../../packages/service-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.48b0 +opentelemetry-semantic-conventions==0.55b1 # via + # opentelemetry-instrumentation # opentelemetry-instrumentation-aiohttp-client # opentelemetry-instrumentation-aiohttp-server # opentelemetry-instrumentation-asyncpg @@ -508,7 +512,7 @@ opentelemetry-semantic-conventions==0.48b0 # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.48b0 +opentelemetry-util-http==0.55b1 # via # opentelemetry-instrumentation-aiohttp-client # opentelemetry-instrumentation-aiohttp-server @@ -583,11 +587,14 @@ packaging==24.1 # -r 
requirements/../../../../packages/simcore-sdk/requirements/_base.in # -r requirements/_base.in # gunicorn + # opentelemetry-instrumentation # swagger-ui-py pamqp==3.2.1 # via aiormq passlib==1.7.4 # via -r requirements/_base.in +phonenumbers==9.0.9 + # via -r requirements/_base.in pillow==10.3.0 # via captcha pint==0.24.3 @@ -600,7 +607,7 @@ propcache==0.3.1 # via # aiohttp # yarl -protobuf==4.25.4 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto @@ -618,7 +625,7 @@ pycparser==2.21 # via cffi pycryptodome==3.21.0 # via stream-zip -pydantic==2.10.2 +pydantic==2.11.7 # via # -c requirements/../../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -656,7 +663,6 @@ pydantic==2.10.2 # -c requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../../requirements/constraints.txt - # -c requirements/./constraints.txt # -r requirements/../../../../packages/common-library/requirements/_base.in # -r requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../../packages/models-library/requirements/_base.in @@ -695,9 +701,9 @@ pydantic==2.10.2 # fast-depends # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.1 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.9.0 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../../packages/common-library/requirements/_base.in # -r requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in @@ -722,7 +728,7 @@ pydantic-extra-types==2.9.0 # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in -pydantic-settings==2.5.2 +pydantic-settings==2.7.0 # via # -c requirements/../../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -786,11 +792,11 @@ python-dateutil==2.8.2 # faker python-dotenv==1.0.1 # via pydantic-settings -python-engineio==4.3.4 +python-engineio==4.12.2 # via python-socketio python-magic==0.4.25 # via -r requirements/_base.in -python-socketio==5.8.0 +python-socketio==5.13.0 # via -r requirements/_base.in pytz==2022.1 # via twilio @@ -876,11 +882,11 @@ redis==5.2.1 # -r requirements/../../../../packages/service-library/requirements/_base.in # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_base.in -requests==2.32.2 +requests==2.32.4 # via # opentelemetry-exporter-otlp-proto-http # twilio -rich==13.4.2 +rich==14.1.0 # 
via # -r requirements/../../../../packages/notifications-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -890,12 +896,12 @@ rich==13.4.2 # typer setproctitle==1.3.3 # via gunicorn -setuptools==69.1.1 - # via - # jsonschema - # opentelemetry-instrumentation +setuptools==80.9.0 + # via jsonschema shellingham==1.5.4 # via typer +simple-websocket==1.1.0 + # via python-engineio six==1.16.0 # via # jsonschema @@ -969,7 +975,7 @@ tqdm==4.64.0 # -r requirements/../../../../packages/simcore-sdk/requirements/_base.in twilio==7.12.0 # via -r requirements/_base.in -typer==0.12.3 +typer==0.16.1 # via # -r requirements/../../../../packages/notifications-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in @@ -978,19 +984,27 @@ typer==0.12.3 # -r requirements/../../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in types-python-dateutil==2.9.0.20250516 # via arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # aiodebug # aiodocker # faststream # flexcache # flexparser + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pint # pydantic # pydantic-core + # pydantic-extra-types # typer -urllib3==2.2.3 + # typing-inspection +typing-inspection==0.4.1 + # via pydantic +urllib3==2.5.0 # via # -c requirements/../../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -1035,7 +1049,6 @@ werkzeug==2.1.2 # via -r requirements/../../../../packages/service-library/requirements/_aiohttp.in wrapt==1.16.0 # via - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-aiohttp-client @@ -1043,6 +1056,8 @@ wrapt==1.16.0 # opentelemetry-instrumentation-aiopg # opentelemetry-instrumentation-dbapi # opentelemetry-instrumentation-redis +wsproto==1.2.0 + # via simple-websocket yarl==1.20.0 # via # -c requirements/./constraints.txt diff --git a/services/web/server/requirements/_test.in b/services/web/server/requirements/_test.in index d8afabb9146a..a05cd9ce38c0 100644 --- a/services/web/server/requirements/_test.in +++ b/services/web/server/requirements/_test.in @@ -15,8 +15,9 @@ click coverage docker Faker -fastapi[standard] +fakeredis[lua] fastapi-pagination +fastapi[standard] flaky hypothesis jsonref diff --git a/services/web/server/requirements/_test.txt b/services/web/server/requirements/_test.txt index 3d0dbd753fcb..e992e5eb9b8d 100644 --- a/services/web/server/requirements/_test.txt +++ b/services/web/server/requirements/_test.txt @@ -2,7 +2,7 @@ aiohappyeyeballs==2.6.1 # via # -c requirements/_base.txt # aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../../requirements/constraints.txt # -c requirements/_base.txt @@ -48,14 +48,16 @@ certifi==2023.7.22 # httpcore # httpx # requests + # sentry-sdk charset-normalizer==2.0.12 # via # -c requirements/_base.txt # requests -click==8.1.3 +click==8.2.1 # via # -c requirements/_base.txt # -r requirements/_test.in + # rich-toolkit # typer 
# uvicorn coverage==7.6.12 @@ -72,17 +74,24 @@ email-validator==2.2.0 # via # -c requirements/_base.txt # fastapi + # pydantic execnet==2.1.1 # via pytest-xdist faker==19.6.1 # via # -c requirements/_base.txt # -r requirements/_test.in -fastapi==0.115.6 +fakeredis==2.30.3 # via -r requirements/_test.in -fastapi-cli==0.0.5 +fastapi==0.116.1 + # via + # -r requirements/_test.in + # fastapi-pagination +fastapi-cli==0.0.8 # via fastapi -fastapi-pagination==0.12.34 +fastapi-cloud-cli==0.1.5 + # via fastapi-cli +fastapi-pagination==0.14.0 # via -r requirements/_test.in flaky==3.8.1 # via -r requirements/_test.in @@ -95,12 +104,12 @@ greenlet==2.0.2 # via # -c requirements/_base.txt # sqlalchemy -h11==0.14.0 +h11==0.16.0 # via # -c requirements/_base.txt # httpcore # uvicorn -httpcore==1.0.7 +httpcore==1.0.9 # via # -c requirements/_base.txt # httpx @@ -111,6 +120,7 @@ httpx==0.28.1 # -c requirements/../../../../requirements/constraints.txt # -c requirements/_base.txt # fastapi + # fastapi-cloud-cli # respx hypothesis==6.91.0 # via -r requirements/_test.in @@ -126,20 +136,24 @@ idna==3.3 # yarl iniconfig==2.0.0 # via pytest -jinja2==3.1.2 +jinja2==3.1.6 # via # -c requirements/../../../../requirements/constraints.txt # -c requirements/_base.txt # fastapi jsonref==1.1.0 - # via -r requirements/_test.in + # via + # -c requirements/_base.txt + # -r requirements/_test.in jsonschema==3.2.0 # via # -c requirements/_base.txt # -r requirements/_test.in # openapi-schema-validator # openapi-spec-validator -mako==1.2.2 +lupa==2.5 + # via fakeredis +mako==1.3.10 # via # -c requirements/../../../../requirements/constraints.txt # -c requirements/_base.txt @@ -162,9 +176,9 @@ multidict==6.1.0 # -c requirements/_base.txt # aiohttp # yarl -mypy==1.15.0 +mypy==1.16.1 # via sqlalchemy -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via mypy openapi-schema-validator==0.2.3 # via openapi-spec-validator @@ -176,8 +190,12 @@ packaging==24.1 # aioresponses # pytest # pytest-sugar +pathspec==0.12.1 + # via mypy pluggy==1.5.0 - # via pytest + # via + # pytest + # pytest-cov pprintpp==0.4.0 # via pytest-icdiff propcache==0.3.1 @@ -187,26 +205,28 @@ propcache==0.3.1 # yarl py-cpuinfo==9.0.0 # via pytest-benchmark -pydantic==2.10.2 +pydantic==2.11.7 # via # -c requirements/../../../../requirements/constraints.txt # -c requirements/_base.txt # fastapi + # fastapi-cloud-cli # fastapi-pagination -pydantic-core==2.27.1 +pydantic-core==2.33.2 # via # -c requirements/_base.txt # pydantic pygments==2.15.1 # via # -c requirements/_base.txt + # pytest # rich pyrsistent==0.18.1 # via # -c requirements/_base.txt # jsonschema # referencing -pytest==8.3.5 +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio @@ -218,25 +238,25 @@ pytest==8.3.5 # pytest-mock # pytest-sugar # pytest-xdist -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in pytest-benchmark==5.1.0 # via -r requirements/_test.in -pytest-cov==6.0.0 +pytest-cov==6.2.1 # via -r requirements/_test.in -pytest-docker==3.2.0 +pytest-docker==3.2.3 # via -r requirements/_test.in pytest-icdiff==0.9 # via -r requirements/_test.in pytest-instafail==0.5.0 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in pytest-sugar==1.0.0 # via -r requirements/_test.in -pytest-xdist==3.6.1 +pytest-xdist==3.8.0 # via -r requirements/_test.in python-dateutil==2.8.2 # via @@ -260,21 +280,31 @@ redis==5.2.1 # -c 
requirements/../../../../requirements/constraints.txt # -c requirements/_base.txt # -r requirements/_test.in + # fakeredis referencing==0.8.11 # via # -c requirements/../../../../requirements/constraints.txt # types-jsonschema -requests==2.32.2 +requests==2.32.4 # via # -c requirements/_base.txt # docker respx==0.22.0 # via -r requirements/_test.in -rich==13.4.2 +rich==14.1.0 # via # -c requirements/_base.txt + # rich-toolkit # typer -setuptools==69.1.1 +rich-toolkit==0.15.0 + # via + # fastapi-cli + # fastapi-cloud-cli +rignore==0.6.4 + # via fastapi-cloud-cli +sentry-sdk==2.35.0 + # via fastapi-cloud-cli +setuptools==80.9.0 # via # -c requirements/_base.txt # jsonschema @@ -293,7 +323,9 @@ sniffio==1.3.1 # -c requirements/_base.txt # anyio sortedcontainers==2.4.0 - # via hypothesis + # via + # fakeredis + # hypothesis sqlalchemy==1.4.47 # via # -c requirements/../../../../requirements/constraints.txt @@ -302,7 +334,7 @@ sqlalchemy==1.4.47 # alembic sqlalchemy2-stubs==0.0.2a38 # via sqlalchemy -starlette==0.41.3 +starlette==0.47.2 # via # -c requirements/../../../../requirements/constraints.txt # fastapi @@ -312,10 +344,11 @@ tenacity==8.5.0 # -r requirements/_test.in termcolor==2.5.0 # via pytest-sugar -typer==0.12.3 +typer==0.16.1 # via # -c requirements/_base.txt # fastapi-cli + # fastapi-cloud-cli types-aiofiles==24.1.0.20241221 # via -r requirements/_test.in types-jsonschema==4.23.0.20241208 @@ -326,7 +359,7 @@ types-passlib==1.7.7.20241221 # via -r requirements/_test.in types-pyyaml==6.0.12.20241230 # via -r requirements/_test.in -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # asyncpg-stubs @@ -335,18 +368,27 @@ typing-extensions==4.12.2 # mypy # pydantic # pydantic-core + # rich-toolkit # sqlalchemy2-stubs + # starlette # typer -urllib3==2.2.3 + # typing-inspection +typing-inspection==0.4.1 + # via + # -c requirements/_base.txt + # pydantic +urllib3==2.5.0 # via # -c requirements/../../../../requirements/constraints.txt # -c requirements/_base.txt # docker # requests + # sentry-sdk uvicorn==0.34.2 # via # fastapi # fastapi-cli + # fastapi-cloud-cli uvloop==0.21.0 # via # -c requirements/_base.txt diff --git a/services/web/server/requirements/_tools.txt b/services/web/server/requirements/_tools.txt index 731ed3fc72fc..d81c8dbc0ec5 100644 --- a/services/web/server/requirements/_tools.txt +++ b/services/web/server/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.3 +click==8.2.1 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -30,11 +30,11 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via # -c requirements/_test.txt # -r requirements/../../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # -c requirements/_test.txt # black @@ -50,7 +50,10 @@ packaging==24.1 # black # build pathspec==0.12.1 - # via black + # via + # -c requirements/_test.txt + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -76,7 +79,7 @@ pyyaml==6.0.1 # pre-commit ruff==0.9.9 # via -r requirements/../../../../requirements/devenv.txt -setuptools==69.1.1 +setuptools==80.9.0 # via # -c requirements/_base.txt # -c requirements/_test.txt @@ -85,7 +88,7 @@ tomlkit==0.13.2 # via pylint types-cachetools==5.5.0.20240820 # via -r requirements/_tools.in -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git 
a/services/web/server/requirements/constraints.txt b/services/web/server/requirements/constraints.txt index 71c712593cb1..da024164311a 100644 --- a/services/web/server/requirements/constraints.txt +++ b/services/web/server/requirements/constraints.txt @@ -7,6 +7,3 @@ # SEE services/web/server/tests/unit/isolated/test_utils.py::test_yarl_new_url_generation for properly usage yarl>=1.8.2 frozenlist>=1.3.1 - -# See: https://github.com/pydantic/pydantic/issues/4011 -pydantic>=1.10 diff --git a/services/web/server/setup.cfg b/services/web/server/setup.cfg index 8400824ccb51..9c4a56f0e785 100644 --- a/services/web/server/setup.cfg +++ b/services/web/server/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.68.0 +current_version = 0.79.0 commit = True message = services/webserver api version: {current_version} → {new_version} tag = False diff --git a/services/web/server/src/simcore_service_webserver/_meta.py b/services/web/server/src/simcore_service_webserver/_meta.py index 8dde8d5ef954..33eab823fb27 100644 --- a/services/web/server/src/simcore_service_webserver/_meta.py +++ b/services/web/server/src/simcore_service_webserver/_meta.py @@ -1,6 +1,5 @@ -""" Current version of the simcore_service_webserver application and its API +"""Current version of the simcore_service_webserver application and its API""" -""" from typing import Final from models_library.basic_types import VersionStr @@ -19,12 +18,11 @@ # legacy consts -APP_NAME: str = __name__.split(".")[0] +APP_NAME: Final[str] = info.app_name +PROMETHEUS_FRIENDLY_APP_NAME: Final[str] = info.prometheus_friendly_app_name api_version_prefix: str = API_VTAG -# kids drawings :-) - WELCOME_MSG = r""" _ _ _ | | | | | | @@ -45,6 +43,7 @@ (_)) __((/ __| | (_ || (__ \___| \___| + """ WELCOME_DB_LISTENER_MSG = r""" @@ -54,5 +53,17 @@ | | | _ <___| |--| |___ \- -| __| | | __| _ < |_____\_____/ \_____\___<_____/|__|\_____\__|__\_____\__|\_/ - """ + +# SEE https://patorjk.com/software/taag/#p=display&f=BlurVision%20ASCII&t=Auth%0A +WELCOME_AUTH_APP_MSG = r""" + ░▒▓██████▓▒░░▒▓█▓▒░░▒▓█▓▒░▒▓████████▓▒░▒▓█▓▒░░▒▓█▓▒░ +░▒▓█▓▒░░▒▓█▓▒░▒▓█▓▒░░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓█▓▒░░▒▓█▓▒░ +░▒▓█▓▒░░▒▓█▓▒░▒▓█▓▒░░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓█▓▒░░▒▓█▓▒░ +░▒▓████████▓▒░▒▓█▓▒░░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓████████▓▒░ +░▒▓█▓▒░░▒▓█▓▒░▒▓█▓▒░░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓█▓▒░░▒▓█▓▒░ +░▒▓█▓▒░░▒▓█▓▒░▒▓█▓▒░░▒▓█▓▒░ ░▒▓█▓▒░ ░▒▓█▓▒░░▒▓█▓▒░ +░▒▓█▓▒░░▒▓█▓▒░░▒▓██████▓▒░ ░▒▓█▓▒░ ░▒▓█▓▒░░▒▓█▓▒░ {} +""".format( + f"v{__version__}" +) diff --git a/services/web/server/src/simcore_service_webserver/activity/_handlers.py b/services/web/server/src/simcore_service_webserver/activity/_handlers.py index 4e87c8f3bc0f..8bdba1300181 100644 --- a/services/web/server/src/simcore_service_webserver/activity/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/activity/_handlers.py @@ -6,8 +6,8 @@ from models_library.api_schemas_webserver.activity import ActivityStatusDict from pydantic import TypeAdapter from servicelib.aiohttp.client_session import get_client_session +from servicelib.aiohttp.request_keys import RQT_USERID_KEY from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON -from servicelib.request_keys import RQT_USERID_KEY from yarl import URL from .._meta import API_VTAG diff --git a/services/web/server/src/simcore_service_webserver/activity/plugin.py b/services/web/server/src/simcore_service_webserver/activity/plugin.py index 9313c81f2a4c..dfcac7c8015a 100644 --- a/services/web/server/src/simcore_service_webserver/activity/plugin.py +++ 
b/services/web/server/src/simcore_service_webserver/activity/plugin.py @@ -1,15 +1,15 @@ import logging from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func from . import _handlers from .settings import get_plugin_settings _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( "simcore_service_webserver.activity", category=ModuleCategory.ADDON, settings_name="WEBSERVER_ACTIVITY", diff --git a/services/web/server/src/simcore_service_webserver/announcements/plugin.py b/services/web/server/src/simcore_service_webserver/announcements/plugin.py index 88a39940cbfd..662b458ac06c 100644 --- a/services/web/server/src/simcore_service_webserver/announcements/plugin.py +++ b/services/web/server/src/simcore_service_webserver/announcements/plugin.py @@ -1,12 +1,12 @@ """ - Plugin to broadcast announcements to all front-end users +Plugin to broadcast announcements to all front-end users """ import logging from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func from ..constants import APP_SETTINGS_KEY from ..products.plugin import setup_products from ..redis import setup_redis @@ -15,7 +15,7 @@ _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_ANNOUNCEMENTS", diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml index 8addd2a9eac7..07364b9219ad 100644 --- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml +++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml @@ -2,7 +2,7 @@ openapi: 3.1.0 info: title: simcore-service-webserver description: Main service with an interface (http-API & websockets) to the web front-end - version: 0.68.0 + version: 0.79.0 servers: - url: '' description: webserver @@ -238,13 +238,13 @@ paths: tags: - auth summary: Check Auth - description: checks if user is authenticated in the platform - operationId: check_authentication + description: checks whether user request is authenticated + operationId: check_auth responses: '204': description: Successful Response '401': - description: unauthorized reset due to invalid token code + description: Unauthorized content: application/json: schema: @@ -381,62 +381,70 @@ paths: schema: {} image/png: {} /v0/auth/api-keys: - get: + post: tags: - auth - summary: List Api Keys - description: lists API keys by this user - operationId: list_api_keys + summary: Create Api Key + description: creates API keys to access public API + operationId: create_api_key + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/ApiKeyCreateRequest' responses: - '200': + '201': description: Successful Response content: application/json: schema: - $ref: '#/components/schemas/Envelope_list_ApiKeyGet__' + $ref: '#/components/schemas/Envelope_ApiKeyCreateResponse_' '409': - description: Conflict content: application/json: schema: $ref: '#/components/schemas/EnvelopedError' + description: Conflict '404': - description: Not Found content: application/json: schema: $ref: '#/components/schemas/EnvelopedError' - post: + description: Not Found + get: tags: - auth - summary: Create Api Key - description: creates API keys to access public API - operationId: 
create_api_key - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ApiKeyCreateRequest' - required: true + summary: List Api Keys + description: lists API keys by this user + operationId: list_api_keys + parameters: + - name: includeAutogenerated + in: query + required: false + schema: + type: boolean + default: false + title: Includeautogenerated responses: - '201': + '200': description: Successful Response content: application/json: schema: - $ref: '#/components/schemas/Envelope_ApiKeyCreateResponse_' + $ref: '#/components/schemas/Envelope_list_ApiKeyGet__' '409': - description: Conflict content: application/json: schema: $ref: '#/components/schemas/EnvelopedError' + description: Conflict '404': - description: Not Found content: application/json: schema: $ref: '#/components/schemas/EnvelopedError' + description: Not Found /v0/auth/api-keys/{api_key_id}: get: tags: @@ -502,6 +510,268 @@ paths: schema: $ref: '#/components/schemas/EnvelopedError' description: Not Found + /v0/conversations: + post: + tags: + - conversations + summary: Create Conversation + operationId: create_conversation + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/_ConversationsCreateBodyParams' + responses: + '201': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_ConversationRestGet_' + get: + tags: + - conversations + summary: List Conversations + operationId: list_conversations + parameters: + - name: limit + in: query + required: false + schema: + type: integer + maximum: 50 + minimum: 1 + default: 20 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + minimum: 0 + default: 0 + title: Offset + - name: type + in: query + required: true + schema: + $ref: '#/components/schemas/ConversationType' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Page_ConversationRestGet_' + /v0/conversations/{conversation_id}: + patch: + tags: + - conversations + summary: Update Conversation + operationId: update_conversation + parameters: + - name: conversation_id + in: path + required: true + schema: + type: string + format: uuid + title: Conversation Id + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/ConversationPatch' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_ConversationRestGet_' + delete: + tags: + - conversations + summary: Delete Conversation + operationId: delete_conversation + parameters: + - name: conversation_id + in: path + required: true + schema: + type: string + format: uuid + title: Conversation Id + responses: + '204': + description: Successful Response + get: + tags: + - conversations + summary: Get Conversation + operationId: get_conversation + parameters: + - name: conversation_id + in: path + required: true + schema: + type: string + format: uuid + title: Conversation Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_ConversationRestGet_' + /v0/conversations/{conversation_id}/messages: + post: + tags: + - conversations + summary: Create Conversation Message + operationId: create_conversation_message + parameters: + - name: conversation_id + in: path + required: true + schema: + type: string + format: uuid + title: 
Conversation Id + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/_ConversationMessageCreateBodyParams' + responses: + '201': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_ConversationMessageRestGet_' + get: + tags: + - conversations + summary: List Conversation Messages + operationId: list_conversation_messages + parameters: + - name: conversation_id + in: path + required: true + schema: + type: string + format: uuid + title: Conversation Id + - name: limit + in: query + required: false + schema: + type: integer + default: 20 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + default: 0 + title: Offset + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Page_ConversationMessageRestGet_' + /v0/conversations/{conversation_id}/messages/{message_id}: + put: + tags: + - conversations + summary: Update Conversation Message + operationId: update_conversation_message + parameters: + - name: conversation_id + in: path + required: true + schema: + type: string + format: uuid + title: Conversation Id + - name: message_id + in: path + required: true + schema: + type: string + format: uuid + title: Message Id + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/ConversationMessagePatch' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_ConversationMessageRestGet_' + delete: + tags: + - conversations + summary: Delete Conversation Message + operationId: delete_conversation_message + parameters: + - name: conversation_id + in: path + required: true + schema: + type: string + format: uuid + title: Conversation Id + - name: message_id + in: path + required: true + schema: + type: string + format: uuid + title: Message Id + responses: + '204': + description: Successful Response + get: + tags: + - conversations + summary: Get Conversation Message + operationId: get_conversation_message + parameters: + - name: conversation_id + in: path + required: true + schema: + type: string + format: uuid + title: Conversation Id + - name: message_id + in: path + required: true + schema: + type: string + format: uuid + title: Message Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_ConversationMessageRestGet_' /v0/groups: get: tags: @@ -1179,7 +1449,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Envelope_MyProfileGet_' + $ref: '#/components/schemas/Envelope_MyProfileRestGet_' patch: tags: - users @@ -1189,11 +1459,79 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/MyProfilePatch' + $ref: '#/components/schemas/MyProfileRestPatch' required: true responses: '204': description: Successful Response + /v0/me/phone:register: + post: + tags: + - users + summary: My Phone Register + description: Starts the phone registration process + operationId: my_phone_register + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/MyPhoneRegister' + required: true + responses: + '202': + description: Phone registration initiated + content: + application/json: + schema: {} + '401': + description: Authentication required + '403': + description: Insufficient permissions + '422': + description: Invalid 
phone number format + /v0/me/phone:resend: + post: + tags: + - users + summary: My Phone Resend + description: Resends the phone registration code + operationId: my_phone_resend + responses: + '202': + description: Phone code resent + content: + application/json: + schema: {} + '400': + description: No pending phone registration found + '401': + description: Authentication required + '403': + description: Insufficient permissions + /v0/me/phone:confirm: + post: + tags: + - users + summary: My Phone Confirm + description: Confirms the phone registration + operationId: my_phone_confirm + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/MyPhoneConfirm' + required: true + responses: + '204': + description: Phone registration confirmed + '400': + description: No pending registration or invalid code + '401': + description: Authentication required + '403': + description: Insufficient permissions + '422': + description: Invalid confirmation code format /v0/me/preferences/{preference_id}: patch: tags: @@ -1344,6 +1682,19 @@ paths: application/json: schema: $ref: '#/components/schemas/Envelope_list_MyPermissionGet__' + /v0/me/function-permissions: + get: + tags: + - users + summary: List User Functions Permissions + operationId: list_user_functions_permissions + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_MyFunctionPermissionsGet_' /v0/users:search: post: tags: @@ -1447,12 +1798,36 @@ paths: parameters: - name: email in: query - required: true + required: false schema: - type: string - minLength: 3 - maxLength: 200 + anyOf: + - type: string + minLength: 3 + maxLength: 200 + pattern: ^[^%]*$ + - type: 'null' title: Email + - name: primary_group_id + in: query + required: false + schema: + anyOf: + - type: integer + exclusiveMinimum: true + minimum: 0 + - type: 'null' + title: Primary Group Id + - name: user_name + in: query + required: false + schema: + anyOf: + - type: string + minLength: 3 + maxLength: 200 + pattern: ^[^%]*$ + - type: 'null' + title: User Name responses: '200': description: Successful Response @@ -1471,7 +1846,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/PreRegisteredUserGet' + $ref: '#/components/schemas/UserAccountRestPreRegister' required: true responses: '200': @@ -2702,31 +3077,119 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Page_ComputationRunRestGet_' - /v0/computations/{project_id}/iterations/latest/tasks: + $ref: '#/components/schemas/Page_ComputationRunRestGet_' + /v0/computations/{project_id}/iterations/latest/tasks: + get: + tags: + - computations + - projects + summary: List Computations Latest Iteration Tasks + operationId: list_computations_latest_iteration_tasks + parameters: + - name: project_id + in: path + required: true + schema: + type: string + format: uuid + title: Project Id + - name: order_by + in: query + required: false + schema: + type: string + contentMediaType: application/json + contentSchema: {} + default: '{"field":"start","direction":"asc"}' + title: Order By + - name: limit + in: query + required: false + schema: + type: integer + default: 20 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + default: 0 + title: Offset + - name: include_children + in: query + required: false + schema: + type: boolean + default: false + title: Include Children + responses: + '200': + description: Successful Response + content: + 
application/json: + schema: + $ref: '#/components/schemas/Page_ComputationTaskRestGet_' + /v0/computation-collection-runs: + get: + tags: + - computations + - projects + summary: List Computation Collection Runs + operationId: list_computation_collection_runs + parameters: + - name: limit + in: query + required: false + schema: + type: integer + default: 20 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + default: 0 + title: Offset + - name: filter_only_running + in: query + required: false + schema: + type: boolean + default: false + title: Filter Only Running + - name: filter_by_root_project_id + in: query + required: false + schema: + anyOf: + - type: string + format: uuid + - type: 'null' + title: Filter By Root Project Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Page_ComputationCollectionRunRestGet_' + /v0/computation-collection-runs/{collection_run_id}/tasks: get: tags: - computations - projects - summary: List Computations Latest Iteration Tasks - operationId: list_computations_latest_iteration_tasks + summary: List Computation Collection Run Tasks + operationId: list_computation_collection_run_tasks parameters: - - name: project_id + - name: collection_run_id in: path required: true schema: type: string format: uuid - title: Project Id - - name: order_by - in: query - required: false - schema: - type: string - contentMediaType: application/json - contentSchema: {} - default: '{"field":"start","direction":"asc"}' - title: Order By + title: Collection Run Id - name: limit in: query required: false @@ -2741,20 +3204,13 @@ paths: type: integer default: 0 title: Offset - - name: include_children - in: query - required: false - schema: - type: boolean - default: false - title: Include Children responses: '200': description: Successful Response content: application/json: schema: - $ref: '#/components/schemas/Page_ComputationTaskRestGet_' + $ref: '#/components/schemas/Page_ComputationCollectionRunTaskRestGet_' /v0/projects/{project_id}:xport: post: tags: @@ -3184,19 +3640,19 @@ paths: summary: Register Function operationId: register_function requestBody: + required: true content: application/json: schema: oneOf: - $ref: '#/components/schemas/ProjectFunctionToRegister' - $ref: '#/components/schemas/SolverFunctionToRegister' - title: ' Body' discriminator: propertyName: functionClass mapping: PROJECT: '#/components/schemas/ProjectFunctionToRegister' SOLVER: '#/components/schemas/SolverFunctionToRegister' - required: true + title: ' Body' responses: '200': description: Successful Response @@ -3204,6 +3660,67 @@ paths: application/json: schema: $ref: '#/components/schemas/Envelope_Annotated_Union_RegisteredProjectFunctionGet__RegisteredSolverFunctionGet___FieldInfo_annotation_NoneType__required_True__discriminator__function_class____' + get: + tags: + - functions + summary: List Functions + operationId: list_functions + parameters: + - name: include_extras + in: query + required: false + schema: + type: boolean + default: false + title: Include Extras + - name: search + in: query + required: false + schema: + anyOf: + - type: string + - type: 'null' + title: Search + - name: filters + in: query + required: false + schema: + anyOf: + - type: string + contentMediaType: application/json + contentSchema: {} + - type: 'null' + title: Filters + - name: order_by + in: query + required: false + schema: + type: string + contentMediaType: application/json + contentSchema: {} + 
default: '{"field":"modified","direction":"desc"}' + title: Order By + - name: limit + in: query + required: false + schema: + type: integer + default: 20 + title: Limit + - name: offset + in: query + required: false + schema: + type: integer + default: 0 + title: Offset + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_list_Annotated_Union_RegisteredProjectFunctionGet__RegisteredSolverFunctionGet___FieldInfo_annotation_NoneType__required_True__discriminator__function_class_____' /v0/functions/{function_id}: get: tags: @@ -3218,6 +3735,39 @@ paths: type: string format: uuid title: Function Id + - name: include_extras + in: query + required: false + schema: + type: boolean + default: false + title: Include Extras + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_Annotated_Union_RegisteredProjectFunctionGet__RegisteredSolverFunctionGet___FieldInfo_annotation_NoneType__required_True__discriminator__function_class____' + patch: + tags: + - functions + summary: Update Function + operationId: update_function + parameters: + - name: function_id + in: path + required: true + schema: + type: string + format: uuid + title: Function Id + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/RegisteredFunctionUpdate' responses: '200': description: Successful Response @@ -3238,6 +3788,93 @@ paths: type: string format: uuid title: Function Id + - name: force + in: query + required: false + schema: + type: boolean + default: false + title: Force + responses: + '204': + description: Successful Response + /v0/functions/{function_id}/groups: + get: + tags: + - functions + summary: Get Function Groups + operationId: get_function_groups + parameters: + - name: function_id + in: path + required: true + schema: + type: string + format: uuid + title: Function Id + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_dict_Annotated_int__Gt___FunctionGroupAccessRightsGet__' + /v0/functions/{function_id}/groups/{group_id}: + put: + tags: + - functions + summary: Create or update a Function Group + operationId: create_or_update_function_group + parameters: + - name: function_id + in: path + required: true + schema: + type: string + format: uuid + title: Function Id + - name: group_id + in: path + required: true + schema: + type: integer + exclusiveMinimum: true + title: Group Id + minimum: 0 + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/FunctionGroupAccessRightsUpdate' + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_FunctionGroupAccessRightsGet_' + delete: + tags: + - functions + summary: Delete a Function Group + operationId: delete_function_group + parameters: + - name: function_id + in: path + required: true + schema: + type: string + format: uuid + title: Function Id + - name: group_id + in: path + required: true + schema: + type: integer + exclusiveMinimum: true + title: Group Id + minimum: 0 responses: '204': description: Successful Response @@ -3245,7 +3882,7 @@ paths: get: tags: - long-running-tasks - summary: List Tasks + summary: Get Async Jobs description: Lists all long running tasks operationId: get_async_jobs responses: @@ -3283,7 +3920,7 @@ paths: get: tags: - 
long-running-tasks - summary: Get Task Status + summary: Get Async Job Status description: Retrieves the status of a task operationId: get_async_job_status parameters: @@ -3327,8 +3964,8 @@ paths: delete: tags: - long-running-tasks - summary: Cancel And Delete Task - description: Cancels and deletes a task + summary: Cancel Async Job + description: Cancels and removes a task operationId: cancel_async_job parameters: - name: task_id @@ -3368,7 +4005,7 @@ paths: get: tags: - long-running-tasks - summary: Get Task Result + summary: Get Async Job Result description: Retrieves the result of a task operationId: get_async_job_result parameters: @@ -3383,7 +4020,8 @@ paths: description: Successful Response content: application/json: - schema: {} + schema: + title: Response Get Async Job Result '404': content: application/json: @@ -3446,9 +4084,9 @@ paths: delete: tags: - long-running-tasks-legacy - summary: Cancel And Delete Task - description: Cancels and deletes a task - operationId: cancel_and_delete_task + summary: Remove Task + description: Cancels and removes a task + operationId: remove_task parameters: - name: task_id in: path @@ -3478,7 +4116,8 @@ paths: description: Successful Response content: application/json: - schema: {} + schema: + title: Response Get Task Result /v0/catalog/licensed-items: get: tags: @@ -3920,6 +4559,13 @@ paths: type: string pattern: ^simcore/services/((comp|dynamic|frontend))/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$ title: Viewer Key + - name: viewer_version + in: query + required: true + schema: + type: string + pattern: ^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$ + title: Viewer Version - name: file_size in: query required: true @@ -3946,12 +4592,6 @@ paths: - type: 'null' default: unknown title: File Name - requestBody: - required: true - content: - application/json: - schema: - $ref: '#/components/schemas/ServiceKeyVersion' responses: '302': description: Opens osparc and starts viewer for selected data @@ -4585,6 +5225,20 @@ paths: type: integer default: 0 title: Offset + - name: type + in: query + required: false + schema: + $ref: '#/components/schemas/ProjectTypeAPI' + default: all + - name: template_type + in: query + required: false + schema: + anyOf: + - $ref: '#/components/schemas/ProjectTemplateType' + - type: 'null' + title: Template Type - name: text in: query required: false @@ -6118,7 +6772,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Envelope_ProjectState_' + $ref: '#/components/schemas/Envelope_ProjectStateOutputSchema_' /v0/projects/{project_uuid}/tags/{tag_id}:add: post: tags: @@ -8370,6 +9024,7 @@ components: AccountRequestInfo: properties: form: + additionalProperties: true type: object title: Form captcha: @@ -8387,14 +9042,14 @@ components: application: Antenna_Design city: Washington company: EM Com - country: USA + country: Switzerland description: Description of something email: maxwel@email.com eula: true firstName: James hear: Search_Engine lastName: Maxwel - phone: +1 123456789 + phone: +41 44 245 96 96 postalCode: '98001' privacyPolicy: true AccountRequestStatus: @@ -8421,7 +9076,34 @@ components: - stats - limits title: Activity - AnnotationUI: + AnnotationUI-Input: + properties: + type: + type: string + enum: + - note + - rect + - text + - conversation + title: Type + color: + anyOf: + - type: string + format: color + - type: 'null' + title: Color + attributes: + additionalProperties: 
true + type: object + title: Attributes + description: svg attributes + additionalProperties: false + type: object + required: + - type + - attributes + title: AnnotationUI + AnnotationUI-Output: properties: type: type: string @@ -8429,12 +9111,13 @@ components: - note - rect - text + - conversation title: Type color: type: string - format: color title: Color attributes: + additionalProperties: true type: object title: Attributes description: svg attributes @@ -8442,7 +9125,6 @@ components: type: object required: - type - - color - attributes title: AnnotationUI Announcement: @@ -8532,6 +9214,9 @@ components: apiBaseUrl: anyOf: - type: string + maxLength: 2083 + minLength: 1 + format: uri - type: 'null' title: Apibaseurl apiKey: @@ -8574,13 +9259,15 @@ components: title: Version description: Application's version services: + additionalProperties: true type: object title: Services description: Other backend services connected from this service default: {} sessions: anyOf: - - type: object + - additionalProperties: true + type: object - type: 'null' title: Sessions description: Client sessions info. If single session per app, then is denoted @@ -8589,12 +9276,16 @@ components: url: anyOf: - type: string + minLength: 1 + format: uri - type: 'null' title: Url description: Link to current resource diagnostics_url: anyOf: - type: string + minLength: 1 + format: uri - type: 'null' title: Diagnostics Url description: Link to diagnostics report sub-resource. This MIGHT take some @@ -8711,11 +9402,17 @@ components: thumbnail: anyOf: - type: string + maxLength: 2083 + minLength: 1 + format: uri - type: 'null' title: Thumbnail icon: anyOf: - type: string + maxLength: 2083 + minLength: 1 + format: uri - type: 'null' title: Icon descriptionUi: @@ -8767,6 +9464,7 @@ components: - type: 'null' title: Classifiers quality: + additionalProperties: true type: object title: Quality release: @@ -8930,11 +9628,17 @@ components: thumbnail: anyOf: - type: string + maxLength: 2083 + minLength: 1 + format: uri - type: 'null' title: Thumbnail icon: anyOf: - type: string + maxLength: 2083 + minLength: 1 + format: uri - type: 'null' title: Icon descriptionUi: @@ -8988,6 +9692,7 @@ components: - type: 'null' title: Classifiers quality: + additionalProperties: true type: object title: Quality history: @@ -9130,6 +9835,7 @@ components: - type: 'null' title: Classifiers quality: + additionalProperties: true type: object title: Quality default: {} @@ -9214,6 +9920,112 @@ components: required: - version title: CompatibleService + ComputationCollectionRunRestGet: + properties: + collectionRunId: + type: string + format: uuid + title: Collectionrunid + projectIds: + items: + type: string + type: array + title: Projectids + state: + $ref: '#/components/schemas/RunningState' + info: + additionalProperties: true + type: object + title: Info + submittedAt: + type: string + format: date-time + title: Submittedat + startedAt: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Startedat + endedAt: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Endedat + name: + type: string + title: Name + type: object + required: + - collectionRunId + - projectIds + - state + - info + - submittedAt + - startedAt + - endedAt + - name + title: ComputationCollectionRunRestGet + ComputationCollectionRunTaskRestGet: + properties: + projectUuid: + type: string + format: uuid + title: Projectuuid + nodeId: + type: string + format: uuid + title: Nodeid + state: + $ref: '#/components/schemas/RunningState' + 
progress: + type: number + title: Progress + image: + additionalProperties: true + type: object + title: Image + startedAt: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Startedat + endedAt: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Endedat + logDownloadLink: + anyOf: + - type: string + minLength: 1 + format: uri + - type: 'null' + title: Logdownloadlink + osparcCredits: + anyOf: + - type: string + - type: 'null' + title: Osparccredits + name: + type: string + title: Name + type: object + required: + - projectUuid + - nodeId + - state + - progress + - image + - startedAt + - endedAt + - logDownloadLink + - osparcCredits + - name + title: ComputationCollectionRunTaskRestGet ComputationGet: properties: id: @@ -9268,11 +10080,15 @@ components: task url: type: string + minLength: 1 + format: uri title: Url description: the link where to get the status of the task stop_url: anyOf: - type: string + minLength: 1 + format: uri - type: 'null' title: Stop Url description: the link where to stop the task @@ -9299,6 +10115,7 @@ components: state: $ref: '#/components/schemas/RunningState' info: + additionalProperties: true type: object title: Info submittedAt: @@ -9321,6 +10138,7 @@ components: type: string title: Rootprojectname projectCustomMetadata: + additionalProperties: true type: object title: Projectcustommetadata type: object @@ -9384,6 +10202,7 @@ components: type: number title: Progress image: + additionalProperties: true type: object title: Image startedAt: @@ -9401,6 +10220,8 @@ components: logDownloadLink: anyOf: - type: string + minLength: 1 + format: uri - type: 'null' title: Logdownloadlink nodeName: @@ -9439,6 +10260,15 @@ components: - serviceKey - serviceVersion title: ConnectServiceToPricingPlanBodyParams + ConversationMessagePatch: + properties: + content: + anyOf: + - type: string + - type: 'null' + title: Content + type: object + title: ConversationMessagePatch ConversationMessageRestGet: properties: messageId: @@ -9484,6 +10314,21 @@ components: - MESSAGE - NOTIFICATION title: ConversationMessageType + ConversationPatch: + properties: + name: + anyOf: + - type: string + - type: 'null' + title: Name + extraContext: + anyOf: + - additionalProperties: true + type: object + - type: 'null' + title: Extracontext + type: object + title: ConversationPatch ConversationRestGet: properties: conversationId: @@ -9518,6 +10363,11 @@ components: type: string format: date-time title: Modified + extraContext: + additionalProperties: + type: string + type: object + title: Extracontext type: object required: - conversationId @@ -9528,12 +10378,14 @@ components: - type - created - modified + - extraContext title: ConversationRestGet ConversationType: type: string enum: - PROJECT_STATIC - PROJECT_ANNOTATION + - SUPPORT title: ConversationType CountryInfoDict: properties: @@ -9686,9 +10538,9 @@ components: total: anyOf: - type: integer + minimum: 0 - type: 'null' title: Total - description: Total items current_page: anyOf: - type: string @@ -9808,28 +10660,27 @@ components: test_name: type: string title: Test Name - error_type: - type: string - title: Error Type - error_message: - type: string - title: Error Message - traceback: + error_code: + anyOf: + - type: string + - type: 'null' + title: Error Code + user_message: type: string - title: Traceback + title: User Message + default: Email test failed type: object required: - test_name - - error_type - - error_message - - traceback title: EmailTestFailed EmailTestPassed: properties: fixtures: + 
additionalProperties: true type: object title: Fixtures info: + additionalProperties: true type: object title: Info type: object @@ -9869,6 +10720,8 @@ components: data: anyOf: - type: string + minLength: 1 + format: uri - type: 'null' title: Data error: @@ -10073,6 +10926,19 @@ components: title: Error type: object title: Envelope[FolderGet] + Envelope_FunctionGroupAccessRightsGet_: + properties: + data: + anyOf: + - $ref: '#/components/schemas/FunctionGroupAccessRightsGet' + - type: 'null' + error: + anyOf: + - {} + - type: 'null' + title: Error + type: object + title: Envelope[FunctionGroupAccessRightsGet] Envelope_GetProjectInactivityResponse_: properties: data: @@ -10203,6 +11069,19 @@ components: title: Error type: object title: Envelope[LoginNextPage] + Envelope_MyFunctionPermissionsGet_: + properties: + data: + anyOf: + - $ref: '#/components/schemas/MyFunctionPermissionsGet' + - type: 'null' + error: + anyOf: + - {} + - type: 'null' + title: Error + type: object + title: Envelope[MyFunctionPermissionsGet] Envelope_MyGroupsGet_: properties: data: @@ -10216,11 +11095,11 @@ components: title: Error type: object title: Envelope[MyGroupsGet] - Envelope_MyProfileGet_: + Envelope_MyProfileRestGet_: properties: data: anyOf: - - $ref: '#/components/schemas/MyProfileGet' + - $ref: '#/components/schemas/MyProfileRestGet' - type: 'null' error: anyOf: @@ -10228,7 +11107,7 @@ components: - type: 'null' title: Error type: object - title: Envelope[MyProfileGet] + title: Envelope[MyProfileRestGet] Envelope_MyTokenGet_: properties: data: @@ -10463,11 +11342,11 @@ components: title: Error type: object title: Envelope[ProjectShareAccepted] - Envelope_ProjectState_: + Envelope_ProjectStateOutputSchema_: properties: data: anyOf: - - $ref: '#/components/schemas/ProjectState' + - $ref: '#/components/schemas/ProjectStateOutputSchema' - type: 'null' error: anyOf: @@ -10475,7 +11354,7 @@ components: - type: 'null' title: Error type: object - title: Envelope[ProjectState] + title: Envelope[ProjectStateOutputSchema] Envelope_ProjectsCommentsAPI_: properties: data: @@ -10769,6 +11648,22 @@ components: title: Error type: object title: Envelope[_ProjectNodePreview] + Envelope_dict_Annotated_int__Gt___FunctionGroupAccessRightsGet__: + properties: + data: + anyOf: + - additionalProperties: + $ref: '#/components/schemas/FunctionGroupAccessRightsGet' + type: object + - type: 'null' + title: Data + error: + anyOf: + - {} + - type: 'null' + title: Error + type: object + title: Envelope[dict[Annotated[int, Gt], FunctionGroupAccessRightsGet]] Envelope_dict_Annotated_str__StringConstraints___ImageResources__: properties: data: @@ -10861,7 +11756,8 @@ components: properties: data: anyOf: - - type: object + - additionalProperties: true + type: object - type: 'null' title: Data error: @@ -10871,6 +11767,30 @@ components: title: Error type: object title: Envelope[dict[str, Any]] + ? 
Envelope_list_Annotated_Union_RegisteredProjectFunctionGet__RegisteredSolverFunctionGet___FieldInfo_annotation_NoneType__required_True__discriminator__function_class_____ + : properties: + data: + anyOf: + - items: + oneOf: + - $ref: '#/components/schemas/RegisteredProjectFunctionGet' + - $ref: '#/components/schemas/RegisteredSolverFunctionGet' + discriminator: + propertyName: functionClass + mapping: + PROJECT: '#/components/schemas/RegisteredProjectFunctionGet' + SOLVER: '#/components/schemas/RegisteredSolverFunctionGet' + type: array + - type: 'null' + title: Data + error: + anyOf: + - {} + - type: 'null' + title: Error + type: object + title: Envelope[list[Annotated[Union[RegisteredProjectFunctionGet, RegisteredSolverFunctionGet], + FieldInfo(annotation=NoneType, required=True, discriminator='function_class')]]] Envelope_list_Annotated_str__StringConstraints___: properties: data: @@ -11395,6 +12315,8 @@ components: status: type: integer title: Status + description: Redundant HTTP status code of the error.Must be the same as + in the HTTP response errors: items: $ref: '#/components/schemas/ErrorItemType' @@ -11682,6 +12604,8 @@ components: properties: state: type: string + minLength: 1 + format: uri title: State type: object required: @@ -11716,9 +12640,13 @@ components: properties: abort_upload: type: string + minLength: 1 + format: uri title: Abort Upload complete_upload: type: string + minLength: 1 + format: uri title: Complete Upload type: object required: @@ -11734,6 +12662,8 @@ components: urls: items: type: string + minLength: 1 + format: uri type: array title: Urls links: @@ -11855,6 +12785,40 @@ components: required: - name title: FolderReplaceBodyParams + FunctionGroupAccessRightsGet: + properties: + read: + type: boolean + title: Read + write: + type: boolean + title: Write + execute: + type: boolean + title: Execute + type: object + required: + - read + - write + - execute + title: FunctionGroupAccessRightsGet + FunctionGroupAccessRightsUpdate: + properties: + read: + type: boolean + title: Read + write: + type: boolean + title: Write + execute: + type: boolean + title: Execute + type: object + required: + - read + - write + - execute + title: FunctionGroupAccessRightsUpdate GetProjectInactivityResponse: properties: is_inactive: @@ -11949,22 +12913,23 @@ components: type: integer exclusiveMinimum: true title: Gid - description: the group ID + description: the group's unique ID minimum: 0 label: type: string title: Label - description: the group name + description: the group's display name description: type: string title: Description - description: the group description thumbnail: anyOf: - type: string + minLength: 1 + format: uri - type: 'null' title: Thumbnail - description: url to the group thumbnail + description: a link to the group's thumbnail accessRights: $ref: '#/components/schemas/GroupAccessRights' inclusionRules: @@ -11978,8 +12943,37 @@ components: - gid - label - description - - accessRights - title: GroupGet + - accessRights + title: GroupGet + GroupGetBase: + properties: + gid: + type: integer + exclusiveMinimum: true + title: Gid + description: the group's unique ID + minimum: 0 + label: + type: string + title: Label + description: the group's display name + description: + type: string + title: Description + thumbnail: + anyOf: + - type: string + minLength: 1 + format: uri + - type: 'null' + title: Thumbnail + description: a link to the group's thumbnail + type: object + required: + - gid + - label + - description + title: GroupGetBase GroupUpdate: 
properties: label: @@ -12211,6 +13205,8 @@ components: minimum: 0 - type: 'null' title: Trialaccountdays + description: Expiration time in days for trial accounts; `null` means not + a trial account extraCreditsInUsd: anyOf: - type: integer @@ -12219,6 +13215,7 @@ components: maximum: 500 - type: 'null' title: Extracreditsinusd + description: Welcome credits in USD; `null` means no welcome credits type: object required: - guest @@ -12255,6 +13252,9 @@ components: title: Created invitationLink: type: string + maxLength: 2083 + minLength: 1 + format: uri title: Invitationlink type: object required: @@ -12279,6 +13279,7 @@ components: JSONFunctionInputSchema: properties: schema_content: + additionalProperties: true type: object title: JSON Schema description: JSON Schema @@ -12293,6 +13294,7 @@ components: JSONFunctionOutputSchema: properties: schema_content: + additionalProperties: true type: object title: JSON Schema description: JSON Schema @@ -12422,11 +13424,17 @@ components: categoryIcon: anyOf: - type: string + maxLength: 2083 + minLength: 1 + format: uri - type: 'null' title: Categoryicon termsOfUseUrl: anyOf: - type: string + maxLength: 2083 + minLength: 1 + format: uri - type: 'null' title: Termsofuseurl createdAt: @@ -12627,6 +13635,19 @@ components: required: - color title: MarkerUI + MyFunctionPermissionsGet: + properties: + readFunctions: + type: boolean + title: Readfunctions + writeFunctions: + type: boolean + title: Writefunctions + type: object + required: + - readFunctions + - writeFunctions + title: MyFunctionPermissionsGet MyGroupsGet: properties: me: @@ -12644,6 +13665,12 @@ components: anyOf: - $ref: '#/components/schemas/GroupGet' - type: 'null' + support: + anyOf: + - $ref: '#/components/schemas/GroupGetBase' + - type: 'null' + description: Group ID of the app support team or None if no support is defined + for this product type: object required: - me @@ -12682,6 +13709,11 @@ components: description: Some foundation gid: '16' label: Blue Fundation + support: + description: The support team of the application + gid: '2' + label: Support Team + thumbnail: https://placekitten.com/15/15 MyPermissionGet: properties: name: @@ -12695,7 +13727,108 @@ components: - name - allowed title: MyPermissionGet - MyProfileGet: + MyPhoneConfirm: + properties: + code: + type: string + pattern: ^[A-Za-z0-9]+$ + title: Code + description: Alphanumeric confirmation code + type: object + required: + - code + title: MyPhoneConfirm + MyPhoneRegister: + properties: + phone: + type: string + format: phone + title: Phone + description: Phone number to register + type: object + required: + - phone + title: MyPhoneRegister + MyProfileAddressGet: + properties: + institution: + anyOf: + - type: string + - type: 'null' + title: Institution + address: + anyOf: + - type: string + - type: 'null' + title: Address + city: + anyOf: + - type: string + - type: 'null' + title: City + state: + anyOf: + - type: string + - type: 'null' + title: State + description: State, province, canton, ... + postalCode: + anyOf: + - type: string + - type: 'null' + title: Postalcode + country: + anyOf: + - type: string + - type: 'null' + title: Country + type: object + required: + - institution + - address + - city + - state + - postalCode + - country + title: MyProfileAddressGet + description: Details provided upon registration and used e.g. 
for invoicing + MyProfilePrivacyGet: + properties: + hideUsername: + type: boolean + title: Hideusername + hideFullname: + type: boolean + title: Hidefullname + hideEmail: + type: boolean + title: Hideemail + type: object + required: + - hideUsername + - hideFullname + - hideEmail + title: MyProfilePrivacyGet + MyProfilePrivacyPatch: + properties: + hideUsername: + anyOf: + - type: boolean + - type: 'null' + title: Hideusername + hideFullname: + anyOf: + - type: boolean + - type: 'null' + title: Hidefullname + hideEmail: + anyOf: + - type: boolean + - type: 'null' + title: Hideemail + type: object + title: MyProfilePrivacyPatch + MyProfileRestGet: properties: id: type: integer @@ -12724,6 +13857,11 @@ components: type: string format: email title: Login + phone: + anyOf: + - type: string + - type: 'null' + title: Phone role: type: string enum: @@ -12759,6 +13897,10 @@ components: $ref: '#/components/schemas/Preference' type: object title: Preferences + contact: + anyOf: + - $ref: '#/components/schemas/MyProfileAddressGet' + - type: 'null' type: object required: - id @@ -12767,8 +13909,8 @@ components: - role - privacy - preferences - title: MyProfileGet - MyProfilePatch: + title: MyProfileRestGet + MyProfileRestPatch: properties: first_name: anyOf: @@ -12794,46 +13936,7 @@ components: - $ref: '#/components/schemas/MyProfilePrivacyPatch' - type: 'null' type: object - title: MyProfilePatch - example: - first_name: Pedro - last_name: Crespo - MyProfilePrivacyGet: - properties: - hideUsername: - type: boolean - title: Hideusername - hideFullname: - type: boolean - title: Hidefullname - hideEmail: - type: boolean - title: Hideemail - type: object - required: - - hideUsername - - hideFullname - - hideEmail - title: MyProfilePrivacyGet - MyProfilePrivacyPatch: - properties: - hideUsername: - anyOf: - - type: boolean - - type: 'null' - title: Hideusername - hideFullname: - anyOf: - - type: boolean - - type: 'null' - title: Hidefullname - hideEmail: - anyOf: - - type: boolean - - type: 'null' - title: Hideemail - type: object - title: MyProfilePrivacyPatch + title: MyProfileRestPatch MyTokenCreate: properties: service: @@ -12908,8 +14011,7 @@ components: minimum: 0.0 - type: 'null' title: Progress - description: the node progress value (deprecated in DB, still used for API - only) + description: the node progress value deprecated: true thumbnail: anyOf: @@ -12921,6 +14023,7 @@ components: - type: 'null' title: Thumbnail description: url of the latest screenshot of the node + deprecated: true runHash: anyOf: - type: string @@ -12935,10 +14038,12 @@ components: title: Inputs description: values of input properties inputsRequired: - items: - type: string - pattern: ^[-_a-zA-Z0-9]+$ - type: array + anyOf: + - items: + type: string + pattern: ^[-_a-zA-Z0-9]+$ + type: array + - type: 'null' title: Inputsrequired description: Defines inputs that are required in order to run the service inputsUnits: @@ -12984,6 +14089,7 @@ components: - type: 'null' title: Outputnodes description: Used in group-nodes. Node IDs of those connected to the output + deprecated: true parent: anyOf: - type: string @@ -12991,6 +14097,7 @@ components: - type: 'null' title: Parent description: Parent's (group-nodes') node ID s. 
Used to group + deprecated: true position: anyOf: - $ref: '#/components/schemas/Position' @@ -12999,7 +14106,7 @@ components: deprecated: true state: anyOf: - - $ref: '#/components/schemas/NodeState' + - $ref: '#/components/schemas/NodeState-Input' - type: 'null' description: The node's state object bootOptions: @@ -13041,15 +14148,19 @@ components: minimum: 0.0 - type: 'null' title: Progress - description: the node progress value (deprecated in DB, still used for API - only) + description: the node progress value deprecated: true thumbnail: anyOf: - type: string + - type: string + maxLength: 2083 + minLength: 1 + format: uri - type: 'null' title: Thumbnail description: url of the latest screenshot of the node + deprecated: true runHash: anyOf: - type: string @@ -13064,10 +14175,12 @@ components: title: Inputs description: values of input properties inputsRequired: - items: - type: string - pattern: ^[-_a-zA-Z0-9]+$ - type: array + anyOf: + - items: + type: string + pattern: ^[-_a-zA-Z0-9]+$ + type: array + - type: 'null' title: Inputsrequired description: Defines inputs that are required in order to run the service inputsUnits: @@ -13113,6 +14226,7 @@ components: - type: 'null' title: Outputnodes description: Used in group-nodes. Node IDs of those connected to the output + deprecated: true parent: anyOf: - type: string @@ -13120,6 +14234,7 @@ components: - type: 'null' title: Parent description: Parent's (group-nodes') node ID s. Used to group + deprecated: true position: anyOf: - $ref: '#/components/schemas/Position' @@ -13128,7 +14243,7 @@ components: deprecated: true state: anyOf: - - $ref: '#/components/schemas/NodeState' + - $ref: '#/components/schemas/NodeState-Output' - type: 'null' description: The node's state object bootOptions: @@ -13220,7 +14335,6 @@ components: exclusiveMinimum: true title: Serviceport description: port to access the service within the network - default: 8080 maximum: 65535 minimum: 0 serviceBasepath: @@ -13257,6 +14371,7 @@ components: - serviceKey - serviceVersion - serviceHost + - servicePort - serviceState - userId title: NodeGet @@ -13275,9 +14390,6 @@ components: - serviceState - serviceUuid title: NodeGetIdle - example: - service_state: idle - service_uuid: 3fa85f64-5717-4562-b3fc-2c963f66afa6 NodeGetUnknown: properties: serviceState: @@ -13299,6 +14411,7 @@ components: NodeOutputs: properties: outputs: + additionalProperties: true type: object title: Outputs type: object @@ -13353,12 +14466,14 @@ components: title: Progress bootOptions: anyOf: - - type: object + - additionalProperties: true + type: object - type: 'null' title: Bootoptions outputs: anyOf: - - type: object + - additionalProperties: true + type: object - type: 'null' title: Outputs type: object @@ -13389,9 +14504,15 @@ components: properties: thumbnail_url: type: string + maxLength: 2083 + minLength: 1 + format: uri title: Thumbnail Url file_url: type: string + maxLength: 2083 + minLength: 1 + format: uri title: File Url mimetype: anyOf: @@ -13424,13 +14545,85 @@ components: myAccessRights: $ref: '#/components/schemas/ExecutableAccessRights' type: object - required: - - key - - release - - owner - - myAccessRights - title: NodeServiceGet - NodeState: + required: + - key + - release + - owner + - myAccessRights + title: NodeServiceGet + NodeShareState: + properties: + locked: + type: boolean + title: Locked + description: True if the node is locked, False otherwise + current_user_groupids: + anyOf: + - items: + type: integer + exclusiveMinimum: true + minimum: 0 + type: array + - type: 
'null' + title: Current User Groupids + description: Group(s) that currently have access to the node (or locked + it) + status: + anyOf: + - $ref: '#/components/schemas/NodeShareStatus' + - type: 'null' + description: Reason why the node is locked, None if not locked + additionalProperties: false + type: object + required: + - locked + title: NodeShareState + NodeShareStatus: + type: string + enum: + - OPENING + - OPENED + - CLOSING + title: NodeShareStatus + NodeState-Input: + properties: + modified: + type: boolean + title: Modified + description: true if the node's outputs need to be re-computed + default: true + dependencies: + items: + type: string + format: uuid + type: array + uniqueItems: true + title: Dependencies + description: contains the node inputs dependencies if they need to be computed + first + currentStatus: + $ref: '#/components/schemas/RunningState' + description: the node's current state + default: NOT_STARTED + progress: + anyOf: + - type: number + maximum: 1.0 + minimum: 0.0 + - type: 'null' + title: Progress + description: current progress of the task if available (None if not started + or not a computational task) + default: 0 + lock_state: + anyOf: + - $ref: '#/components/schemas/NodeShareState' + - type: 'null' + description: the node's lock state + additionalProperties: false + type: object + title: NodeState + NodeState-Output: properties: modified: type: boolean @@ -13460,6 +14653,11 @@ components: description: current progress of the task if available (None if not started or not a computational task) default: 0 + lock_state: + anyOf: + - $ref: '#/components/schemas/NodeShareState' + - type: 'null' + description: the node's lock state additionalProperties: false type: object title: NodeState @@ -13469,6 +14667,7 @@ components: - NEW_ORGANIZATION - STUDY_SHARED - TEMPLATE_SHARED + - CONVERSATION_NOTIFICATION - ANNOTATION_NOTE - WALLET_SHARED title: NotificationCategory @@ -13490,35 +14689,6 @@ components: - service_key - running_time_in_hours title: OsparcCreditsAggregatedByServiceGet - Owner: - properties: - user_id: - type: integer - exclusiveMinimum: true - title: User Id - description: Owner's user id - minimum: 0 - first_name: - anyOf: - - type: string - maxLength: 255 - - type: 'null' - title: First Name - description: Owner's first name - last_name: - anyOf: - - type: string - maxLength: 255 - - type: 'null' - title: Last Name - description: Owner's last name - additionalProperties: false - type: object - required: - - user_id - - first_name - - last_name - title: Owner PageLinks: properties: self: @@ -13594,6 +14764,68 @@ components: - _links - data title: Page[CatalogLatestServiceGet] + Page_ComputationCollectionRunRestGet_: + properties: + items: + items: + $ref: '#/components/schemas/ComputationCollectionRunRestGet' + type: array + title: Items + total: + type: integer + minimum: 0 + title: Total + page: + type: integer + minimum: 1 + title: Page + size: + type: integer + minimum: 1 + title: Size + pages: + type: integer + minimum: 0 + title: Pages + type: object + required: + - items + - total + - page + - size + - pages + title: Page[ComputationCollectionRunRestGet] + Page_ComputationCollectionRunTaskRestGet_: + properties: + items: + items: + $ref: '#/components/schemas/ComputationCollectionRunTaskRestGet' + type: array + title: Items + total: + type: integer + minimum: 0 + title: Total + page: + type: integer + minimum: 1 + title: Page + size: + type: integer + minimum: 1 + title: Size + pages: + type: integer + minimum: 0 + title: Pages + type: 
object + required: + - items + - total + - page + - size + - pages + title: Page[ComputationCollectionRunTaskRestGet] Page_ComputationRunRestGet_: properties: items: @@ -13602,28 +14834,20 @@ components: type: array title: Items total: - anyOf: - - type: integer - minimum: 0 - - type: 'null' + type: integer + minimum: 0 title: Total page: - anyOf: - - type: integer - minimum: 1 - - type: 'null' + type: integer + minimum: 1 title: Page size: - anyOf: - - type: integer - minimum: 1 - - type: 'null' + type: integer + minimum: 1 title: Size pages: - anyOf: - - type: integer - minimum: 0 - - type: 'null' + type: integer + minimum: 0 title: Pages type: object required: @@ -13631,6 +14855,7 @@ components: - total - page - size + - pages title: Page[ComputationRunRestGet] Page_ComputationTaskRestGet_: properties: @@ -13640,28 +14865,20 @@ components: type: array title: Items total: - anyOf: - - type: integer - minimum: 0 - - type: 'null' + type: integer + minimum: 0 title: Total page: - anyOf: - - type: integer - minimum: 1 - - type: 'null' + type: integer + minimum: 1 title: Page size: - anyOf: - - type: integer - minimum: 1 - - type: 'null' + type: integer + minimum: 1 title: Size pages: - anyOf: - - type: integer - minimum: 0 - - type: 'null' + type: integer + minimum: 0 title: Pages type: object required: @@ -13669,6 +14886,7 @@ components: - total - page - size + - pages title: Page[ComputationTaskRestGet] Page_ConversationMessageRestGet_: properties: @@ -13966,6 +15184,9 @@ components: title: Paymentmethodid paymentMethodFormUrl: type: string + maxLength: 2083 + minLength: 1 + format: uri title: Paymentmethodformurl description: Link to external site that holds the payment submission form type: object @@ -14023,6 +15244,9 @@ components: invoiceUrl: anyOf: - type: string + maxLength: 2083 + minLength: 1 + format: uri - type: 'null' title: Invoiceurl type: object @@ -14082,7 +15306,7 @@ components: tasks) node_states: additionalProperties: - $ref: '#/components/schemas/NodeState' + $ref: '#/components/schemas/NodeState-Output' propertyNames: format: uuid type: object @@ -14130,61 +15354,6 @@ components: - x - y title: Position - PreRegisteredUserGet: - properties: - firstName: - type: string - title: Firstname - lastName: - type: string - title: Lastname - email: - type: string - format: email - title: Email - institution: - anyOf: - - type: string - - type: 'null' - title: Institution - description: company, university, ... - phone: - anyOf: - - type: string - - type: 'null' - title: Phone - address: - type: string - title: Address - city: - type: string - title: City - state: - anyOf: - - type: string - - type: 'null' - title: State - postalCode: - type: string - title: Postalcode - country: - type: string - title: Country - extras: - type: object - title: Extras - description: Keeps extra information provided in the request form. 
- type: object - required: - - firstName - - lastName - - email - - phone - - address - - city - - postalCode - - country - title: PreRegisteredUserGet Preference: properties: defaultValue: @@ -14202,6 +15371,8 @@ components: properties: link: type: string + minLength: 1 + format: uri title: Link type: object required: @@ -14417,13 +15588,15 @@ components: description: Short display name for SMS vendor: anyOf: - - type: object + - additionalProperties: true + type: object - type: 'null' title: Vendor description: vendor attributes issues: anyOf: - items: + additionalProperties: true type: object type: array - type: 'null' @@ -14432,6 +15605,7 @@ components: manuals: anyOf: - items: + additionalProperties: true type: object type: array - type: 'null' @@ -14440,12 +15614,14 @@ components: support: anyOf: - items: + additionalProperties: true type: object type: array - type: 'null' title: Support description: List of support resources loginSettings: + additionalProperties: true type: object title: Loginsettings maxOpenStudiesPerUser: @@ -14500,6 +15676,7 @@ components: type: string title: Productname ui: + additionalProperties: true type: object title: Ui description: Front-end owned ui product configuration @@ -14637,7 +15814,8 @@ components: application/schema+json: '#/components/schemas/JSONFunctionOutputSchema' defaultInputs: anyOf: - - type: object + - additionalProperties: true + type: object - type: 'null' title: Defaultinputs projectId: @@ -14666,6 +15844,9 @@ components: thumbnail: anyOf: - type: string + maxLength: 2083 + minLength: 1 + format: uri - type: string const: '' title: Thumbnail @@ -14700,7 +15881,7 @@ components: title: Lastchangedate state: anyOf: - - $ref: '#/components/schemas/ProjectState' + - $ref: '#/components/schemas/ProjectStateOutputSchema' - type: 'null' trashedAt: anyOf: @@ -14728,6 +15909,7 @@ components: title: Classifiers default: [] quality: + additionalProperties: true type: object title: Quality default: {} @@ -14739,7 +15921,8 @@ components: title: Ui dev: anyOf: - - type: object + - additionalProperties: true + type: object - type: 'null' title: Dev permalink: @@ -14774,7 +15957,6 @@ components: - creationDate - lastChangeDate - trashedAt - - trashedBy - tags - dev - workspaceId @@ -14864,6 +16046,9 @@ components: thumbnail: anyOf: - type: string + maxLength: 2083 + minLength: 1 + format: uri - type: string const: '' title: Thumbnail @@ -14898,7 +16083,7 @@ components: title: Lastchangedate state: anyOf: - - $ref: '#/components/schemas/ProjectState' + - $ref: '#/components/schemas/ProjectStateOutputSchema' - type: 'null' trashedAt: anyOf: @@ -14926,6 +16111,7 @@ components: title: Classifiers default: [] quality: + additionalProperties: true type: object title: Quality default: {} @@ -14937,7 +16123,8 @@ components: title: Ui dev: anyOf: - - type: object + - additionalProperties: true + type: object - type: 'null' title: Dev permalink: @@ -14972,32 +16159,11 @@ components: - creationDate - lastChangeDate - trashedAt - - trashedBy - tags - dev - workspaceId - folderId title: ProjectListItem - ProjectLocked: - properties: - value: - type: boolean - title: Value - description: True if the project is locked - status: - $ref: '#/components/schemas/ProjectStatus' - description: The status of the project - owner: - anyOf: - - $ref: '#/components/schemas/Owner' - - type: 'null' - description: If locked, the user that owns the lock - additionalProperties: false - type: object - required: - - value - - status - title: ProjectLocked ProjectMetadataGet: 
properties: projectUuid: @@ -15034,7 +16200,8 @@ components: title: Kind content_schema: anyOf: - - type: object + - additionalProperties: true + type: object - type: 'null' title: Content Schema description: jsonschema for the port's value. SEE https://json-schema.org/understanding-json-schema/ @@ -15133,7 +16300,8 @@ components: title: Classifiers dev: anyOf: - - type: object + - additionalProperties: true + type: object - type: 'null' title: Dev ui: @@ -15142,19 +16310,28 @@ components: - type: 'null' quality: anyOf: - - type: object + - additionalProperties: true + type: object - type: 'null' title: Quality templateType: anyOf: - $ref: '#/components/schemas/ProjectTemplateType' - type: 'null' + hidden: + anyOf: + - type: boolean + - type: 'null' + title: Hidden type: object title: ProjectPatch ProjectPermalink: properties: url: type: string + maxLength: 2083 + minLength: 1 + format: uri title: Url is_public: type: boolean @@ -15210,26 +16387,51 @@ components: title: Shareeemail confirmationLink: type: string + maxLength: 2083 + minLength: 1 + format: uri title: Confirmationlink type: object required: - - shareeEmail - - confirmationLink - title: ProjectShareAccepted - ProjectState: + - shareeEmail + - confirmationLink + title: ProjectShareAccepted + ProjectShareStateOutputSchema: + properties: + status: + $ref: '#/components/schemas/ProjectStatus' + description: The status of the project + locked: + type: boolean + title: Locked + description: True if the project is locked + currentUserGroupids: + items: + type: integer + exclusiveMinimum: true + minimum: 0 + type: array + title: Currentusergroupids + description: Current users in the project (if the project is locked, the + list contains only the lock owner) + type: object + required: + - status + - locked + - currentUserGroupids + title: ProjectShareStateOutputSchema + ProjectStateOutputSchema: properties: - locked: - $ref: '#/components/schemas/ProjectLocked' - description: The project lock state + shareState: + $ref: '#/components/schemas/ProjectShareStateOutputSchema' state: $ref: '#/components/schemas/ProjectRunningState' description: The project running state - additionalProperties: false type: object required: - - locked + - shareState - state - title: ProjectState + title: ProjectStateOutputSchema ProjectStatus: type: string enum: @@ -15383,6 +16585,7 @@ components: title: Email phone: type: string + format: phone title: Phone description: Phone number E.164, needed on the deployments with 2FA additionalProperties: false @@ -15422,6 +16625,20 @@ components: - name - message title: RegisterPhoneNextPage + RegisteredFunctionUpdate: + properties: + title: + anyOf: + - type: string + - type: 'null' + title: Title + description: + anyOf: + - type: string + - type: 'null' + title: Description + type: object + title: RegisteredFunctionUpdate RegisteredProjectFunctionGet: properties: functionClass: @@ -15455,29 +16672,49 @@ components: application/schema+json: '#/components/schemas/JSONFunctionOutputSchema' defaultInputs: anyOf: - - type: object + - additionalProperties: true + type: object - type: 'null' title: Defaultinputs - uid: + uuid: type: string format: uuid - title: Uid - createdAt: + title: Uuid + creationDate: type: string format: date-time - title: Createdat - projectId: + title: Creationdate + lastChangeDate: + type: string + format: date-time + title: Lastchangedate + templateId: type: string format: uuid - title: Projectid + title: Templateid + accessRights: + additionalProperties: + $ref: 
'#/components/schemas/FunctionGroupAccessRightsGet' + type: object + title: Accessrights + thumbnail: + anyOf: + - type: string + maxLength: 2083 + minLength: 1 + format: uri + - type: 'null' + title: Thumbnail type: object required: - inputSchema - outputSchema - defaultInputs - - uid - - createdAt - - projectId + - uuid + - creationDate + - lastChangeDate + - templateId + - accessRights title: RegisteredProjectFunctionGet RegisteredSolverFunctionGet: properties: @@ -15512,17 +16749,22 @@ components: application/schema+json: '#/components/schemas/JSONFunctionOutputSchema' defaultInputs: anyOf: - - type: object + - additionalProperties: true + type: object - type: 'null' title: Defaultinputs - uid: + uuid: type: string format: uuid - title: Uid - createdAt: + title: Uuid + creationDate: type: string format: date-time - title: Createdat + title: Creationdate + lastChangeDate: + type: string + format: date-time + title: Lastchangedate solverKey: type: string pattern: ^simcore/services/((comp|dynamic|frontend))/([a-z0-9][a-z0-9_.-]*/)*([a-z0-9-_]+[a-z0-9])$ @@ -15531,15 +16773,30 @@ components: type: string pattern: ^(0|[1-9]\d*)(\.(0|[1-9]\d*)){2}(-(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*)(\.(0|[1-9]\d*|\d*[-a-zA-Z][-\da-zA-Z]*))*)?(\+[-\da-zA-Z]+(\.[-\da-zA-Z-]+)*)?$ title: Solverversion + accessRights: + additionalProperties: + $ref: '#/components/schemas/FunctionGroupAccessRightsGet' + type: object + title: Accessrights + thumbnail: + anyOf: + - type: string + maxLength: 2083 + minLength: 1 + format: uri + - type: 'null' + title: Thumbnail type: object required: - inputSchema - outputSchema - defaultInputs - - uid - - createdAt + - uuid + - creationDate + - lastChangeDate - solverKey - solverVersion + - accessRights title: RegisteredSolverFunctionGet ReplaceWalletAutoRecharge: properties: @@ -15721,7 +16978,6 @@ components: exclusiveMinimum: true title: Service Port description: the service swarm internal port - default: 8080 maximum: 65535 minimum: 0 published_port: @@ -15751,6 +17007,11 @@ components: - type: 'null' title: Service Message description: additional information related to service state + is_collaborative: + type: boolean + title: Is Collaborative + description: True if service allows collaboration (multi-tenant access) + default: false type: object required: - service_key @@ -15759,26 +17020,39 @@ components: - project_id - service_uuid - service_host + - service_port - service_state title: RunningDynamicServiceDetails RunningState: type: string enum: - UNKNOWN - - PUBLISHED - NOT_STARTED + - PUBLISHED - PENDING + - WAITING_FOR_CLUSTER - WAITING_FOR_RESOURCES - STARTED - SUCCESS - FAILED - ABORTED - - WAITING_FOR_CLUSTER title: RunningState - description: 'State of execution of a project''s computational workflow - - - SEE StateType for task state' + description: "State of execution of a project's computational workflow\n\nSEE\ + \ StateType for task state\n\n# Computational backend states explained:\n\ + - UNKNOWN - The backend doesn't know about the task anymore, it has disappeared\ + \ from the system or it was never created (eg. when we are asking for the\ + \ task)\n- NOT_STARTED - Default state when the task is created\n- PUBLISHED\ + \ - The task has been submitted to the computational backend (click on \"\ + Run\" button in the UI)\n- PENDING - Task has been transferred to the Dask\ + \ scheduler and is waiting for a worker to pick it up (director-v2 --> Dask\ + \ scheduler)\n - But! it is also transition state (ex. 
PENDING -> WAITING_FOR_CLUSTER\ + \ -> PENDING -> WAITING_FOR_RESOURCES -> PENDING -> STARTED)\n- WAITING_FOR_CLUSTER\ + \ - No cluster (Dask scheduler) is available to run the task; waiting for\ + \ one to become available\n- WAITING_FOR_RESOURCES - No worker (Dask worker)\ + \ is available to run the task; waiting for one to become available\n- STARTED\ + \ - A worker has picked up the task and is executing it\n- SUCCESS - Task\ + \ finished successfully\n- FAILED - Task finished with an error\n- ABORTED\ + \ - Task was aborted before completion" SelectBox: properties: structure: @@ -15815,6 +17089,9 @@ components: description: Long description of the service thumbnail: type: string + maxLength: 2083 + minLength: 1 + format: uri title: Thumbnail description: Url to service thumbnail file_extensions: @@ -15825,6 +17102,9 @@ components: description: File extensions that this service can process view_url: type: string + maxLength: 2083 + minLength: 1 + format: uri title: View Url description: Redirection to open a service in osparc (see /view) type: object @@ -15895,7 +17175,8 @@ components: is allowed contentSchema: anyOf: - - type: object + - additionalProperties: true + type: object - type: 'null' title: Contentschema description: jsonschema of this input/output. Required when type='ref_contentSchema' @@ -16012,7 +17293,8 @@ components: is allowed contentSchema: anyOf: - - type: object + - additionalProperties: true + type: object - type: 'null' title: Contentschema description: jsonschema of this input/output. Required when type='ref_contentSchema' @@ -16365,7 +17647,8 @@ components: application/schema+json: '#/components/schemas/JSONFunctionOutputSchema' defaultInputs: anyOf: - - type: object + - additionalProperties: true + type: object - type: 'null' title: Defaultinputs solverKey: @@ -16523,6 +17806,17 @@ components: const: hypertool - type: 'null' title: Templatetype + mode: + anyOf: + - type: string + enum: + - workbench + - app + - guided + - standalone + - pipeline + - type: 'null' + title: Mode additionalProperties: true type: object title: StudyUI @@ -16531,6 +17825,9 @@ components: icon: anyOf: - type: string + maxLength: 2083 + minLength: 1 + format: uri - type: 'null' title: Icon workbench: @@ -16560,6 +17857,17 @@ components: const: hypertool - type: 'null' title: Templatetype + mode: + anyOf: + - type: string + enum: + - workbench + - app + - guided + - standalone + - pipeline + - type: 'null' + title: Mode additionalProperties: true type: object title: StudyUI @@ -16716,7 +18024,6 @@ components: type: object required: - task_id - - task_name - status_href - result_href - abort_href @@ -16821,6 +18128,7 @@ components: title: Template Name default: registration_email.jinja2 template_context: + additionalProperties: true type: object title: Template Context default: {} @@ -17009,7 +18317,7 @@ components: title: Email invitation: anyOf: - - $ref: '#/components/schemas/InvitationGenerate' + - $ref: '#/components/schemas/_InvitationDetails' - type: 'null' type: object required: @@ -17068,6 +18376,7 @@ components: - type: 'null' title: Country extras: + additionalProperties: true type: object title: Extras description: Keeps extra information provided in the request form @@ -17076,9 +18385,17 @@ components: - type: integer - type: 'null' title: Preregistrationid + preRegistrationCreated: + anyOf: + - type: string + format: date-time + - type: 'null' + title: Preregistrationcreated invitedBy: anyOf: - type: string + maxLength: 100 + minLength: 1 - type: 'null' title: Invitedby 
accountRequestStatus: @@ -17087,9 +18404,9 @@ components: - type: 'null' accountRequestReviewedBy: anyOf: - - type: integer - exclusiveMinimum: true - minimum: 0 + - type: string + maxLength: 100 + minLength: 1 - type: 'null' title: Accountrequestreviewedby accountRequestReviewedAt: @@ -17114,6 +18431,30 @@ components: title: Products description: List of products this users is included or None if fields is unset + userId: + anyOf: + - type: integer + exclusiveMinimum: true + minimum: 0 + - type: 'null' + title: Userid + description: Unique identifier of the user if an account was created + userName: + anyOf: + - type: string + maxLength: 100 + minLength: 1 + - type: 'null' + title: Username + description: Username of the user if an account was created + groupId: + anyOf: + - type: integer + exclusiveMinimum: true + minimum: 1 + - type: 'null' + title: Groupid + description: Primary group ID of the user if an account was created type: object required: - firstName @@ -17127,9 +18468,9 @@ components: - postalCode - country - preRegistrationId + - preRegistrationCreated - accountRequestStatus - registered - - status title: UserAccountGet UserAccountReject: properties: @@ -17141,6 +18482,63 @@ components: required: - email title: UserAccountReject + UserAccountRestPreRegister: + properties: + firstName: + type: string + title: Firstname + lastName: + type: string + title: Lastname + email: + type: string + format: email + title: Email + institution: + anyOf: + - type: string + - type: 'null' + title: Institution + description: company, university, ... + phone: + anyOf: + - type: string + format: phone + - type: 'null' + title: Phone + address: + type: string + title: Address + city: + type: string + title: City + state: + anyOf: + - type: string + - type: 'null' + title: State + postalCode: + type: string + title: Postalcode + country: + type: string + title: Country + extras: + additionalProperties: true + type: object + title: Extras + description: Keeps extra information provided in the request form. + type: object + required: + - firstName + - lastName + - email + - phone + - address + - city + - postalCode + - country + title: UserAccountRestPreRegister UserGet: properties: userId: @@ -17342,6 +18740,9 @@ components: description: Identifier for the file type view_url: type: string + maxLength: 2083 + minLength: 1 + format: uri title: View Url description: Base url to execute viewer. 
Needs appending file_size,[file_name] and download_link as query parameters @@ -17497,6 +18898,9 @@ components: paymentFormUrl: anyOf: - type: string + maxLength: 2083 + minLength: 1 + format: uri - type: 'null' title: Paymentformurl description: Link to external site that holds the payment submission form.None @@ -17702,6 +19106,59 @@ components: - write - delete title: WorkspacesGroupsBodyParams + _ConversationMessageCreateBodyParams: + properties: + content: + type: string + title: Content + type: + $ref: '#/components/schemas/ConversationMessageType' + additionalProperties: false + type: object + required: + - content + - type + title: _ConversationMessageCreateBodyParams + _ConversationsCreateBodyParams: + properties: + name: + type: string + title: Name + type: + $ref: '#/components/schemas/ConversationType' + extraContext: + anyOf: + - additionalProperties: true + type: object + - type: 'null' + title: Extracontext + type: object + required: + - name + - type + title: _ConversationsCreateBodyParams + _InvitationDetails: + properties: + trialAccountDays: + anyOf: + - type: integer + exclusiveMinimum: true + minimum: 0 + - type: 'null' + title: Trialaccountdays + description: Expiration time in days for trial accounts; `null` means not + a trial account + extraCreditsInUsd: + anyOf: + - type: integer + exclusiveMaximum: true + minimum: 0 + maximum: 500 + - type: 'null' + title: Extracreditsinusd + description: Welcome credits in USD; `null` means no welcome credits + type: object + title: _InvitationDetails _ItisVipResourceRestData: properties: source: diff --git a/services/web/server/src/simcore_service_webserver/api_keys/_controller/rest.py b/services/web/server/src/simcore_service_webserver/api_keys/_controller/rest.py index 69483f4ecd0b..05a3b4cff143 100644 --- a/services/web/server/src/simcore_service_webserver/api_keys/_controller/rest.py +++ b/services/web/server/src/simcore_service_webserver/api_keys/_controller/rest.py @@ -7,6 +7,7 @@ ApiKeyCreateRequest, ApiKeyCreateResponse, ApiKeyGet, + ApiKeyListQueryParams, ) from models_library.basic_types import IDStr from models_library.rest_base import StrictRequestParameters @@ -15,11 +16,12 @@ from servicelib.aiohttp.requests_validation import ( parse_request_body_as, parse_request_path_parameters_as, + parse_request_query_parameters_as, ) from ..._meta import API_VTAG from ...login.decorators import login_required -from ...models import RequestContext +from ...models import AuthenticatedRequestContext from ...security.decorators import permission_required from ...utils_aiohttp import envelope_json_response, get_api_base_url from .. 
import _service @@ -41,7 +43,7 @@ class ApiKeysPathParams(StrictRequestParameters): @permission_required("user.apikey.*") @handle_plugin_requests_exceptions async def create_api_key(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) new_api_key = await parse_request_body_as(ApiKeyCreateRequest, request) created_api_key: ApiKey = await _service.create_api_key( @@ -67,11 +69,17 @@ async def create_api_key(request: web.Request): @permission_required("user.apikey.*") @handle_plugin_requests_exceptions async def list_api_keys(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) + + query_params: ApiKeyListQueryParams = parse_request_query_parameters_as( + ApiKeyListQueryParams, request + ) + api_keys = await _service.list_api_keys( request.app, user_id=req_ctx.user_id, product_name=req_ctx.product_name, + include_autogenerated=query_params.include_autogenerated, ) return envelope_json_response( TypeAdapter(list[ApiKeyGet]).validate_python(api_keys) @@ -83,7 +91,7 @@ async def list_api_keys(request: web.Request): @permission_required("user.apikey.*") @handle_plugin_requests_exceptions async def get_api_key(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ApiKeysPathParams, request) api_key: ApiKey = await _service.get_api_key( request.app, @@ -99,7 +107,7 @@ async def get_api_key(request: web.Request): @permission_required("user.apikey.*") @handle_plugin_requests_exceptions async def delete_api_key(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ApiKeysPathParams, request) await _service.delete_api_key( diff --git a/services/web/server/src/simcore_service_webserver/api_keys/_controller/rest_exceptions.py b/services/web/server/src/simcore_service_webserver/api_keys/_controller/rest_exceptions.py index f5888adc99ba..e65fe8bfdc30 100644 --- a/services/web/server/src/simcore_service_webserver/api_keys/_controller/rest_exceptions.py +++ b/services/web/server/src/simcore_service_webserver/api_keys/_controller/rest_exceptions.py @@ -1,3 +1,4 @@ +from common_library.user_messages import user_message from servicelib.aiohttp import status from ...exception_handling import ( @@ -11,11 +12,11 @@ _TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { ApiKeyDuplicatedDisplayNameError: HttpErrorInfo( status.HTTP_409_CONFLICT, - "API key display name duplicated", + user_message("An API key with this display name already exists", _version=1), ), ApiKeyNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "API key was not found", + user_message("The requested API key could not be found", _version=1), ), } diff --git a/services/web/server/src/simcore_service_webserver/api_keys/_controller/rpc.py b/services/web/server/src/simcore_service_webserver/api_keys/_controller/rpc.py index 26a3e5afc71e..d85d6e68d1b8 100644 --- a/services/web/server/src/simcore_service_webserver/api_keys/_controller/rpc.py +++ b/services/web/server/src/simcore_service_webserver/api_keys/_controller/rpc.py @@ -1,13 +1,12 @@ from datetime import timedelta from aiohttp import web -from models_library.api_schemas_webserver import WEBSERVER_RPC_NAMESPACE from models_library.products import ProductName from 
models_library.rpc.webserver.auth.api_keys import ApiKeyGet from models_library.users import UserID from servicelib.rabbitmq import RPCRouter -from ...rabbitmq import get_rabbitmq_rpc_server +from ...rabbitmq import create_register_rpc_routes_on_startup from .. import _service from ..errors import ApiKeyNotFoundError from ..models import ApiKey @@ -68,6 +67,4 @@ async def delete_api_key_by_key( ) -async def register_rpc_routes_on_startup(app: web.Application): - rpc_server = get_rabbitmq_rpc_server(app) - await rpc_server.register_router(router, WEBSERVER_RPC_NAMESPACE, app) +register_rpc_routes_on_startup = create_register_rpc_routes_on_startup(router) diff --git a/services/web/server/src/simcore_service_webserver/api_keys/_repository.py b/services/web/server/src/simcore_service_webserver/api_keys/_repository.py index 72e58d7bcf18..e787ca04f878 100644 --- a/services/web/server/src/simcore_service_webserver/api_keys/_repository.py +++ b/services/web/server/src/simcore_service_webserver/api_keys/_repository.py @@ -3,6 +3,7 @@ import sqlalchemy as sa from aiohttp import web +from models_library.auth import API_KEY_AUTOGENERATED_KEY_PREFIX from models_library.products import ProductName from models_library.users import UserID from simcore_postgres_database.models.api_keys import api_keys @@ -46,7 +47,7 @@ async def create_api_key( expires_at=(sa.func.now() + expiration) if expiration else None, ) .on_conflict_do_update( - index_elements=["user_id", "display_name"], + index_elements=["user_id", "display_name", "product_name"], set_={ "api_key": api_key, "api_secret": _hash_secret(api_secret), @@ -160,12 +161,18 @@ async def list_api_keys( *, user_id: UserID, product_name: ProductName, + include_autogenerated: bool = False, ) -> list[ApiKey]: async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: stmt = sa.select(api_keys.c.id, api_keys.c.display_name).where( (api_keys.c.user_id == user_id) & (api_keys.c.product_name == product_name) ) + if not include_autogenerated: + stmt = stmt.where( + ~api_keys.c.api_key.like(f"{API_KEY_AUTOGENERATED_KEY_PREFIX}%") + ) + result = await conn.stream(stmt) rows = [row async for row in result] diff --git a/services/web/server/src/simcore_service_webserver/api_keys/_service.py b/services/web/server/src/simcore_service_webserver/api_keys/_service.py index 93e8c3610942..c9171b04a298 100644 --- a/services/web/server/src/simcore_service_webserver/api_keys/_service.py +++ b/services/web/server/src/simcore_service_webserver/api_keys/_service.py @@ -2,9 +2,7 @@ from aiohttp import web from models_library.products import ProductName -from models_library.rpc.webserver.auth.api_keys import ( - generate_api_key_and_secret, -) +from models_library.rpc.webserver.auth.api_keys import generate_api_key_and_secret from models_library.users import UserID from . 
import _repository @@ -79,7 +77,9 @@ async def get_api_key( if api_key is not None: return api_key - raise ApiKeyNotFoundError(api_key_id=api_key_id) + raise ApiKeyNotFoundError( + api_key_id=api_key_id, product_name=product_name, user_id=user_id + ) async def list_api_keys( @@ -87,9 +87,13 @@ async def list_api_keys( *, product_name: ProductName, user_id: UserID, + include_autogenerated: bool = False, ) -> list[ApiKey]: api_keys: list[ApiKey] = await _repository.list_api_keys( - app, user_id=user_id, product_name=product_name + app, + user_id=user_id, + product_name=product_name, + include_autogenerated=include_autogenerated, ) return api_keys diff --git a/services/web/server/src/simcore_service_webserver/api_keys/errors.py b/services/web/server/src/simcore_service_webserver/api_keys/errors.py index 5fbe6c38bd98..91bac8db8fcb 100644 --- a/services/web/server/src/simcore_service_webserver/api_keys/errors.py +++ b/services/web/server/src/simcore_service_webserver/api_keys/errors.py @@ -1,13 +1,14 @@ from ..errors import WebServerBaseError -class ApiKeysValueError(WebServerBaseError, ValueError): - ... +class ApiKeysValueError(WebServerBaseError, ValueError): ... class ApiKeyDuplicatedDisplayNameError(ApiKeysValueError): - msg_template = "API Key with display name '{display_name}' already exists. {reason}" + msg_template = ( + "API Key with display name '{display_name}' already exists: {details}" + ) class ApiKeyNotFoundError(ApiKeysValueError): - msg_template = "API Key with ID '{api_key_id}' not found. {reason}" + msg_template = "API Key with ID '{api_key_id}' not found: {details}" diff --git a/services/web/server/src/simcore_service_webserver/api_keys/plugin.py b/services/web/server/src/simcore_service_webserver/api_keys/plugin.py index 9ba6062c5e46..c2edf129a43b 100644 --- a/services/web/server/src/simcore_service_webserver/api_keys/plugin.py +++ b/services/web/server/src/simcore_service_webserver/api_keys/plugin.py @@ -1,8 +1,8 @@ import logging from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func from ..constants import APP_SETTINGS_KEY from ..db.plugin import setup_db from ..products.plugin import setup_products @@ -13,7 +13,7 @@ _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( "simcore_service_webserver.api_keys", ModuleCategory.ADDON, settings_name="WEBSERVER_API_KEYS", diff --git a/services/web/server/src/simcore_service_webserver/application.py b/services/web/server/src/simcore_service_webserver/application.py index 65934275eb21..c996e7af5ad8 100644 --- a/services/web/server/src/simcore_service_webserver/application.py +++ b/services/web/server/src/simcore_service_webserver/application.py @@ -2,19 +2,28 @@ """Main application""" import logging +from collections.abc import Callable from pprint import pformat -from typing import Any +from typing import Any, Final from aiohttp import web from servicelib.aiohttp.application import create_safe_application -from simcore_service_webserver.tasks.plugin import setup_tasks -from ._meta import WELCOME_DB_LISTENER_MSG, WELCOME_GC_MSG, WELCOME_MSG, info +from ._meta import ( + WELCOME_AUTH_APP_MSG, + WELCOME_DB_LISTENER_MSG, + WELCOME_GC_MSG, + WELCOME_MSG, + info, +) from .activity.plugin import setup_activity from .announcements.plugin import setup_announcements from .api_keys.plugin import setup_api_keys from .application_settings import get_application_settings, setup_settings from .catalog.plugin 
import setup_catalog +from .collaboration.bootstrap import ( + setup_realtime_collaboration, +) from .conversations.plugin import setup_conversations from .db.plugin import setup_db from .db_listener.plugin import setup_db_listener @@ -23,6 +32,7 @@ from .dynamic_scheduler.plugin import setup_dynamic_scheduler from .email.plugin import setup_email from .exporter.plugin import setup_exporter +from .fogbugz.plugin import setup_fogbugz from .folders.plugin import setup_folders from .functions.plugin import setup_functions from .garbage_collector.plugin import setup_garbage_collector @@ -30,7 +40,8 @@ from .invitations.plugin import setup_invitations from .licenses.plugin import setup_licenses from .login.plugin import setup_login -from .long_running_tasks import setup_long_running_tasks +from .login_auth.plugin import setup_login_auth +from .long_running_tasks.plugin import setup_long_running_tasks from .notifications.plugin import setup_notifications from .payments.plugin import setup_payments from .products.plugin import setup_products @@ -49,6 +60,7 @@ from .storage.plugin import setup_storage from .studies_dispatcher.plugin import setup_studies_dispatcher from .tags.plugin import setup_tags +from .tasks.plugin import setup_tasks from .tracing import setup_app_tracing from .trash.plugin import setup_trash from .users.plugin import setup_users @@ -57,19 +69,33 @@ _logger = logging.getLogger(__name__) +# Define common app keys used across the webserver +APP_WEBSERVER_SETTINGS_KEY: Final = web.AppKey("APP_WEBSERVER_SETTINGS_KEY", object) + + +def _create_welcome_banner(banner_msg: str) -> Callable: + """Creates a welcome banner function with optional GC and DB listener messages""" + + async def _welcome_banner(app: web.Application): + settings = get_application_settings(app) + + print(banner_msg, flush=True) # noqa: T201 + if settings.WEBSERVER_GARBAGE_COLLECTOR: + print("with", WELCOME_GC_MSG, flush=True) # noqa: T201 + if settings.WEBSERVER_DB_LISTENER: + print("with", WELCOME_DB_LISTENER_MSG, flush=True) # noqa: T201 + + return _welcome_banner + -async def _welcome_banner(app: web.Application): - settings = get_application_settings(app) - print(WELCOME_MSG, flush=True) # noqa: T201 - if settings.WEBSERVER_GARBAGE_COLLECTOR: - print("with", WELCOME_GC_MSG, flush=True) # noqa: T201 - if settings.WEBSERVER_DB_LISTENER: - print("with", WELCOME_DB_LISTENER_MSG, flush=True) # noqa: T201 +def _create_finished_banner() -> Callable: + """Creates a finished banner function""" + async def _finished_banner(app: web.Application): + assert app # nosec + print(info.get_finished_banner(), flush=True) # noqa: T201 -async def _finished_banner(app: web.Application): - assert app # nosec - print(info.get_finished_banner(), flush=True) # noqa: T201 + return _finished_banner def create_application() -> web.Application: @@ -85,11 +111,11 @@ def create_application() -> web.Application: # core modules setup_app_tracing(app) # WARNING: must be UPPERMOST middleware setup_db(app) - setup_long_running_tasks(app) setup_redis(app) setup_session(app) setup_security(app) setup_rest(app) + setup_long_running_tasks(app) setup_rabbitmq(app) # front-end products @@ -144,6 +170,7 @@ def create_application() -> web.Application: # conversations setup_conversations(app) + setup_fogbugz(app) # Needed for support conversations # licenses setup_licenses(app) @@ -159,16 +186,49 @@ def create_application() -> web.Application: setup_publications(app) setup_studies_dispatcher(app) setup_exporter(app) + setup_realtime_collaboration(app) 
# NOTE: *last* events - app.on_startup.append(_welcome_banner) - app.on_shutdown.append(_finished_banner) + app.on_startup.append(_create_welcome_banner(WELCOME_MSG)) + app.on_shutdown.append(_create_finished_banner()) _logger.debug("Routes in app: \n %s", pformat(app.router.named_resources())) return app +def create_application_auth() -> web.Application: + app = create_safe_application() + + settings = setup_settings(app) + assert settings.WEBSERVER_APP_FACTORY_NAME == "WEBSERVER_AUTHZ_APP_FACTORY" # nosec + + # Monitoring and diagnostics + setup_app_tracing( + # WARNING: must be UPPERMOST middleware + # NOTE: uses settings.APP_NAME + app + ) + setup_diagnostics(app) + setup_profiling_middleware(app) + + # Core modules + setup_rest(app) + setup_db(app) + setup_login_auth(app) + + # NOTE: *last* events + app.on_startup.append(_create_welcome_banner(WELCOME_AUTH_APP_MSG)) + app.on_shutdown.append(_create_finished_banner()) + + _logger.debug( + "Routes in application-auth: \n %s", + lambda: pformat(app.router.named_resources()), + ) + + return app + + def run_service(app: web.Application, config: dict[str, Any]): web.run_app( app, @@ -177,9 +237,3 @@ def run_service(app: web.Application, config: dict[str, Any]): # this gets overriden by the gunicorn config in /docker/boot.sh access_log_format='%a %t "%r" %s %b --- [%Dus] "%{Referer}i" "%{User-Agent}i"', ) - - -__all__: tuple[str, ...] = ( - "create_application", - "run_service", -) diff --git a/services/web/server/src/simcore_service_webserver/application_settings.py b/services/web/server/src/simcore_service_webserver/application_settings.py index 40747e7ac170..7c67d38c1ade 100644 --- a/services/web/server/src/simcore_service_webserver/application_settings.py +++ b/services/web/server/src/simcore_service_webserver/application_settings.py @@ -1,9 +1,10 @@ import logging from functools import cached_property -from typing import Annotated, Any, Final +from typing import Annotated, Any, Final, Literal from aiohttp import web from common_library.basic_types import DEFAULT_FACTORY +from common_library.logging.logging_utils_filtering import LoggerName, MessageSubstring from common_library.pydantic_fields_extension import is_nullable from models_library.basic_types import LogLevel, PortInt, VersionTag from models_library.utils.change_case import snake_to_camel @@ -15,7 +16,7 @@ model_validator, ) from pydantic.fields import Field -from servicelib.logging_utils_filtering import LoggerName, MessageSubstring +from servicelib.logging_utils import LogLevelInt from settings_library.application import BaseApplicationSettings from settings_library.email import SMTPSettings from settings_library.postgres import PostgresSettings @@ -28,15 +29,18 @@ from ._meta import API_VERSION, API_VTAG, APP_NAME from .catalog.settings import CatalogSettings +from .collaboration.settings import RealTimeCollaborationSettings from .constants import APP_SETTINGS_KEY from .diagnostics.settings import DiagnosticsSettings from .director_v2.settings import DirectorV2Settings from .dynamic_scheduler.settings import DynamicSchedulerSettings from .exporter.settings import ExporterSettings +from .fogbugz.settings import FogbugzSettings from .garbage_collector.settings import GarbageCollectorSettings from .invitations.settings import InvitationsSettings from .licenses.settings import LicensesSettings from .login.settings import LoginSettings +from .long_running_tasks.settings import LongRunningTasksSettings from .payments.settings import PaymentsSettings from .projects.settings import 
ProjectsSettings from .resource_manager.settings import ResourceManagerSettings @@ -54,7 +58,7 @@ # NOTE: to mark a plugin as a DEV-FEATURE annotated it with -# `Field(json_schema_extra={_X_DEV_FEATURE_FLAG: True})` +# `Field(json_schema_extra={_X_FEATURE_UNDER_DEVELOPMENT: True})` # This will force it to be disabled when WEBSERVER_DEV_FEATURES_ENABLED=False _X_FEATURE_UNDER_DEVELOPMENT: Final[str] = "x-dev-feature" @@ -95,6 +99,13 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): Field(None, description="Stack name defined upon deploy (see main Makefile)"), ] + WEBSERVER_APP_FACTORY_NAME: Annotated[ + Literal["WEBSERVER_FULL_APP_FACTORY", "WEBSERVER_AUTHZ_APP_FACTORY"], + Field( + description="Application factory to be lauched by the gunicorn server", + ), + ] = "WEBSERVER_FULL_APP_FACTORY" + WEBSERVER_DEV_FEATURES_ENABLED: Annotated[ bool, Field( @@ -224,6 +235,14 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): description="exporter plugin", ), ] + + WEBSERVER_FOGBUGZ: Annotated[ + FogbugzSettings | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + ), + ] + WEBSERVER_GARBAGE_COLLECTOR: Annotated[ GarbageCollectorSettings | None, Field( @@ -256,6 +275,14 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): ), ] + WEBSERVER_LONG_RUNNING_TASKS: Annotated[ + LongRunningTasksSettings | None, + Field( + json_schema_extra={"auto_default_from_env": True}, + description="long running tasks plugin", + ), + ] + WEBSERVER_PAYMENTS: Annotated[ PaymentsSettings | None, Field( @@ -269,6 +296,16 @@ class ApplicationSettings(BaseApplicationSettings, MixinLoggingSettings): Field(json_schema_extra={"auto_default_from_env": True}), ] + WEBSERVER_REALTIME_COLLABORATION: Annotated[ + RealTimeCollaborationSettings | None, + Field( + description="Enables real-time collaboration features", + json_schema_extra={ + "auto_default_from_env": True, + }, + ), + ] + WEBSERVER_REDIS: Annotated[ RedisSettings | None, Field(json_schema_extra={"auto_default_from_env": True}) ] @@ -455,8 +492,8 @@ def _get_healthcheck_timeout_in_seconds(cls, v): # HELPERS -------------------------------------------------------- @cached_property - def log_level(self) -> int: - level: int = getattr(logging, self.WEBSERVER_LOGLEVEL.upper()) + def log_level(self) -> LogLevelInt: + level: LogLevelInt = getattr(logging, self.WEBSERVER_LOGLEVEL.upper()) return level def is_enabled(self, field_name: str) -> bool: @@ -474,6 +511,7 @@ def _get_disabled_advertised_plugins(self) -> list[str]: "WEBSERVER_LICENSES", "WEBSERVER_PAYMENTS", "WEBSERVER_SCICRUNCH", + "WEBSERVER_REALTIME_COLLABORATION", } return [_ for _ in advertised_plugins if not self.is_enabled(_)] + [ # NOTE: Permanently retired in https://github.com/ITISFoundation/osparc-simcore/pull/7182 @@ -550,10 +588,16 @@ def to_client_statics(self) -> dict[str, Any]: "WEBSERVER_PROJECTS": { "PROJECTS_MAX_NUM_RUNNING_DYNAMIC_NODES", }, + "WEBSERVER_REALTIME_COLLABORATION": { + "RTC_MAX_NUMBER_OF_USERS", + }, "WEBSERVER_SESSION": {"SESSION_COOKIE_MAX_AGE"}, "WEBSERVER_TRASH": { "TRASH_RETENTION_DAYS", }, + "WEBSERVER_LONG_RUNNING_TASKS": { + "LONG_RUNNING_TASKS_NAMESPACE_SUFFIX", + }, }, exclude_none=True, ) @@ -568,7 +612,7 @@ def setup_settings(app: web.Application) -> ApplicationSettings: app[APP_SETTINGS_KEY] = settings _logger.debug( "Captured app settings:\n%s", - app[APP_SETTINGS_KEY].model_dump_json(indent=1), + lambda: settings.model_dump_json(indent=1), ) return settings diff --git 
a/services/web/server/src/simcore_service_webserver/application_settings_utils.py b/services/web/server/src/simcore_service_webserver/application_settings_utils.py index 4adf8936f944..00d322b2f6cf 100644 --- a/services/web/server/src/simcore_service_webserver/application_settings_utils.py +++ b/services/web/server/src/simcore_service_webserver/application_settings_utils.py @@ -51,6 +51,12 @@ def convert_to_app_config(app_settings: ApplicationSettings) -> AppConfigDict: "host": getattr(app_settings.WEBSERVER_DB, "POSTGRES_HOST", None), "maxsize": getattr(app_settings.WEBSERVER_DB, "POSTGRES_MAXSIZE", None), "minsize": getattr(app_settings.WEBSERVER_DB, "POSTGRES_MINSIZE", None), + "maxpoolsize": getattr( + app_settings.WEBSERVER_DB, "POSTGRES_MAX_POOLSIZE", None + ), + "maxoverflow": getattr( + app_settings.WEBSERVER_DB, "POSTGRES_MAX_OVERFLOW", None + ), "password": getattr( app_settings.WEBSERVER_DB, "POSTGRES_PASSWORD", SecretStr("") ).get_secret_value(), diff --git a/services/web/server/src/simcore_service_webserver/application_setup.py b/services/web/server/src/simcore_service_webserver/application_setup.py new file mode 100644 index 000000000000..a1502a3e64b3 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/application_setup.py @@ -0,0 +1,28 @@ +import functools +from typing import TypeAlias + +import servicelib.aiohttp.application_setup + +from .constants import APP_SETTINGS_KEY + +# models +ModuleCategory: TypeAlias = servicelib.aiohttp.application_setup.ModuleCategory + + +# free-functions +is_setup_completed = servicelib.aiohttp.application_setup.is_setup_completed + +# decorators +ensure_single_setup = servicelib.aiohttp.application_setup.ensure_single_setup + +app_setup_func = functools.partial( + servicelib.aiohttp.application_setup.app_module_setup, + app_settings_key=APP_SETTINGS_KEY, +) + +__all__: tuple[str, ...] 
= ( + "ModuleCategory", + "app_setup_func", + "ensure_single_setup", + "is_setup_completed", +) diff --git a/services/web/server/src/simcore_service_webserver/catalog/_catalog_rest_client_service.py b/services/web/server/src/simcore_service_webserver/catalog/_catalog_rest_client_service.py index 20095cc72af4..5a96d3483276 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/_catalog_rest_client_service.py +++ b/services/web/server/src/simcore_service_webserver/catalog/_catalog_rest_client_service.py @@ -2,7 +2,7 @@ import logging import urllib.parse -from collections.abc import Callable, Iterator +from collections.abc import Iterator from contextlib import contextmanager from typing import Any, Final @@ -23,27 +23,18 @@ from pydantic import TypeAdapter from servicelib.aiohttp.client_session import get_client_session from servicelib.rest_constants import X_PRODUCT_NAME_HEADER -from simcore_service_webserver.catalog.errors import ( - CatalogConnectionError, - CatalogResponseError, -) from yarl import URL from .._meta import api_version_prefix +from ._models import ServiceKeyVersionDict +from .errors import ( + CatalogConnectionError, + CatalogResponseError, +) from .settings import CatalogSettings, get_plugin_settings _logger = logging.getLogger(__name__) -# Cache settings -_SECOND = 1 # in seconds -_MINUTE = 60 * _SECOND -_CACHE_TTL: Final = 1 * _MINUTE - - -def _create_service_cache_key(_f: Callable[..., Any], *_args, **kw): - assert len(_args) == 1, f"Expected only app, got {_args}" # nosec - return f"get_service_{kw['user_id']}_{kw['service_key']}_{kw['service_version']}_{kw['product_name']}" - @contextmanager def _handle_client_exceptions(app: web.Application) -> Iterator[ClientSession]: @@ -96,10 +87,27 @@ def to_backend_service(rel_url: URL, origin: URL, version_prefix: str) -> URL: return origin.with_path(new_path).with_query(rel_url.query) +# Cache settings for services rest API +_SECOND = 1 # in seconds +_MINUTE = 60 * _SECOND +_CACHE_TTL: Final = 1 * _MINUTE + + +@cached( + ttl=_CACHE_TTL, + key_builder=lambda _f, *_args, **kw: f"get_services_for_user_in_product_{kw['user_id']}_{kw['product_name']}", + cache=Cache.MEMORY, +) async def get_services_for_user_in_product( - app: web.Application, user_id: UserID, product_name: str, *, only_key_versions: bool -) -> list[dict]: + app: web.Application, *, user_id: UserID, product_name: str +) -> list[ServiceKeyVersionDict]: + """ + DEPRECATED: see instead RPC interface. 
+ SEE https://github.com/ITISFoundation/osparc-simcore/issues/7838 + """ settings: CatalogSettings = get_plugin_settings(app) + only_key_versions = True + url = (URL(settings.api_base_url) / "services").with_query( {"user_id": user_id, "details": f"{not only_key_versions}"} ) @@ -115,13 +123,18 @@ async def get_services_for_user_in_product( user_id, ) return [] - body: list[dict] = await response.json() - return body + services: list[dict] = await response.json() + + # This reduces the size cached in the memory + return [ + ServiceKeyVersionDict(key=service["key"], version=service["version"]) + for service in services + ] @cached( ttl=_CACHE_TTL, - key_builder=_create_service_cache_key, + key_builder=lambda _f, *_args, **kw: f"get_service_{kw['user_id']}_{kw['service_key']}_{kw['service_version']}_{kw['product_name']}", cache=Cache.MEMORY, # SEE https://github.com/ITISFoundation/osparc-simcore/pull/7802 ) @@ -133,6 +146,10 @@ async def get_service( service_version: ServiceVersion, product_name: ProductName, ) -> dict[str, Any]: + """ + DEPRECATED: see instead RPC interface. + SEE https://github.com/ITISFoundation/osparc-simcore/issues/7838 + """ settings: CatalogSettings = get_plugin_settings(app) url = URL( f"{settings.api_base_url}/services/{urllib.parse.quote_plus(service_key)}/{service_version}", @@ -144,8 +161,8 @@ async def get_service( url, headers={X_PRODUCT_NAME_HEADER: product_name} ) as response: response.raise_for_status() - body: dict[str, Any] = await response.json() - return body + service: dict[str, Any] = await response.json() + return service async def get_service_resources( diff --git a/services/web/server/src/simcore_service_webserver/catalog/_constants.py b/services/web/server/src/simcore_service_webserver/catalog/_constants.py index 5fe940220603..ba8059239478 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/_constants.py +++ b/services/web/server/src/simcore_service_webserver/catalog/_constants.py @@ -1,11 +1,14 @@ from typing import Final +from common_library.user_messages import user_message + from ..constants import MSG_TRY_AGAIN_OR_SUPPORT -MSG_CATALOG_SERVICE_UNAVAILABLE: Final[str] = ( +MSG_CATALOG_SERVICE_UNAVAILABLE: Final[str] = user_message( # Most likely the director service is down or misconfigured so the user is asked to try again later. - "This service is temporarily unavailable. The incident was logged and will be investigated. " - + MSG_TRY_AGAIN_OR_SUPPORT + "The catalog service is currently unavailable. This issue has been logged and will be investigated. " + + MSG_TRY_AGAIN_OR_SUPPORT, + _version=1, ) diff --git a/services/web/server/src/simcore_service_webserver/catalog/_controller_rest.py b/services/web/server/src/simcore_service_webserver/catalog/_controller_rest.py index 644edb14d9ae..1ed2b50ac6b8 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/_controller_rest.py +++ b/services/web/server/src/simcore_service_webserver/catalog/_controller_rest.py @@ -37,7 +37,6 @@ from ..utils_aiohttp import envelope_json_response from . 
import _catalog_rest_client_service, _service from ._controller_rest_exceptions import ( - DefaultPricingUnitForServiceNotFoundError, handle_plugin_requests_exceptions, ) from ._controller_rest_schemas import ( @@ -50,6 +49,7 @@ ServiceTagPathParams, ToServiceInputsQueryParams, ) +from .errors import DefaultPricingUnitForServiceNotFoundError _logger = logging.getLogger(__name__) diff --git a/services/web/server/src/simcore_service_webserver/catalog/_controller_rest_exceptions.py b/services/web/server/src/simcore_service_webserver/catalog/_controller_rest_exceptions.py index ae763d342efb..b3728e979658 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/_controller_rest_exceptions.py +++ b/services/web/server/src/simcore_service_webserver/catalog/_controller_rest_exceptions.py @@ -4,9 +4,10 @@ from aiohttp import web from common_library.error_codes import create_error_code +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs +from common_library.user_messages import user_message from models_library.rest_error import ErrorGet from servicelib.aiohttp import status -from servicelib.logging_errors import create_troubleshotting_log_kwargs from servicelib.rabbitmq._errors import RemoteMethodNotRegisteredError from servicelib.rabbitmq.rpc_interfaces.catalog.errors import ( CatalogForbiddenError, @@ -41,7 +42,6 @@ async def _handler_catalog_client_errors( request: web.Request, exception: Exception ) -> web.Response: - assert isinstance( # nosec exception, CatalogResponseError | CatalogConnectionError ), f"check mapping, got {exception=}" @@ -63,7 +63,7 @@ async def _handler_catalog_client_errors( # Log for further investigation oec = create_error_code(exception) _logger.exception( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( user_msg, error=exception, error_code=oec, @@ -85,39 +85,45 @@ async def _handler_catalog_client_errors( _TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { RemoteMethodNotRegisteredError: HttpErrorInfo( status.HTTP_503_SERVICE_UNAVAILABLE, - MSG_CATALOG_SERVICE_UNAVAILABLE, + user_message( + "The catalog service is temporarily unavailable. 
Please try again later.", + _version=2, + ), ), CatalogForbiddenError: HttpErrorInfo( status.HTTP_403_FORBIDDEN, - "Forbidden catalog access", + user_message( + "Access denied: You don't have permission to view this catalog item.", + _version=2, + ), ), CatalogItemNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Catalog item not found", + user_message( + "This catalog item does not exist or has been removed.", _version=2 + ), ), DefaultPricingPlanNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Default pricing plan not found", + user_message( + "No default pricing plan is available for this operation.", _version=2 + ), ), DefaultPricingUnitForServiceNotFoundError: HttpErrorInfo( - status.HTTP_404_NOT_FOUND, "Default pricing unit not found" + status.HTTP_404_NOT_FOUND, + user_message( + "No default pricing unit is defined for this service.", _version=2 + ), ), } -_exceptions_handlers_map: ExceptionHandlersMap = { +catalog_exceptions_handlers_map: ExceptionHandlersMap = { CatalogResponseError: _handler_catalog_client_errors, CatalogConnectionError: _handler_catalog_client_errors, } -_exceptions_handlers_map.update(to_exceptions_handlers_map(_TO_HTTP_ERROR_MAP)) +catalog_exceptions_handlers_map.update(to_exceptions_handlers_map(_TO_HTTP_ERROR_MAP)) handle_plugin_requests_exceptions = exception_handling_decorator( - _exceptions_handlers_map -) - - -__all__: tuple[str, ...] = ( - "CatalogForbiddenError", - "CatalogItemNotFoundError", - "DefaultPricingUnitForServiceNotFoundError", + catalog_exceptions_handlers_map ) diff --git a/services/web/server/src/simcore_service_webserver/catalog/_controller_rest_schemas.py b/services/web/server/src/simcore_service_webserver/catalog/_controller_rest_schemas.py index 83c8dbe9fa4d..a27f71d61d29 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/_controller_rest_schemas.py +++ b/services/web/server/src/simcore_service_webserver/catalog/_controller_rest_schemas.py @@ -160,7 +160,6 @@ def create(cls, request: Request) -> "CatalogRequestContext": with handle_validation_as_http_error( error_msg_template="Invalid request", resource_name=request.rel_url.path, - use_error_v1=True, ): assert request.app # nosec return cls( diff --git a/services/web/server/src/simcore_service_webserver/catalog/_models.py b/services/web/server/src/simcore_service_webserver/catalog/_models.py index b589e629a9f5..18dd24dad6f8 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/_models.py +++ b/services/web/server/src/simcore_service_webserver/catalog/_models.py @@ -1 +1,6 @@ -# NOTE: missing. 
@bisgaard-itis will follow up here +from typing import TypedDict + + +class ServiceKeyVersionDict(TypedDict): + key: str + version: str diff --git a/services/web/server/src/simcore_service_webserver/catalog/_service.py b/services/web/server/src/simcore_service_webserver/catalog/_service.py index 166310ac7fb5..128aad83cb3f 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/_service.py +++ b/services/web/server/src/simcore_service_webserver/catalog/_service.py @@ -106,10 +106,12 @@ async def batch_get_my_services( product_name=product_name, ids=services_ids, ) + except RPCServerError as err: raise CatalogNotAvailableError( user_id=user_id, product_name=product_name, + services_ids=services_ids, ) from err diff --git a/services/web/server/src/simcore_service_webserver/catalog/catalog_service.py b/services/web/server/src/simcore_service_webserver/catalog/catalog_service.py index d6e42b376dd5..3bce6a913869 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/catalog_service.py +++ b/services/web/server/src/simcore_service_webserver/catalog/catalog_service.py @@ -6,6 +6,7 @@ is_catalog_service_responsive, to_backend_service, ) +from ._models import ServiceKeyVersionDict from ._service import batch_get_my_services __all__: tuple[str, ...] = ( @@ -16,5 +17,6 @@ "get_services_for_user_in_product", "is_catalog_service_responsive", "to_backend_service", + "ServiceKeyVersionDict", ) # nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/catalog/plugin.py b/services/web/server/src/simcore_service_webserver/catalog/plugin.py index b8a5bbce743a..1aa64cb31947 100644 --- a/services/web/server/src/simcore_service_webserver/catalog/plugin.py +++ b/services/web/server/src/simcore_service_webserver/catalog/plugin.py @@ -1,17 +1,20 @@ """Subsystem to communicate with catalog service""" import logging +from typing import Final from aiohttp import web from pint import UnitRegistry -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func from . import _controller_rest _logger = logging.getLogger(__name__) +APP_CATALOG_CLIENT_KEY: Final = web.AppKey("APP_CATALOG_CLIENT_KEY", object) -@app_module_setup( + +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_CATALOG", diff --git a/services/web/server/src/simcore_service_webserver/cli.py b/services/web/server/src/simcore_service_webserver/cli.py index feec4b29e0cc..de59ad46ddfe 100644 --- a/services/web/server/src/simcore_service_webserver/cli.py +++ b/services/web/server/src/simcore_service_webserver/cli.py @@ -1,4 +1,4 @@ -""" Application's command line . +"""Application's command line . Why does this file exist, and why not put this in __main__? 
@@ -15,13 +15,12 @@ import logging import os -from typing import Final +from typing import Annotated, Final import typer from aiohttp import web from common_library.json_serialization import json_dumps from settings_library.utils_cli import create_settings_command -from typing_extensions import Annotated from .application_settings import ApplicationSettings from .login import cli as login_cli @@ -42,7 +41,6 @@ def _setup_app_from_settings( # NOTE: keeping imports here to reduce CLI load time from .application import create_application from .application_settings_utils import convert_to_app_config - from .log import setup_logging # NOTE: By having an equivalent config allows us # to keep some of the code from the previous @@ -51,31 +49,37 @@ def _setup_app_from_settings( # given configs and changing those would not have # a meaningful RoI. config = convert_to_app_config(settings) - - setup_logging( - level=settings.log_level, - slow_duration=settings.AIODEBUG_SLOW_DURATION_SECS, - log_format_local_dev_enabled=settings.WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, - logger_filter_mapping=settings.WEBSERVER_LOG_FILTER_MAPPING, - tracing_settings=settings.WEBSERVER_TRACING, - ) - app = create_application() return (app, config) async def app_factory() -> web.Application: - """Created to launch app from gunicorn (see docker/boot.sh)""" + """WARNING: this is called in the entrypoint of the service. DO NOT CHAGE THE NAME! + + Created to launch app from gunicorn (see docker/boot.sh) + """ + from .application import create_application_auth + from .log import setup_logging + app_settings = ApplicationSettings.create_from_envs() - assert app_settings.SC_BUILD_TARGET # nosec _logger.info( "Application settings: %s", json_dumps(app_settings, indent=2, sort_keys=True), ) - app, _ = _setup_app_from_settings(app_settings) + _logger.info( + "Using application factory: %s", app_settings.WEBSERVER_APP_FACTORY_NAME + ) + + logging_lifespan_cleanup_event = setup_logging(app_settings) + + if app_settings.WEBSERVER_APP_FACTORY_NAME == "WEBSERVER_AUTHZ_APP_FACTORY": + app = create_application_auth() + else: + app, _ = _setup_app_from_settings(app_settings) + app.on_cleanup.append(logging_lifespan_cleanup_event) return app diff --git a/services/web/server/src/simcore_service_webserver/collaboration/__init__.py b/services/web/server/src/simcore_service_webserver/collaboration/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/web/server/src/simcore_service_webserver/collaboration/bootstrap.py b/services/web/server/src/simcore_service_webserver/collaboration/bootstrap.py new file mode 100644 index 000000000000..d3c916aae4b4 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/collaboration/bootstrap.py @@ -0,0 +1,19 @@ +import logging + +from aiohttp import web + +from ..application_setup import ModuleCategory, app_setup_func + +_logger = logging.getLogger(__name__) + + +@app_setup_func( + __name__, + ModuleCategory.ADDON, + settings_name="WEBSERVER_REALTIME_COLLABORATION", + logger=_logger, +) +def setup_realtime_collaboration(app: web.Application): + from .settings import get_plugin_settings + + assert get_plugin_settings(app), "setup_settings not called?" 
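For context on the `cli.py` changes above: the new `WEBSERVER_APP_FACTORY_NAME` setting lets the same gunicorn entrypoint (`app_factory`) serve either the full web server or the lightweight auth-only application. The following is a minimal illustrative sketch of that selection logic; the `select_application` helper and the gunicorn command in the trailing comment are assumptions for illustration only, not part of this patch (the real boot command lives in docker/boot.sh).

from aiohttp import web

from simcore_service_webserver.application import (
    create_application,
    create_application_auth,
)
from simcore_service_webserver.application_settings import ApplicationSettings


def select_application() -> web.Application:
    # Settings are read from the environment, as in cli.app_factory above
    settings = ApplicationSettings.create_from_envs()

    if settings.WEBSERVER_APP_FACTORY_NAME == "WEBSERVER_AUTHZ_APP_FACTORY":
        # auth-only variant: tracing, diagnostics, rest, db and login_auth only
        return create_application_auth()

    # default: the full web-server application with all plugins
    return create_application()


# Illustrative deployment (assumed, not taken from this patch):
#   WEBSERVER_APP_FACTORY_NAME=WEBSERVER_AUTHZ_APP_FACTORY \
#   gunicorn simcore_service_webserver.cli:app_factory --worker-class aiohttp.GunicornWebWorker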
diff --git a/services/web/server/src/simcore_service_webserver/collaboration/settings.py b/services/web/server/src/simcore_service_webserver/collaboration/settings.py new file mode 100644 index 000000000000..c658fdaa893d --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/collaboration/settings.py @@ -0,0 +1,26 @@ +from typing import Annotated + +from aiohttp import web +from pydantic import ( + PositiveInt, +) +from pydantic.fields import Field +from settings_library.base import BaseCustomSettings + +from ..constants import APP_SETTINGS_KEY + + +class RealTimeCollaborationSettings(BaseCustomSettings): + RTC_MAX_NUMBER_OF_USERS: Annotated[ + PositiveInt | None, + Field( + description="Maximum number of user sessions allowed on a single project at once. (null disables the limit)", + ), + ] + + +def get_plugin_settings(app: web.Application) -> RealTimeCollaborationSettings: + settings = app[APP_SETTINGS_KEY].WEBSERVER_REALTIME_COLLABORATION + assert settings, "setup_settings not called?" # nosec + assert isinstance(settings, RealTimeCollaborationSettings) # nosec + return settings diff --git a/services/web/server/src/simcore_service_webserver/constants.py b/services/web/server/src/simcore_service_webserver/constants.py index 6c0dae060da9..0bc871905247 100644 --- a/services/web/server/src/simcore_service_webserver/constants.py +++ b/services/web/server/src/simcore_service_webserver/constants.py @@ -1,14 +1,32 @@ # pylint:disable=unused-import -from typing import Final +from typing import TYPE_CHECKING, Final +from aiohttp import web +from common_library.user_messages import user_message from servicelib.aiohttp.application_keys import ( APP_AIOPG_ENGINE_KEY, + APP_CLIENT_SESSION_KEY, APP_CONFIG_KEY, APP_FIRE_AND_FORGET_TASKS_KEY, - APP_SETTINGS_KEY, ) -from servicelib.request_keys import RQT_USERID_KEY +from servicelib.aiohttp.request_keys import RQT_USERID_KEY + +from ._meta import APP_NAME + +if TYPE_CHECKING: + # Application settings key - defined here to avoid circular imports + from .application_settings import ApplicationSettings + + APP_SETTINGS_KEY: web.AppKey[ApplicationSettings] = web.AppKey( + "APP_SETTINGS_KEY", ApplicationSettings + ) +else: + APP_SETTINGS_KEY: web.AppKey = web.AppKey("APP_SETTINGS_KEY", None) + + +assert APP_CLIENT_SESSION_KEY # nosec +assert APP_CONFIG_KEY # nosec # Application storage keys APP_PRODUCTS_KEY: Final[str] = f"{__name__ }.APP_PRODUCTS_KEY" @@ -38,21 +56,22 @@ # main index route name = front-end INDEX_RESOURCE_NAME: Final[str] = "get_cached_frontend_index" -MSG_UNDER_DEVELOPMENT: Final[str] = ( +MSG_UNDER_DEVELOPMENT: Final[str] = user_message( "Under development. Use WEBSERVER_DEV_FEATURES_ENABLED=1 to enable current implementation" ) - # Request storage keys RQ_PRODUCT_KEY: Final[str] = f"{__name__}.RQ_PRODUCT_KEY" -MSG_TRY_AGAIN_OR_SUPPORT: Final[str] = ( - "Please try again shortly. If the issue persists, contact support." +MSG_TRY_AGAIN_OR_SUPPORT: Final[str] = user_message( + "Please try again shortly. If the issue persists, contact support.", _version=1 ) + __all__: tuple[str, ...] 
= ( "APP_AIOPG_ENGINE_KEY", + "APP_CLIENT_SESSION_KEY", "APP_CONFIG_KEY", "APP_FIRE_AND_FORGET_TASKS_KEY", "APP_SETTINGS_KEY", diff --git a/services/web/server/src/simcore_service_webserver/conversations/_controller/__init__.py b/services/web/server/src/simcore_service_webserver/conversations/_controller/__init__.py new file mode 100644 index 000000000000..8478d6a85126 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/conversations/_controller/__init__.py @@ -0,0 +1 @@ +# mypy: disable-error-code=truthy-function diff --git a/services/web/server/src/simcore_service_webserver/conversations/_controller/_common.py b/services/web/server/src/simcore_service_webserver/conversations/_controller/_common.py new file mode 100644 index 000000000000..d64587482930 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/conversations/_controller/_common.py @@ -0,0 +1,13 @@ +from models_library.conversations import ConversationID, ConversationType +from pydantic import BaseModel, ConfigDict + +from ..errors import ConversationUnsupportedTypeError + + +def raise_unsupported_type(conversation_type: ConversationType) -> None: + raise ConversationUnsupportedTypeError(conversation_type=conversation_type) + + +class ConversationPathParams(BaseModel): + conversation_id: ConversationID + model_config = ConfigDict(extra="forbid") diff --git a/services/web/server/src/simcore_service_webserver/conversations/_controller/_conversations_messages_rest.py b/services/web/server/src/simcore_service_webserver/conversations/_controller/_conversations_messages_rest.py new file mode 100644 index 000000000000..0a33c59d3083 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/conversations/_controller/_conversations_messages_rest.py @@ -0,0 +1,362 @@ +import functools +import logging +from typing import Any + +from aiohttp import web +from common_library.json_serialization import json_dumps +from models_library.api_schemas_webserver.conversations import ( + ConversationMessagePatch, + ConversationMessageRestGet, +) +from models_library.conversations import ( + ConversationMessageID, + ConversationMessagePatchDB, + ConversationMessageType, + ConversationType, +) +from models_library.rest_pagination import ( + Page, + PageQueryParameters, +) +from models_library.rest_pagination_utils import paginate_data +from models_library.utils.fastapi_encoders import jsonable_encoder +from pydantic import BaseModel, ConfigDict +from servicelib.aiohttp import status +from servicelib.aiohttp.requests_validation import ( + parse_request_body_as, + parse_request_path_parameters_as, + parse_request_query_parameters_as, +) +from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON +from servicelib.rest_constants import RESPONSE_MODEL_POLICY + +from ..._meta import API_VTAG as VTAG +from ...constants import APP_SETTINGS_KEY +from ...email import email_service +from ...fogbugz.settings import FogbugzSettings +from ...login.decorators import login_required +from ...models import AuthenticatedRequestContext +from ...products import products_web +from ...users import users_service +from ...utils_aiohttp import envelope_json_response +from .. 
import _conversation_message_service, _conversation_service +from ._common import ConversationPathParams, raise_unsupported_type +from ._rest_exceptions import _handle_exceptions + +_logger = logging.getLogger(__name__) + +routes = web.RouteTableDef() + + +class _ConversationMessagePathParams(ConversationPathParams): + message_id: ConversationMessageID + model_config = ConfigDict(extra="forbid") + + +class _ListConversationMessageQueryParams(PageQueryParameters): + + model_config = ConfigDict(extra="forbid") + + +class _ConversationMessageCreateBodyParams(BaseModel): + content: str + type: ConversationMessageType + model_config = ConfigDict(extra="forbid") + + +def _json_encoder_and_dumps(obj: Any, **kwargs): + return json_dumps(jsonable_encoder(obj), **kwargs) + + +@routes.post( + f"/{VTAG}/conversations/{{conversation_id}}/messages", + name="create_conversation_message", +) +@login_required +@_handle_exceptions +async def create_conversation_message(request: web.Request): + """Create a new message in a conversation""" + req_ctx = AuthenticatedRequestContext.model_validate(request) + path_params = parse_request_path_parameters_as(ConversationPathParams, request) + body_params = await parse_request_body_as( + _ConversationMessageCreateBodyParams, request + ) + + _conversation = await _conversation_service.get_conversation( + request.app, conversation_id=path_params.conversation_id + ) + if _conversation.type != ConversationType.SUPPORT: + raise_unsupported_type(_conversation.type) + + # This function takes care of granting support user access to the message + await _conversation_service.get_support_conversation_for_user( + app=request.app, + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + conversation_id=path_params.conversation_id, + ) + + message, is_first_message = ( + await _conversation_message_service.create_support_message_with_first_check( + app=request.app, + product_name=req_ctx.product_name, + user_id=req_ctx.user_id, + project_id=None, # Support conversations don't use project_id + conversation_id=path_params.conversation_id, + content=body_params.content, + type_=body_params.type, + ) + ) + + # NOTE: This is done here in the Controller layer, as the interface around email currently needs request + product = products_web.get_current_product(request) + fogbugz_settings_or_none: FogbugzSettings | None = request.app[ + APP_SETTINGS_KEY + ].WEBSERVER_FOGBUGZ + if ( + product.support_standard_group_id + and fogbugz_settings_or_none is not None + and is_first_message + ): + _logger.debug( + "Support settings available and FogBugz client configured, creating FogBugz case." 
+ ) + assert product.support_assigned_fogbugz_project_id # nosec + + try: + _url = request.url + _conversation_url = f"{_url.scheme}://{_url.host}/#/conversation/{path_params.conversation_id}" + + await _conversation_service.create_fogbugz_case_for_support_conversation( + request.app, + conversation=_conversation, + user_id=req_ctx.user_id, + message_content=message.content, + conversation_url=_conversation_url, + host=request.host, + product_support_assigned_fogbugz_project_id=product.support_assigned_fogbugz_project_id, + fogbugz_url=str(fogbugz_settings_or_none.FOGBUGZ_URL), + ) + except Exception: # pylint: disable=broad-except + _logger.exception( + "Failed to create support request FogBugz case for conversation %s.", + _conversation.conversation_id, + ) + + elif ( + product.support_standard_group_id + and fogbugz_settings_or_none is None + and is_first_message + ): + _logger.debug( + "Support settings available, but no FogBugz client configured, sending email instead to create FogBugz case." + ) + try: + user = await users_service.get_user(request.app, req_ctx.user_id) + template_name = "request_support.jinja2" + destination_email = product.support_email + email_template_path = await products_web.get_product_template_path( + request, template_name + ) + _url = request.url + _conversation_url = f"{_url.scheme}://{_url.host}/#/conversation/{path_params.conversation_id}" + _extra_context = _conversation.extra_context + await email_service.send_email_from_template( + request, + from_=product.support_email, + to=destination_email, + template=email_template_path, + context={ + "host": request.host, + "first_name": user["first_name"], + "last_name": user["last_name"], + "user_email": user["email"], + "conversation_url": _conversation_url, + "message_content": message.content, + "extra_context": _extra_context, + "dumps": functools.partial(_json_encoder_and_dumps, indent=1), + }, + ) + except Exception: # pylint: disable=broad-except + _logger.exception( + "Failed to send '%s' email to %s (this means the FogBugz case for the request was not created).", + template_name, + destination_email, + ) + else: + _logger.debug("No support settings available, skipping FogBugz case creation.") + + data = ConversationMessageRestGet.from_domain_model(message) + return envelope_json_response(data, web.HTTPCreated) + + +@routes.get( + f"/{VTAG}/conversations/{{conversation_id}}/messages", + name="list_conversation_messages", +) +@login_required +@_handle_exceptions +async def list_conversation_messages(request: web.Request): + """List messages in a conversation""" + req_ctx = AuthenticatedRequestContext.model_validate(request) + path_params = parse_request_path_parameters_as(ConversationPathParams, request) + query_params = parse_request_query_parameters_as( + _ListConversationMessageQueryParams, request + ) + + _conversation = await _conversation_service.get_conversation( + request.app, conversation_id=path_params.conversation_id + ) + if _conversation.type != ConversationType.SUPPORT: + raise_unsupported_type(_conversation.type) + + # This function takes care of granting support user access to the message + await _conversation_service.get_support_conversation_for_user( + app=request.app, + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + conversation_id=path_params.conversation_id, + ) + + total, messages = ( + await _conversation_message_service.list_messages_for_conversation( + app=request.app, + conversation_id=path_params.conversation_id, + offset=query_params.offset, + 
limit=query_params.limit, + ) + ) + + page = Page[ConversationMessageRestGet].model_validate( + paginate_data( + chunk=[ + ConversationMessageRestGet.from_domain_model(message) + for message in messages + ], + request_url=request.url, + total=total, + limit=query_params.limit, + offset=query_params.offset, + ) + ) + return web.Response( + text=page.model_dump_json(**RESPONSE_MODEL_POLICY), + content_type=MIMETYPE_APPLICATION_JSON, + ) + + +@routes.get( + f"/{VTAG}/conversations/{{conversation_id}}/messages/{{message_id}}", + name="get_conversation_message", +) +@login_required +@_handle_exceptions +async def get_conversation_message(request: web.Request): + """Get a specific message in a conversation""" + req_ctx = AuthenticatedRequestContext.model_validate(request) + path_params = parse_request_path_parameters_as( + _ConversationMessagePathParams, request + ) + + _conversation = await _conversation_service.get_conversation( + request.app, conversation_id=path_params.conversation_id + ) + if _conversation.type != ConversationType.SUPPORT: + raise_unsupported_type(_conversation.type) + + # This function takes care of granting support user access to the message + await _conversation_service.get_support_conversation_for_user( + app=request.app, + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + conversation_id=path_params.conversation_id, + ) + + message = await _conversation_message_service.get_message( + app=request.app, + conversation_id=path_params.conversation_id, + message_id=path_params.message_id, + ) + + data = ConversationMessageRestGet.from_domain_model(message) + return envelope_json_response(data) + + +@routes.put( + f"/{VTAG}/conversations/{{conversation_id}}/messages/{{message_id}}", + name="update_conversation_message", +) +@login_required +@_handle_exceptions +async def update_conversation_message(request: web.Request): + """Update a message in a conversation""" + req_ctx = AuthenticatedRequestContext.model_validate(request) + path_params = parse_request_path_parameters_as( + _ConversationMessagePathParams, request + ) + body_params = await parse_request_body_as(ConversationMessagePatch, request) + + _conversation = await _conversation_service.get_conversation( + request.app, conversation_id=path_params.conversation_id + ) + if _conversation.type != ConversationType.SUPPORT: + raise_unsupported_type(_conversation.type) + + # This function takes care of granting support user access to the message + await _conversation_service.get_support_conversation_for_user( + app=request.app, + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + conversation_id=path_params.conversation_id, + ) + + message = await _conversation_message_service.update_message( + app=request.app, + product_name=req_ctx.product_name, + project_id=None, # Support conversations don't use project_id + conversation_id=path_params.conversation_id, + message_id=path_params.message_id, + updates=ConversationMessagePatchDB(content=body_params.content), + ) + + data = ConversationMessageRestGet.from_domain_model(message) + return envelope_json_response(data) + + +@routes.delete( + f"/{VTAG}/conversations/{{conversation_id}}/messages/{{message_id}}", + name="delete_conversation_message", +) +@login_required +@_handle_exceptions +async def delete_conversation_message(request: web.Request): + """Delete a message in a conversation""" + req_ctx = AuthenticatedRequestContext.model_validate(request) + path_params = parse_request_path_parameters_as( + _ConversationMessagePathParams, request + ) + + 
_conversation = await _conversation_service.get_conversation( + request.app, conversation_id=path_params.conversation_id + ) + if _conversation.type != ConversationType.SUPPORT: + raise_unsupported_type(_conversation.type) + + # This function takes care of granting support user access to the message + await _conversation_service.get_support_conversation_for_user( + app=request.app, + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + conversation_id=path_params.conversation_id, + ) + + await _conversation_message_service.delete_message( + app=request.app, + product_name=req_ctx.product_name, + user_id=req_ctx.user_id, + project_id=None, # Support conversations don't use project_id + conversation_id=path_params.conversation_id, + message_id=path_params.message_id, + ) + + return web.json_response(status=status.HTTP_204_NO_CONTENT) diff --git a/services/web/server/src/simcore_service_webserver/conversations/_controller/_conversations_rest.py b/services/web/server/src/simcore_service_webserver/conversations/_controller/_conversations_rest.py new file mode 100644 index 000000000000..bed8d79a8a6c --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/conversations/_controller/_conversations_rest.py @@ -0,0 +1,234 @@ +import logging +from typing import Any + +from aiohttp import web +from models_library.api_schemas_webserver._base import InputSchema +from models_library.api_schemas_webserver.conversations import ( + ConversationPatch, + ConversationRestGet, +) +from models_library.conversations import ( + ConversationPatchDB, + ConversationType, +) +from models_library.rest_pagination import ( + Page, + PageQueryParameters, +) +from models_library.rest_pagination_utils import paginate_data +from pydantic import ConfigDict, field_validator +from servicelib.aiohttp import status +from servicelib.aiohttp.requests_validation import ( + parse_request_body_as, + parse_request_path_parameters_as, + parse_request_query_parameters_as, +) +from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON +from servicelib.rest_constants import RESPONSE_MODEL_POLICY + +from ..._meta import API_VTAG as VTAG +from ...login.decorators import login_required +from ...models import AuthenticatedRequestContext +from ...users import users_service +from ...utils_aiohttp import envelope_json_response +from .. 
import _conversation_service, conversations_service +from ._common import ConversationPathParams, raise_unsupported_type +from ._rest_exceptions import _handle_exceptions + +_logger = logging.getLogger(__name__) + +routes = web.RouteTableDef() + + +class _ListConversationsQueryParams(PageQueryParameters): + type: ConversationType + model_config = ConfigDict(extra="forbid") + + @field_validator("type") + @classmethod + def validate_type(cls, value): + if value is not None and value != ConversationType.SUPPORT: + msg = "Only support type conversations are allowed" + raise ValueError(msg) + return value + + +class _ConversationsCreateBodyParams(InputSchema): + name: str + type: ConversationType + extra_context: dict[str, Any] | None = None + + +@routes.post( + f"/{VTAG}/conversations", + name="create_conversation", +) +@login_required +@_handle_exceptions +async def create_conversation(request: web.Request): + """Create a new conversation (supports only type='support')""" + req_ctx = AuthenticatedRequestContext.model_validate(request) + body_params = await parse_request_body_as(_ConversationsCreateBodyParams, request) + # Ensure only support conversations are allowed + if body_params.type != ConversationType.SUPPORT: + raise_unsupported_type(body_params.type) + + _extra_context = body_params.extra_context or {} + + conversation = await conversations_service.create_conversation( + app=request.app, + product_name=req_ctx.product_name, + user_id=req_ctx.user_id, + project_uuid=None, # Support conversations are not tied to projects + name=body_params.name, + type_=body_params.type, + extra_context=_extra_context, + ) + data = ConversationRestGet.from_domain_model(conversation) + + return envelope_json_response(data, web.HTTPCreated) + + +@routes.get( + f"/{VTAG}/conversations", + name="list_conversations", +) +@login_required +@_handle_exceptions +async def list_conversations(request: web.Request): + """List conversations for the authenticated user (supports only type='support')""" + req_ctx = AuthenticatedRequestContext.model_validate(request) + query_params = parse_request_query_parameters_as( + _ListConversationsQueryParams, request + ) + if query_params.type != ConversationType.SUPPORT: + raise_unsupported_type(query_params.type) + + total, conversations = ( + await _conversation_service.list_support_conversations_for_user( + app=request.app, + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + offset=query_params.offset, + limit=query_params.limit, + ) + ) + + page = Page[ConversationRestGet].model_validate( + paginate_data( + chunk=[ + ConversationRestGet.from_domain_model(conversation) + for conversation in conversations + ], + request_url=request.url, + total=total, + limit=query_params.limit, + offset=query_params.offset, + ) + ) + return web.Response( + text=page.model_dump_json(**RESPONSE_MODEL_POLICY), + content_type=MIMETYPE_APPLICATION_JSON, + ) + + +@routes.get( + f"/{VTAG}/conversations/{{conversation_id}}", + name="get_conversation", +) +@login_required +@_handle_exceptions +async def get_conversation(request: web.Request): + """Get a specific conversation""" + req_ctx = AuthenticatedRequestContext.model_validate(request) + path_params = parse_request_path_parameters_as(ConversationPathParams, request) + + conversation = await _conversation_service.get_conversation( + request.app, conversation_id=path_params.conversation_id + ) + if conversation.type != ConversationType.SUPPORT: + raise_unsupported_type(conversation.type) + + conversation = await 
_conversation_service.get_support_conversation_for_user( + app=request.app, + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + conversation_id=path_params.conversation_id, + ) + + data = ConversationRestGet.from_domain_model(conversation) + return envelope_json_response(data) + + +@routes.patch( + f"/{VTAG}/conversations/{{conversation_id}}", + name="update_conversation", +) +@login_required +@_handle_exceptions +async def update_conversation(request: web.Request): + """Update a conversation""" + req_ctx = AuthenticatedRequestContext.model_validate(request) + path_params = parse_request_path_parameters_as(ConversationPathParams, request) + body_params = await parse_request_body_as(ConversationPatch, request) + + conversation = await _conversation_service.get_conversation( + request.app, conversation_id=path_params.conversation_id + ) + if conversation.type != ConversationType.SUPPORT: + raise_unsupported_type(conversation.type) + + await _conversation_service.get_support_conversation_for_user( + app=request.app, + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + conversation_id=path_params.conversation_id, + ) + + conversation = await conversations_service.update_conversation( + app=request.app, + project_id=None, # Support conversations don't use project_id + conversation_id=path_params.conversation_id, + updates=ConversationPatchDB(**body_params.model_dump(exclude_unset=True)), + ) + + data = ConversationRestGet.from_domain_model(conversation) + return envelope_json_response(data) + + +@routes.delete( + f"/{VTAG}/conversations/{{conversation_id}}", + name="delete_conversation", +) +@login_required +@_handle_exceptions +async def delete_conversation(request: web.Request): + """Delete a conversation""" + req_ctx = AuthenticatedRequestContext.model_validate(request) + path_params = parse_request_path_parameters_as(ConversationPathParams, request) + + conversation = await _conversation_service.get_conversation( + request.app, conversation_id=path_params.conversation_id + ) + if conversation.type != ConversationType.SUPPORT: + raise_unsupported_type(conversation.type) + + # Only support conversation creator can delete conversation + _user_group_id = await users_service.get_user_primary_group_id( + request.app, user_id=req_ctx.user_id + ) + await _conversation_service.get_conversation_for_user( + app=request.app, + conversation_id=path_params.conversation_id, + user_group_id=_user_group_id, + ) + + await conversations_service.delete_conversation( + app=request.app, + product_name=req_ctx.product_name, + user_id=req_ctx.user_id, + project_id=None, # Support conversations don't use project_id + conversation_id=path_params.conversation_id, + ) + + return web.json_response(status=status.HTTP_204_NO_CONTENT) diff --git a/services/web/server/src/simcore_service_webserver/conversations/_controller/_rest_exceptions.py b/services/web/server/src/simcore_service_webserver/conversations/_controller/_rest_exceptions.py new file mode 100644 index 000000000000..08f6108c61b3 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/conversations/_controller/_rest_exceptions.py @@ -0,0 +1,50 @@ +"""Defines the different exceptions that may arise in the catalog subpackage""" + +import logging + +from common_library.user_messages import user_message +from servicelib.aiohttp import status + +from ...exception_handling import ( + ExceptionToHttpErrorMap, + HttpErrorInfo, + exception_handling_decorator, + to_exceptions_handlers_map, +) +from ..errors import ( + 
ConversationErrorNotFoundError, + ConversationMessageErrorNotFoundError, + ConversationUnsupportedTypeError, +) + +_logger = logging.getLogger(__name__) + + +_TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { + ConversationErrorNotFoundError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + user_message( + "Conversation not found.", + _version=1, + ), + ), + ConversationMessageErrorNotFoundError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + user_message( + "Conversation not found.", + _version=1, + ), + ), + ConversationUnsupportedTypeError: HttpErrorInfo( + status.HTTP_400_BAD_REQUEST, + user_message( + "Unsupported conversation type.", + _version=1, + ), + ), +} + + +_handle_exceptions = exception_handling_decorator( + to_exceptions_handlers_map(_TO_HTTP_ERROR_MAP) +) diff --git a/services/web/server/src/simcore_service_webserver/conversations/_conversation_message_service.py b/services/web/server/src/simcore_service_webserver/conversations/_conversation_message_service.py index a417f0c66283..e920306c0428 100644 --- a/services/web/server/src/simcore_service_webserver/conversations/_conversation_message_service.py +++ b/services/web/server/src/simcore_service_webserver/conversations/_conversation_message_service.py @@ -11,28 +11,57 @@ ConversationMessagePatchDB, ConversationMessageType, ) +from models_library.products import ProductName +from models_library.projects import ProjectID from models_library.rest_ordering import OrderBy, OrderDirection from models_library.rest_pagination import PageTotalCount from models_library.users import UserID +from servicelib.redis import exclusive +from simcore_service_webserver.groups import api as group_service -from ..users.api import get_user_primary_group_id -from . import _conversation_message_repository +from ..products import products_service +from ..redis import get_redis_lock_manager_client_sdk +from ..users import users_service +from . 
import _conversation_message_repository, _conversation_service +from ._socketio import ( + notify_conversation_message_created, + notify_conversation_message_deleted, + notify_conversation_message_updated, +) _logger = logging.getLogger(__name__) +# Redis lock key for conversation message operations +CONVERSATION_MESSAGE_REDIS_LOCK_KEY = "conversation_message_update:{}" + + +async def _get_recipients_from_product_support_group( + app: web.Application, product_name: ProductName +) -> set[UserID]: + product = products_service.get_product(app, product_name=product_name) + _support_standard_group_id = product.support_standard_group_id + if _support_standard_group_id: + users = await group_service.list_group_members( + app, group_id=_support_standard_group_id + ) + return {user.id for user in users} + return set() + async def create_message( app: web.Application, *, + product_name: ProductName, user_id: UserID, + project_id: ProjectID | None, conversation_id: ConversationID, # Creation attributes content: str, type_: ConversationMessageType, ) -> ConversationMessageGetDB: - _user_group_id = await get_user_primary_group_id(app, user_id=user_id) + _user_group_id = await users_service.get_user_primary_group_id(app, user_id=user_id) - return await _conversation_message_repository.create( + created_message = await _conversation_message_repository.create( app, conversation_id=conversation_id, user_group_id=_user_group_id, @@ -40,6 +69,103 @@ async def create_message( type_=type_, ) + if project_id: + await notify_conversation_message_created( + app, + recipients=await _conversation_service.get_recipients_from_project( + app, project_id + ), + project_id=project_id, + conversation_message=created_message, + ) + else: + _conversation = await _conversation_service.get_conversation( + app, conversation_id=conversation_id + ) + _conversation_creator_user = await users_service.get_user_id_from_gid( + app, primary_gid=_conversation.user_group_id + ) + _product_group_users = await _get_recipients_from_product_support_group( + app, product_name=product_name + ) + await notify_conversation_message_created( + app, + recipients=_product_group_users | {_conversation_creator_user}, + project_id=None, + conversation_message=created_message, + ) + + return created_message + + +async def create_support_message_with_first_check( + app: web.Application, + *, + product_name: ProductName, + user_id: UserID, + project_id: ProjectID | None, + conversation_id: ConversationID, + # Creation attributes + content: str, + type_: ConversationMessageType, +) -> tuple[ConversationMessageGetDB, bool]: + """Create a message and check if it's the first one with Redis lock protection. 
+ + This function is protected by Redis exclusive lock because: + - the message creation and first message check must be kept in sync + + Args: + app: The web application instance + user_id: ID of the user creating the message + project_id: ID of the project (optional) + conversation_id: ID of the conversation + content: Content of the message + type_: Type of the message + + Returns: + Tuple containing the created message and whether it's the first message + """ + + @exclusive( + get_redis_lock_manager_client_sdk(app), + lock_key=CONVERSATION_MESSAGE_REDIS_LOCK_KEY.format(conversation_id), + blocking=True, + blocking_timeout=None, # NOTE: this is a blocking call, a timeout has undefined effects + ) + async def _create_support_message_and_check_if_it_is_first_message() -> ( + tuple[ConversationMessageGetDB, bool] + ): + """This function is protected because + - the message creation and first message check must be kept in sync + """ + created_message = await create_message( + app, + product_name=product_name, + user_id=user_id, + project_id=project_id, + conversation_id=conversation_id, + content=content, + type_=type_, + ) + _, messages = await _conversation_message_repository.list_( + app, + conversation_id=conversation_id, + offset=0, + limit=1, + order_by=OrderBy( + field=IDStr("created"), direction=OrderDirection.ASC + ), # NOTE: ASC - first/oldest message first + ) + + is_first_message = False + if messages: + first_message = messages[0] + is_first_message = first_message.message_id == created_message.message_id + + return created_message, is_first_message + + return await _create_support_message_and_check_if_it_is_first_message() + async def get_message( app: web.Application, @@ -55,22 +181,55 @@ async def get_message( async def update_message( app: web.Application, *, + product_name: ProductName, + project_id: ProjectID | None, conversation_id: ConversationID, message_id: ConversationMessageID, # Update attributes updates: ConversationMessagePatchDB, ) -> ConversationMessageGetDB: - return await _conversation_message_repository.update( + updated_message = await _conversation_message_repository.update( app, conversation_id=conversation_id, message_id=message_id, updates=updates, ) + if project_id: + await notify_conversation_message_updated( + app, + recipients=await _conversation_service.get_recipients_from_project( + app, project_id + ), + project_id=project_id, + conversation_message=updated_message, + ) + else: + _conversation = await _conversation_service.get_conversation( + app, conversation_id=conversation_id + ) + _conversation_creator_user = await users_service.get_user_id_from_gid( + app, primary_gid=_conversation.user_group_id + ) + _product_group_users = await _get_recipients_from_product_support_group( + app, product_name=product_name + ) + await notify_conversation_message_updated( + app, + recipients=_product_group_users | {_conversation_creator_user}, + project_id=None, + conversation_message=updated_message, + ) + + return updated_message + async def delete_message( app: web.Application, *, + product_name: ProductName, + user_id: UserID, + project_id: ProjectID | None, conversation_id: ConversationID, message_id: ConversationMessageID, ) -> None: @@ -80,6 +239,38 @@ async def delete_message( message_id=message_id, ) + _user_group_id = await users_service.get_user_primary_group_id(app, user_id=user_id) + + if project_id: + await notify_conversation_message_deleted( + app, + recipients=await _conversation_service.get_recipients_from_project( + app, project_id + ), + 
user_group_id=_user_group_id, + project_id=project_id, + conversation_id=conversation_id, + message_id=message_id, + ) + else: + _conversation = await _conversation_service.get_conversation( + app, conversation_id=conversation_id + ) + _conversation_creator_user = await users_service.get_user_id_from_gid( + app, primary_gid=_conversation.user_group_id + ) + _product_group_users = await _get_recipients_from_product_support_group( + app, product_name=product_name + ) + await notify_conversation_message_deleted( + app, + recipients=_product_group_users | {_conversation_creator_user}, + user_group_id=_user_group_id, + project_id=None, + conversation_id=conversation_id, + message_id=message_id, + ) + async def list_messages_for_conversation( app: web.Application, diff --git a/services/web/server/src/simcore_service_webserver/conversations/_conversation_repository.py b/services/web/server/src/simcore_service_webserver/conversations/_conversation_repository.py index 62128d1a1943..7f0d8267d9ff 100644 --- a/services/web/server/src/simcore_service_webserver/conversations/_conversation_repository.py +++ b/services/web/server/src/simcore_service_webserver/conversations/_conversation_repository.py @@ -1,5 +1,5 @@ import logging -from typing import cast +from typing import Any, cast from aiohttp import web from models_library.conversations import ( @@ -14,7 +14,9 @@ from models_library.rest_ordering import OrderBy, OrderDirection from models_library.rest_pagination import PageTotalCount from pydantic import NonNegativeInt -from simcore_postgres_database.models.conversations import conversations +from simcore_postgres_database.models.conversations import ( + conversations, +) from simcore_postgres_database.utils_repos import ( get_columns_from_db_model, pass_or_acquire_connection, @@ -42,6 +44,7 @@ async def create( user_group_id: GroupID, type_: ConversationType, product_name: ProductName, + extra_context: dict[str, Any], ) -> ConversationGetDB: async with transaction_context(get_asyncpg_engine(app), connection) as conn: result = await conn.execute( @@ -54,6 +57,7 @@ async def create( created=func.now(), modified=func.now(), product_name=product_name, + extra_context=extra_context, ) .returning(*_SELECTION_ARGS) ) @@ -76,7 +80,111 @@ async def list_project_conversations( base_query = ( select(*_SELECTION_ARGS) .select_from(conversations) - .where(conversations.c.project_uuid == f"{project_uuid}") + .where( + (conversations.c.project_uuid == f"{project_uuid}") + & ( + conversations.c.type.in_( + ( + ConversationType.PROJECT_STATIC, + ConversationType.PROJECT_ANNOTATION, + ) + ) + ) + ) + ) + + # Select total count from base_query + subquery = base_query.subquery() + count_query = select(func.count()).select_from(subquery) + + # Ordering and pagination + if order_by.direction == OrderDirection.ASC: + list_query = base_query.order_by( + asc(getattr(conversations.c, order_by.field)), + conversations.c.conversation_id, + ) + else: + list_query = base_query.order_by( + desc(getattr(conversations.c, order_by.field)), + conversations.c.conversation_id, + ) + list_query = list_query.offset(offset).limit(limit) + + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + total_count = await conn.scalar(count_query) + + result = await conn.stream(list_query) + items: list[ConversationGetDB] = [ + ConversationGetDB.model_validate(row) async for row in result + ] + + return cast(int, total_count), items + + +async def list_support_conversations_for_user( + app: web.Application, + 
connection: AsyncConnection | None = None, + *, + user_group_id: GroupID, + # pagination + offset: NonNegativeInt, + limit: NonNegativeInt, + # ordering + order_by: OrderBy, +) -> tuple[PageTotalCount, list[ConversationGetDB]]: + + base_query = ( + select(*_SELECTION_ARGS) + .select_from(conversations) + .where( + (conversations.c.user_group_id == user_group_id) + & (conversations.c.type == ConversationType.SUPPORT) + ) + ) + + # Select total count from base_query + subquery = base_query.subquery() + count_query = select(func.count()).select_from(subquery) + + # Ordering and pagination + if order_by.direction == OrderDirection.ASC: + list_query = base_query.order_by( + asc(getattr(conversations.c, order_by.field)), + conversations.c.conversation_id, + ) + else: + list_query = base_query.order_by( + desc(getattr(conversations.c, order_by.field)), + conversations.c.conversation_id, + ) + list_query = list_query.offset(offset).limit(limit) + + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + total_count = await conn.scalar(count_query) + + result = await conn.stream(list_query) + items: list[ConversationGetDB] = [ + ConversationGetDB.model_validate(row) async for row in result + ] + + return cast(int, total_count), items + + +async def list_all_support_conversations_for_support_user( + app: web.Application, + connection: AsyncConnection | None = None, + *, + # pagination + offset: NonNegativeInt, + limit: NonNegativeInt, + # ordering + order_by: OrderBy, +) -> tuple[PageTotalCount, list[ConversationGetDB]]: + + base_query = ( + select(*_SELECTION_ARGS) + .select_from(conversations) + .where(conversations.c.type == ConversationType.SUPPORT) ) # Select total count from base_query @@ -112,6 +220,7 @@ async def get( connection: AsyncConnection | None = None, *, conversation_id: ConversationID, + type_: ConversationType | None = None, ) -> ConversationGetDB: select_query = ( select(*_SELECTION_ARGS) @@ -119,6 +228,37 @@ async def get( .where(conversations.c.conversation_id == f"{conversation_id}") ) + if type_ is not None: + select_query = select_query.where(conversations.c.type == type_) + + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + result = await conn.execute(select_query) + row = result.one_or_none() + if row is None: + raise ConversationErrorNotFoundError(conversation_id=conversation_id) + return ConversationGetDB.model_validate(row) + + +async def get_for_user( + app: web.Application, + connection: AsyncConnection | None = None, + *, + conversation_id: ConversationID, + user_group_id: GroupID, + type_: ConversationType | None = None, +) -> ConversationGetDB: + select_query = ( + select(*_SELECTION_ARGS) + .select_from(conversations) + .where( + (conversations.c.conversation_id == f"{conversation_id}") + & (conversations.c.user_group_id == user_group_id) + ) + ) + + if type_ is not None: + select_query = select_query.where(conversations.c.type == type_) + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: result = await conn.execute(select_query) row = result.one_or_none() @@ -139,6 +279,9 @@ async def update( **updates.model_dump(exclude_unset=True), conversations.c.modified.name: func.now(), } + _name = _updates.get("name", "Default") + if _name is None: + _updates["name"] = "no name" async with transaction_context(get_asyncpg_engine(app), connection) as conn: result = await conn.execute( diff --git 
a/services/web/server/src/simcore_service_webserver/conversations/_conversation_service.py b/services/web/server/src/simcore_service_webserver/conversations/_conversation_service.py index e4541f56c3fa..bd090239a1a4 100644 --- a/services/web/server/src/simcore_service_webserver/conversations/_conversation_service.py +++ b/services/web/server/src/simcore_service_webserver/conversations/_conversation_service.py @@ -1,6 +1,9 @@ # pylint: disable=unused-argument +import json import logging +from typing import Any +from urllib.parse import urljoin from aiohttp import web from models_library.basic_types import IDStr @@ -16,12 +19,34 @@ from models_library.rest_pagination import PageTotalCount from models_library.users import UserID -from ..users.api import get_user_primary_group_id +from ..conversations._socketio import ( + notify_conversation_created, + notify_conversation_deleted, + notify_conversation_updated, +) +from ..fogbugz import FogbugzCaseCreate, get_fogbugz_rest_client +from ..groups.api import list_user_groups_ids_with_read_access +from ..products import products_service +from ..projects._groups_repository import list_project_groups +from ..users import users_service +from ..users._users_service import get_users_in_group from . import _conversation_repository _logger = logging.getLogger(__name__) +async def get_recipients_from_project( + app: web.Application, project_id: ProjectID +) -> set[UserID]: + groups = await list_project_groups(app, project_id=project_id) + return { + user + for group in groups + if group.read + for user in await get_users_in_group(app, gid=group.gid) + } + + async def create_conversation( app: web.Application, *, @@ -31,50 +56,91 @@ async def create_conversation( # Creation attributes name: str, type_: ConversationType, + extra_context: dict[str, Any], ) -> ConversationGetDB: - if project_uuid is None: - raise NotImplementedError + _user_group_id = await users_service.get_user_primary_group_id(app, user_id=user_id) - _user_group_id = await get_user_primary_group_id(app, user_id=user_id) - - return await _conversation_repository.create( + created_conversation = await _conversation_repository.create( app, name=name, project_uuid=project_uuid, user_group_id=_user_group_id, type_=type_, product_name=product_name, + extra_context=extra_context, ) + if project_uuid: + await notify_conversation_created( + app, + recipients=await get_recipients_from_project(app, project_uuid), + project_id=project_uuid, + conversation=created_conversation, + ) + + return created_conversation + async def get_conversation( app: web.Application, *, conversation_id: ConversationID, + # filters + type_: ConversationType | None = None, ) -> ConversationGetDB: return await _conversation_repository.get( app, conversation_id=conversation_id, + type_=type_, + ) + + +async def get_conversation_for_user( + app: web.Application, + *, + conversation_id: ConversationID, + user_group_id: UserID, + type_: ConversationType | None = None, +) -> ConversationGetDB: + return await _conversation_repository.get_for_user( + app, + conversation_id=conversation_id, + user_group_id=user_group_id, + type_=type_, ) async def update_conversation( app: web.Application, *, + project_id: ProjectID | None, conversation_id: ConversationID, # Update attributes updates: ConversationPatchDB, ) -> ConversationGetDB: - return await _conversation_repository.update( + updated_conversation = await _conversation_repository.update( app, conversation_id=conversation_id, updates=updates, ) + if project_id: + await 
notify_conversation_updated( + app, + recipients=await get_recipients_from_project(app, project_id), + project_id=project_id, + conversation=updated_conversation, + ) + + return updated_conversation + async def delete_conversation( app: web.Application, *, + product_name: ProductName, + user_id: UserID, + project_id: ProjectID | None, conversation_id: ConversationID, ) -> None: await _conversation_repository.delete( @@ -82,8 +148,20 @@ async def delete_conversation( conversation_id=conversation_id, ) + _user_group_id = await users_service.get_user_primary_group_id(app, user_id=user_id) + + if project_id: + await notify_conversation_deleted( + app, + recipients=await get_recipients_from_project(app, project_id), + product_name=product_name, + user_group_id=_user_group_id, + project_id=project_id, + conversation_id=conversation_id, + ) + -async def list_conversations_for_project( +async def list_project_conversations( app: web.Application, *, project_uuid: ProjectID, @@ -98,3 +176,121 @@ async def list_conversations_for_project( limit=limit, order_by=OrderBy(field=IDStr("conversation_id"), direction=OrderDirection.DESC), ) + + +async def get_support_conversation_for_user( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, + conversation_id: ConversationID, +): + # Check if user is part of support group (in that case he has access to all support conversations) + product = products_service.get_product(app, product_name=product_name) + _support_standard_group_id = product.support_standard_group_id + if _support_standard_group_id is not None: + _user_group_ids = await list_user_groups_ids_with_read_access( + app, user_id=user_id + ) + if _support_standard_group_id in _user_group_ids: + # I am a support user + return await get_conversation( + app, conversation_id=conversation_id, type_=ConversationType.SUPPORT + ) + + _user_group_id = await users_service.get_user_primary_group_id(app, user_id=user_id) + return await get_conversation_for_user( + app, + conversation_id=conversation_id, + user_group_id=_user_group_id, + type_=ConversationType.SUPPORT, + ) + + +async def list_support_conversations_for_user( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, + # pagination + offset: int = 0, + limit: int = 20, +) -> tuple[PageTotalCount, list[ConversationGetDB]]: + + # Check if user is part of support group (in that case list all support conversations) + product = products_service.get_product(app, product_name=product_name) + _support_standard_group_id = product.support_standard_group_id + if _support_standard_group_id is not None: + _user_group_ids = await list_user_groups_ids_with_read_access( + app, user_id=user_id + ) + if _support_standard_group_id in _user_group_ids: + # I am a support user + return await _conversation_repository.list_all_support_conversations_for_support_user( + app, + offset=offset, + limit=limit, + order_by=OrderBy( + field=IDStr("conversation_id"), direction=OrderDirection.DESC + ), + ) + + _user_group_id = await users_service.get_user_primary_group_id(app, user_id=user_id) + return await _conversation_repository.list_support_conversations_for_user( + app, + user_group_id=_user_group_id, + offset=offset, + limit=limit, + order_by=OrderBy(field=IDStr("conversation_id"), direction=OrderDirection.DESC), + ) + + +async def create_fogbugz_case_for_support_conversation( + app: web.Application, + *, + conversation: ConversationGetDB, + user_id: UserID, + message_content: str, + conversation_url: str, + host: str, + 
product_support_assigned_fogbugz_project_id: int, + fogbugz_url: str, +) -> None: + """Creates a FogBugz case for a support conversation and updates the conversation with the case URL.""" + user = await users_service.get_user(app, user_id) + + description = f""" + Dear Support Team, + + We have received a support request from {user["first_name"]} {user["last_name"]} ({user["email"]}) on {host}. + + All communication should take place in the Platform Support Center at the following link: {conversation_url} + + First message content: {message_content} + + Extra content: {json.dumps(conversation.extra_context)} + """ + + fogbugz_client = get_fogbugz_rest_client(app) + fogbugz_case_data = FogbugzCaseCreate( + fogbugz_project_id=product_support_assigned_fogbugz_project_id, + title=f"Request for Support on {host}", + description=description, + ) + case_id = await fogbugz_client.create_case(fogbugz_case_data) + + # Update conversation with FogBugz case URL + await update_conversation( + app, + project_id=None, + conversation_id=conversation.conversation_id, + updates=ConversationPatchDB( + extra_context=conversation.extra_context + | { + "fogbugz_case_url": urljoin( + f"{fogbugz_url}", + f"f/cases/{case_id}", + ) + }, + ), + ) diff --git a/services/web/server/src/simcore_service_webserver/conversations/_socketio.py b/services/web/server/src/simcore_service_webserver/conversations/_socketio.py new file mode 100644 index 000000000000..06a07bf561be --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/conversations/_socketio.py @@ -0,0 +1,238 @@ +import datetime +from typing import Final + +from aiohttp import web +from models_library.conversations import ( + ConversationGetDB, + ConversationID, + ConversationMessageGetDB, + ConversationMessageID, + ConversationMessageType, + ConversationName, + ConversationType, +) +from models_library.groups import GroupID +from models_library.products import ProductName +from models_library.projects import ProjectID +from models_library.socketio import SocketMessageDict +from models_library.users import UserID +from pydantic import AliasGenerator, BaseModel, ConfigDict +from pydantic.alias_generators import to_camel +from servicelib.utils import limited_as_completed + +from ..socketio.messages import send_message_to_user + +_MAX_CONCURRENT_SENDS: Final[int] = 3 + +SOCKET_IO_CONVERSATION_CREATED_EVENT: Final[str] = "conversation:created" +SOCKET_IO_CONVERSATION_DELETED_EVENT: Final[str] = "conversation:deleted" +SOCKET_IO_CONVERSATION_UPDATED_EVENT: Final[str] = "conversation:updated" + +SOCKET_IO_CONVERSATION_MESSAGE_CREATED_EVENT: Final[str] = ( + "conversation:message:created" +) +SOCKET_IO_CONVERSATION_MESSAGE_DELETED_EVENT: Final[str] = ( + "conversation:message:deleted" +) +SOCKET_IO_CONVERSATION_MESSAGE_UPDATED_EVENT: Final[str] = ( + "conversation:message:updated" +) + + +class BaseEvent(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + from_attributes=True, + alias_generator=AliasGenerator( + serialization_alias=to_camel, + ), + ) + + +class BaseConversationEvent(BaseEvent): + product_name: ProductName + project_id: ProjectID | None + user_group_id: GroupID + conversation_id: ConversationID + type: ConversationType + + +class ConversationCreatedOrUpdatedEvent(BaseConversationEvent): + name: ConversationName + created: datetime.datetime + modified: datetime.datetime + + +class ConversationDeletedEvent(BaseConversationEvent): ... 
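+
+
+# NOTE: every event model derives from BaseEvent, whose alias generator serializes
+# field names to camelCase (e.g. `conversation_id` -> `conversationId`) when the
+# notify_* helpers below call `.model_dump(mode="json", by_alias=True)`.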
+ + +class BaseConversationMessageEvent(BaseEvent): + conversation_id: ConversationID + project_id: ProjectID | None + message_id: ConversationMessageID + user_group_id: GroupID + type: ConversationMessageType + + model_config = ConfigDict( + populate_by_name=True, + from_attributes=True, + alias_generator=AliasGenerator( + serialization_alias=to_camel, + ), + ) + + +class ConversationMessageCreatedOrUpdatedEvent(BaseConversationMessageEvent): + content: str + created: datetime.datetime + modified: datetime.datetime + + +class ConversationMessageDeletedEvent(BaseConversationMessageEvent): ... + + +async def _send_message_to_recipients( + app: web.Application, + recipients: set[UserID], + notification_message: SocketMessageDict, +): + async for _ in limited_as_completed( + ( + send_message_to_user(app, recipient, notification_message) + for recipient in recipients + ), + limit=_MAX_CONCURRENT_SENDS, + ): + ... + + +async def notify_conversation_created( + app: web.Application, + *, + recipients: set[UserID], + project_id: ProjectID | None, + conversation: ConversationGetDB, +) -> None: + notification_message = SocketMessageDict( + event_type=SOCKET_IO_CONVERSATION_CREATED_EVENT, + data={ + **ConversationCreatedOrUpdatedEvent( + project_id=project_id, + **conversation.model_dump(), + ).model_dump(mode="json", by_alias=True), + }, + ) + + await _send_message_to_recipients(app, recipients, notification_message) + + +async def notify_conversation_updated( + app: web.Application, + *, + recipients: set[UserID], + project_id: ProjectID | None, + conversation: ConversationGetDB, +) -> None: + notification_message = SocketMessageDict( + event_type=SOCKET_IO_CONVERSATION_UPDATED_EVENT, + data={ + **ConversationCreatedOrUpdatedEvent( + project_id=project_id, + **conversation.model_dump(), + ).model_dump(mode="json", by_alias=True), + }, + ) + + await _send_message_to_recipients(app, recipients, notification_message) + + +async def notify_conversation_deleted( + app: web.Application, + *, + recipients: set[UserID], + product_name: ProductName, + user_group_id: GroupID, + project_id: ProjectID | None, + conversation_id: ConversationID, +) -> None: + notification_message = SocketMessageDict( + event_type=SOCKET_IO_CONVERSATION_DELETED_EVENT, + data={ + **ConversationDeletedEvent( + product_name=product_name, + project_id=project_id, + conversation_id=conversation_id, + user_group_id=user_group_id, + type=ConversationType.PROJECT_STATIC, + ).model_dump(mode="json", by_alias=True), + }, + ) + + await _send_message_to_recipients(app, recipients, notification_message) + + +async def notify_conversation_message_created( + app: web.Application, + *, + recipients: set[UserID], + project_id: ProjectID | None, + conversation_message: ConversationMessageGetDB, +) -> None: + notification_message = SocketMessageDict( + event_type=SOCKET_IO_CONVERSATION_MESSAGE_CREATED_EVENT, + data={ + **ConversationMessageCreatedOrUpdatedEvent( + **conversation_message.model_dump(), + project_id=project_id, + ).model_dump(mode="json", by_alias=True), + }, + ) + + await _send_message_to_recipients(app, recipients, notification_message) + + +async def notify_conversation_message_updated( + app: web.Application, + *, + recipients: set[UserID], + project_id: ProjectID | None, + conversation_message: ConversationMessageGetDB, +) -> None: + + notification_message = SocketMessageDict( + event_type=SOCKET_IO_CONVERSATION_MESSAGE_UPDATED_EVENT, + data={ + **ConversationMessageCreatedOrUpdatedEvent( + **conversation_message.model_dump(), + 
project_id=project_id, + ).model_dump(mode="json", by_alias=True), + }, + ) + + await _send_message_to_recipients(app, recipients, notification_message) + + +async def notify_conversation_message_deleted( + app: web.Application, + *, + recipients: set[UserID], + user_group_id: GroupID, + project_id: ProjectID | None, + conversation_id: ConversationID, + message_id: ConversationMessageID, +) -> None: + + notification_message = SocketMessageDict( + event_type=SOCKET_IO_CONVERSATION_MESSAGE_DELETED_EVENT, + data={ + **ConversationMessageDeletedEvent( + conversation_id=conversation_id, + message_id=message_id, + user_group_id=user_group_id, + type=ConversationMessageType.MESSAGE, + project_id=project_id, + ).model_dump(mode="json", by_alias=True), + }, + ) + + await _send_message_to_recipients(app, recipients, notification_message) diff --git a/services/web/server/src/simcore_service_webserver/conversations/conversations_service.py b/services/web/server/src/simcore_service_webserver/conversations/conversations_service.py index d6bb04672be1..61dcfe3330c7 100644 --- a/services/web/server/src/simcore_service_webserver/conversations/conversations_service.py +++ b/services/web/server/src/simcore_service_webserver/conversations/conversations_service.py @@ -10,7 +10,7 @@ create_conversation, delete_conversation, get_conversation, - list_conversations_for_project, + list_project_conversations, update_conversation, ) @@ -21,7 +21,7 @@ "delete_message", "get_conversation", "get_message", - "list_conversations_for_project", + "list_project_conversations", "list_messages_for_conversation", "update_conversation", "update_message", diff --git a/services/web/server/src/simcore_service_webserver/conversations/errors.py b/services/web/server/src/simcore_service_webserver/conversations/errors.py index 4caf6fe7eda1..cf4f0ce3b3ce 100644 --- a/services/web/server/src/simcore_service_webserver/conversations/errors.py +++ b/services/web/server/src/simcore_service_webserver/conversations/errors.py @@ -10,3 +10,7 @@ class ConversationErrorNotFoundError(ConversationError): class ConversationMessageErrorNotFoundError(ConversationError): msg_template = "Conversation {conversation_id} message {message_id} not found" + + +class ConversationUnsupportedTypeError(ConversationError): + msg_template = "Conversation type '{conversation_type}' is not supported. 
Only 'support' type is currently supported" diff --git a/services/web/server/src/simcore_service_webserver/conversations/plugin.py b/services/web/server/src/simcore_service_webserver/conversations/plugin.py index 2d151e25cbb2..c6cd5ee5151d 100644 --- a/services/web/server/src/simcore_service_webserver/conversations/plugin.py +++ b/services/web/server/src/simcore_service_webserver/conversations/plugin.py @@ -3,13 +3,15 @@ import logging from aiohttp import web -from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup + +from ..application_setup import ModuleCategory, app_setup_func +from ..constants import APP_SETTINGS_KEY +from ._controller import _conversations_messages_rest, _conversations_rest _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_CONVERSATIONS", @@ -18,3 +20,6 @@ ) def setup_conversations(app: web.Application): assert app[APP_SETTINGS_KEY].WEBSERVER_CONVERSATIONS # nosec + + app.router.add_routes(_conversations_rest.routes) + app.router.add_routes(_conversations_messages_rest.routes) diff --git a/services/web/server/src/simcore_service_webserver/db/_aiopg.py b/services/web/server/src/simcore_service_webserver/db/_aiopg.py index 9d9feea1f807..bcd38ae9aac0 100644 --- a/services/web/server/src/simcore_service_webserver/db/_aiopg.py +++ b/services/web/server/src/simcore_service_webserver/db/_aiopg.py @@ -12,7 +12,6 @@ from aiopg.sa import Engine, create_engine from common_library.json_serialization import json_dumps from servicelib.aiohttp.aiopg_utils import is_pg_responsive -from servicelib.aiohttp.application_keys import APP_AIOPG_ENGINE_KEY from servicelib.logging_utils import log_context from servicelib.retry_policies import PostgresRetryPolicyUponInitialization from simcore_postgres_database.aiopg_errors import DBAPIError @@ -24,20 +23,24 @@ ) from tenacity import retry +from .._meta import APP_NAME +from ..constants import APP_AIOPG_ENGINE_KEY from .settings import PostgresSettings, get_plugin_settings _logger = logging.getLogger(__name__) @retry(**PostgresRetryPolicyUponInitialization(_logger).kwargs) -async def _ensure_pg_ready(settings: PostgresSettings) -> Engine: - engine: Engine = await create_engine( +async def _ensure_pg_ready( + settings: PostgresSettings, *, application_name: str +) -> Engine: + engine = await create_engine( settings.dsn, - application_name=settings.POSTGRES_CLIENT_NAME, + application_name=settings.client_name(f"{application_name}", suffix="aiopg"), minsize=settings.POSTGRES_MINSIZE, maxsize=settings.POSTGRES_MAXSIZE, ) - + assert isinstance(engine, Engine) # nosec try: await raise_if_migration_not_ready(engine) except (DBMigrationError, DBAPIError): @@ -48,7 +51,6 @@ async def _ensure_pg_ready(settings: PostgresSettings) -> Engine: async def postgres_cleanup_ctx(app: web.Application) -> AsyncIterator[None]: - settings = get_plugin_settings(app) with log_context( @@ -57,7 +59,7 @@ async def postgres_cleanup_ctx(app: web.Application) -> AsyncIterator[None]: "Connecting app[APP_AIOPG_ENGINE_KEY] to postgres with %s", f"{settings=}", ): - aiopg_engine = await _ensure_pg_ready(settings) + aiopg_engine = await _ensure_pg_ready(settings, application_name=APP_NAME) app[APP_AIOPG_ENGINE_KEY] = aiopg_engine _logger.info( diff --git a/services/web/server/src/simcore_service_webserver/db/_asyncpg.py b/services/web/server/src/simcore_service_webserver/db/_asyncpg.py index 
03bac23ea2c2..51434bc50c6d 100644 --- a/services/web/server/src/simcore_service_webserver/db/_asyncpg.py +++ b/services/web/server/src/simcore_service_webserver/db/_asyncpg.py @@ -15,6 +15,7 @@ ) from sqlalchemy.ext.asyncio import AsyncEngine +from .._meta import APP_NAME from .settings import PostgresSettings, get_plugin_settings _logger = logging.getLogger(__name__) @@ -22,7 +23,7 @@ async def postgres_cleanup_ctx(app: web.Application) -> AsyncIterator[None]: settings: PostgresSettings = get_plugin_settings(app) - await connect_to_db(app, settings) + await connect_to_db(app, settings, application_name=APP_NAME) assert get_async_engine(app) # nosec assert isinstance(get_async_engine(app), AsyncEngine) # nosec diff --git a/services/web/server/src/simcore_service_webserver/db/plugin.py b/services/web/server/src/simcore_service_webserver/db/plugin.py index a01efdbc40c6..3e67a23641fb 100644 --- a/services/web/server/src/simcore_service_webserver/db/plugin.py +++ b/services/web/server/src/simcore_service_webserver/db/plugin.py @@ -1,20 +1,23 @@ -""" database submodule associated to the postgres uservice - -""" +"""database submodule associated to the postgres uservice""" import logging +from typing import Final from aiohttp import web -from servicelib.aiohttp.application_keys import APP_AIOPG_ENGINE_KEY -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func +from ..constants import APP_AIOPG_ENGINE_KEY from . import _aiopg, _asyncpg _logger = logging.getLogger(__name__) +APP_DB_ENGINE_KEY: Final = web.AppKey( + "APP_DB_ENGINE_KEY", object +) # Can be aiopg.Engine or asyncpg engine + # API -get_database_engine = _aiopg.get_database_engine +get_database_engine_legacy = _aiopg.get_database_engine get_engine_state = _aiopg.get_engine_state is_service_responsive = _aiopg.is_service_responsive is_service_enabled = _aiopg.is_service_enabled @@ -24,7 +27,7 @@ get_asyncpg_engine = _asyncpg.get_async_engine -@app_module_setup( +@app_setup_func( "simcore_service_webserver.db", ModuleCategory.ADDON, settings_name="WEBSERVER_DB", @@ -34,7 +37,7 @@ def setup_db(app: web.Application): # ensures keys exist app[APP_AIOPG_ENGINE_KEY] = None - assert get_database_engine(app) is None # nosec + assert get_database_engine_legacy(app) is None # nosec # init engines app.cleanup_ctx.append(_aiopg.postgres_cleanup_ctx) diff --git a/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py b/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py index 2c72213b094a..79573c7689a3 100644 --- a/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py +++ b/services/web/server/src/simcore_service_webserver/db_listener/_db_comp_tasks_listening_task.py @@ -22,7 +22,7 @@ from simcore_postgres_database.webserver_models import DB_CHANNEL_NAME, projects from sqlalchemy.sql import select -from ..db.plugin import get_database_engine +from ..db.plugin import get_database_engine_legacy from ..projects import _projects_service, exceptions from ..projects.nodes_utils import update_node_outputs from ._models import CompTaskNotificationPayload @@ -52,12 +52,18 @@ async def _update_project_state( node_errors: list[ErrorDict] | None, ) -> None: project = await _projects_service.update_project_node_state( - app, user_id, project_uuid, node_uuid, new_state + app, + user_id, + project_uuid, + node_uuid, + new_state, + 
client_session_id=None, # <-- The trigger for this update is not from the UI (its db listener) ) await _projects_service.notify_project_node_update( app, project, node_uuid, node_errors ) + await _projects_service.notify_project_state_update(app, project) @@ -94,6 +100,7 @@ async def _handle_db_notification( changed_row.run_hash, node_errors=changed_row.errors, ui_changed_keys=None, + client_session_id=None, # <-- The trigger for this update is not from the UI (its db listener) ) if "state" in payload.changes and (changed_row.state is not None): @@ -126,7 +133,7 @@ async def _handle_db_notification( async def _listen(app: web.Application) -> NoReturn: listen_query = f"LISTEN {DB_CHANNEL_NAME};" - db_engine = get_database_engine(app) + db_engine = get_database_engine_legacy(app) async with db_engine.acquire() as conn: assert conn.connection # nosec await conn.execute(listen_query) diff --git a/services/web/server/src/simcore_service_webserver/db_listener/plugin.py b/services/web/server/src/simcore_service_webserver/db_listener/plugin.py index 423e307f3de2..9e220aa2539e 100644 --- a/services/web/server/src/simcore_service_webserver/db_listener/plugin.py +++ b/services/web/server/src/simcore_service_webserver/db_listener/plugin.py @@ -6,8 +6,8 @@ import logging from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func from ..db.plugin import setup_db from ..projects._projects_repository_legacy import setup_projects_db from ..socketio.plugin import setup_socketio @@ -16,7 +16,7 @@ _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_DB_LISTENER", diff --git a/services/web/server/src/simcore_service_webserver/diagnostics/_handlers.py b/services/web/server/src/simcore_service_webserver/diagnostics/_handlers.py index a25b1442d653..f715e09b248a 100644 --- a/services/web/server/src/simcore_service_webserver/diagnostics/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/diagnostics/_handlers.py @@ -7,7 +7,7 @@ from aiohttp import ClientError, ClientSession, web from models_library.app_diagnostics import AppStatusCheck -from pydantic import BaseModel, Field +from pydantic import BaseModel from servicelib.aiohttp.client_session import get_client_session from servicelib.aiohttp.requests_validation import parse_request_query_parameters_as from servicelib.utils import logged_gather @@ -29,7 +29,7 @@ class StatusDiagnosticsQueryParam(BaseModel): - top_tracemalloc: int | None = Field(default=None) + top_tracemalloc: int | None = None class StatusDiagnosticsGet(BaseModel): diff --git a/services/web/server/src/simcore_service_webserver/diagnostics/_monitoring.py b/services/web/server/src/simcore_service_webserver/diagnostics/_monitoring.py index dd7d169462df..4781fc9e45d2 100644 --- a/services/web/server/src/simcore_service_webserver/diagnostics/_monitoring.py +++ b/services/web/server/src/simcore_service_webserver/diagnostics/_monitoring.py @@ -8,7 +8,8 @@ from servicelib.aiohttp.monitoring import get_collector_registry from servicelib.aiohttp.monitoring import setup_monitoring as service_lib_setup -from .. 
import _meta +from ..application_settings import get_application_settings +from ..application_setup import ensure_single_setup from ._healthcheck import HEALTH_LATENCY_PROBE, DelayWindowProbe, is_sensing_enabled _logger = logging.getLogger(__name__) @@ -46,16 +47,20 @@ async def exit_middleware_cb(request: web.Request, _response: web.StreamResponse request.app[HEALTH_LATENCY_PROBE].observe(resp_time_secs) +@ensure_single_setup(f"{__name__}.setup_monitoring", logger=_logger) def setup_monitoring(app: web.Application): + settings = get_application_settings(app) + prometheus_friendly_app_name = settings.APP_NAME.replace("-", "_") + service_lib_setup( app, - _meta.APP_NAME, + app_name=prometheus_friendly_app_name, enter_middleware_cb=enter_middleware_cb, exit_middleware_cb=exit_middleware_cb, ) monitor_services.add_instrumentation( - app, get_collector_registry(app), _meta.APP_NAME + app, reg=get_collector_registry(app), app_name=prometheus_friendly_app_name ) # on-the fly stats diff --git a/services/web/server/src/simcore_service_webserver/diagnostics/plugin.py b/services/web/server/src/simcore_service_webserver/diagnostics/plugin.py index 8c843699bd51..90462e0c7918 100644 --- a/services/web/server/src/simcore_service_webserver/diagnostics/plugin.py +++ b/services/web/server/src/simcore_service_webserver/diagnostics/plugin.py @@ -1,13 +1,14 @@ import logging import time from operator import attrgetter +from typing import Final from aiohttp import web from servicelib.aiohttp import monitor_slow_callbacks -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup from servicelib.aiohttp.profiler_middleware import profiling_middleware from ..application_settings import get_application_settings +from ..application_setup import ModuleCategory, app_setup_func from ..rest.healthcheck import HealthCheck from ..rest.plugin import setup_rest from . 
import _handlers @@ -22,20 +23,20 @@ _logger = logging.getLogger(__name__) +APP_DIAGNOSTICS_CLIENT_KEY: Final = web.AppKey("APP_DIAGNOSTICS_CLIENT_KEY", object) + async def _on_healthcheck_async_adapter(app: web.Application) -> None: assert_healthy_app(app) -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_DIAGNOSTICS", logger=_logger, ) -def setup_diagnostics( - app: web.Application, -) -> None: +def setup_diagnostics(app: web.Application): setup_rest(app) settings: DiagnosticsSettings = get_plugin_settings(app) @@ -60,8 +61,12 @@ def setup_diagnostics( app[HEALTH_PLUGIN_START_TIME] = time.time() -def setup_profiling_middleware( - app: web.Application, -) -> None: - if get_application_settings(app).WEBSERVER_PROFILING: - app.middlewares.append(profiling_middleware) +@app_setup_func( + __name__, + ModuleCategory.ADDON, + settings_name="WEBSERVER_PROFILING", + logger=_logger, +) +def setup_profiling_middleware(app: web.Application): + assert get_application_settings(app).WEBSERVER_PROFILING # nosec + app.middlewares.append(profiling_middleware) diff --git a/services/web/server/src/simcore_service_webserver/diagnostics/settings.py b/services/web/server/src/simcore_service_webserver/diagnostics/settings.py index b9557f6d2313..801b69c482cb 100644 --- a/services/web/server/src/simcore_service_webserver/diagnostics/settings.py +++ b/services/web/server/src/simcore_service_webserver/diagnostics/settings.py @@ -7,9 +7,10 @@ ValidationInfo, field_validator, ) -from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY from settings_library.base import BaseCustomSettings +from ..constants import APP_SETTINGS_KEY + class DiagnosticsSettings(BaseCustomSettings): DIAGNOSTICS_SLOW_DURATION_SECS: PositiveFloat = Field( diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_client.py b/services/web/server/src/simcore_service_webserver/director_v2/_client.py index ac3a61b726ee..f3612ff9d422 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/_client.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/_client.py @@ -5,6 +5,7 @@ """ import logging +from typing import Final import aiohttp from aiohttp import ClientTimeout, web @@ -99,13 +100,15 @@ async def stop_computation(self, project_id: ProjectID, user_id: UserID): ) -_APP_KEY = f"{__name__}.{DirectorV2RestClient.__name__}" +APP_DIRECTOR_V2_CLIENT_KEY: Final = web.AppKey( + "APP_DIRECTOR_V2_CLIENT_KEY", DirectorV2RestClient +) def set_directorv2_client(app: web.Application, obj: DirectorV2RestClient): - app[_APP_KEY] = obj + app[APP_DIRECTOR_V2_CLIENT_KEY] = obj def get_directorv2_client(app: web.Application) -> DirectorV2RestClient: - app_key: DirectorV2RestClient = app[_APP_KEY] + app_key: DirectorV2RestClient = app[APP_DIRECTOR_V2_CLIENT_KEY] return app_key diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_client_base.py b/services/web/server/src/simcore_service_webserver/director_v2/_client_base.py index d86e937ef00b..fddf396ea1d9 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/_client_base.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/_client_base.py @@ -4,6 +4,7 @@ import aiohttp from aiohttp import ClientSession, ClientTimeout, web from servicelib.aiohttp import status +from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from tenacity import retry from tenacity.before_sleep import before_sleep_log from tenacity.stop import stop_after_attempt 
@@ -36,13 +37,14 @@ def _get_exception_from( - status_code: int, on_error: _StatusToExceptionMapping | None, reason: str, url: URL + status_code: int, on_error: _StatusToExceptionMapping | None, details: str, url: URL ): if on_error and status_code in on_error: - exc, exc_ctx = on_error[status_code] - return exc(**exc_ctx, status=status_code, reason=reason) + exc_cls, exc_ctx = on_error[status_code] + return exc_cls(**exc_ctx, status=status_code, details=details) + # default - return DirectorV2ServiceError(status=status_code, reason=reason, url=url) + return DirectorV2ServiceError(status=status_code, details=details, url=url) @retry(**DEFAULT_RETRY_POLICY) @@ -61,13 +63,13 @@ async def _make_request( ) as response: payload: dict[str, Any] | list[dict[str, Any]] | None | str = ( await response.json() - if response.content_type == "application/json" + if response.content_type == MIMETYPE_APPLICATION_JSON else await response.text() ) if response.status != expected_status.status_code: raise _get_exception_from( - response.status, on_error, reason=f"{payload}", url=url + response.status, on_error, details=f"{payload}", url=url ) return payload @@ -99,13 +101,13 @@ async def request_director_v2( except TimeoutError as err: raise DirectorV2ServiceError( status=status.HTTP_503_SERVICE_UNAVAILABLE, - reason=f"request to director-v2 timed-out: {err}", + details=f"request to director-v2 timed-out: {err}", url=url, ) from err except aiohttp.ClientError as err: raise DirectorV2ServiceError( status=status.HTTP_503_SERVICE_UNAVAILABLE, - reason=f"request to director-v2 service unexpected error {err}", + details=f"request to director-v2 service unexpected error {err}", url=url, ) from err diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_comp_runs_collections_models.py b/services/web/server/src/simcore_service_webserver/director_v2/_comp_runs_collections_models.py new file mode 100644 index 000000000000..7812025920cd --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/director_v2/_comp_runs_collections_models.py @@ -0,0 +1,17 @@ +import logging +from datetime import datetime + +from models_library.computations import CollectionRunID +from pydantic import BaseModel, ConfigDict + +_logger = logging.getLogger(__name__) + + +class CompRunCollectionDBGet(BaseModel): + collection_run_id: CollectionRunID + client_or_system_generated_id: str + client_or_system_generated_display_name: str + is_generated_by_system: bool + created: datetime + + model_config = ConfigDict(from_attributes=True) diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_comp_runs_collections_repository.py b/services/web/server/src/simcore_service_webserver/director_v2/_comp_runs_collections_repository.py new file mode 100644 index 000000000000..346af7301617 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/director_v2/_comp_runs_collections_repository.py @@ -0,0 +1,93 @@ +import logging +from uuid import UUID + +from models_library.computations import CollectionRunID +from pydantic import TypeAdapter +from simcore_postgres_database.models.comp_runs_collections import comp_runs_collections +from sqlalchemy import func +from sqlalchemy.dialects.postgresql import insert as pg_insert + +from ._comp_runs_collections_models import CompRunCollectionDBGet + +_logger = logging.getLogger(__name__) + + +# Comp run collections CRUD operations + + +async def create_comp_run_collection( + conn, + client_or_system_generated_id: str, + 
client_or_system_generated_display_name: str, + is_generated_by_system: bool, +) -> CollectionRunID: + """Create a new computational run collection.""" + result = await conn.execute( + comp_runs_collections.insert() + .values( + client_or_system_generated_id=client_or_system_generated_id, + client_or_system_generated_display_name=client_or_system_generated_display_name, + is_generated_by_system=is_generated_by_system, + created=func.now(), + modified=func.now(), + ) + .returning(comp_runs_collections.c.collection_run_id) + ) + collection_id_tuple: tuple[UUID] = result.one() + return TypeAdapter(CollectionRunID).validate_python(collection_id_tuple[0]) + + +async def get_comp_run_collection_or_none_by_id( + conn, collection_run_id: CollectionRunID +) -> CompRunCollectionDBGet | None: + result = await conn.execute( + comp_runs_collections.select().where( + comp_runs_collections.c.collection_run_id == f"{collection_run_id}" + ) + ) + row = result.one_or_none() + if row is None: + return None + return CompRunCollectionDBGet.model_validate(row) + + +async def get_comp_run_collection_or_none_by_client_generated_id( + conn, client_or_system_generated_id: str +) -> CompRunCollectionDBGet | None: + result = await conn.execute( + comp_runs_collections.select().where( + comp_runs_collections.c.client_or_system_generated_id + == client_or_system_generated_id + ) + ) + row = result.one_or_none() + if row is None: + return None + return CompRunCollectionDBGet.model_validate(row) + + +async def upsert_comp_run_collection( + conn, + client_or_system_generated_id: str, + client_or_system_generated_display_name: str, + is_generated_by_system: bool, +) -> CollectionRunID: + """Upsert a computational run collection. If it exists, only update the modified time.""" + insert_stmt = pg_insert(comp_runs_collections).values( + client_or_system_generated_id=client_or_system_generated_id, + client_or_system_generated_display_name=client_or_system_generated_display_name, + is_generated_by_system=is_generated_by_system, + created=func.now(), + modified=func.now(), + ) + on_update_stmt = insert_stmt.on_conflict_do_update( + index_elements=[comp_runs_collections.c.client_or_system_generated_id], + set_={ + "modified": func.now(), + }, + ) + result = await conn.stream( + on_update_stmt.returning(comp_runs_collections.c.collection_run_id) + ) + collection_id_tuple: tuple[UUID] = await result.one() + return TypeAdapter(CollectionRunID).validate_python(collection_id_tuple[0]) diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_comp_runs_collections_service.py b/services/web/server/src/simcore_service_webserver/director_v2/_comp_runs_collections_service.py new file mode 100644 index 000000000000..57aa0ce6656f --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/director_v2/_comp_runs_collections_service.py @@ -0,0 +1,63 @@ +import logging + +from aiohttp import web +from models_library.computations import CollectionRunID +from simcore_postgres_database.utils_repos import transaction_context + +from ..db.plugin import get_asyncpg_engine +from . 
import _comp_runs_collections_repository +from ._comp_runs_collections_models import CompRunCollectionDBGet + +_logger = logging.getLogger(__name__) + + +async def create_comp_run_collection( + app: web.Application, + *, + client_or_system_generated_id: str, + client_or_system_generated_display_name: str, + is_generated_by_system: bool, +) -> CollectionRunID: + async with transaction_context(get_asyncpg_engine(app)) as conn: + return await _comp_runs_collections_repository.create_comp_run_collection( + conn=conn, + client_or_system_generated_id=client_or_system_generated_id, + client_or_system_generated_display_name=client_or_system_generated_display_name, + is_generated_by_system=is_generated_by_system, + ) + + +async def upsert_comp_run_collection( + app: web.Application, + *, + client_or_system_generated_id: str, + client_or_system_generated_display_name: str, + is_generated_by_system: bool, +) -> CollectionRunID: + async with transaction_context(get_asyncpg_engine(app)) as conn: + return await _comp_runs_collections_repository.upsert_comp_run_collection( + conn=conn, + client_or_system_generated_id=client_or_system_generated_id, + client_or_system_generated_display_name=client_or_system_generated_display_name, + is_generated_by_system=is_generated_by_system, + ) + + +async def get_comp_run_collection_or_none_by_id( + app: web.Application, *, collection_run_id: CollectionRunID +) -> CompRunCollectionDBGet | None: + async with transaction_context(get_asyncpg_engine(app)) as conn: + return await _comp_runs_collections_repository.get_comp_run_collection_or_none_by_id( + conn=conn, collection_run_id=collection_run_id + ) + + +async def get_comp_run_collection_or_none_by_client_generated_id( + app: web.Application, + *, + client_or_system_generated_id: str, +) -> CompRunCollectionDBGet | None: + async with transaction_context(get_asyncpg_engine(app)) as conn: + return await _comp_runs_collections_repository.get_comp_run_collection_or_none_by_client_generated_id( + conn=conn, client_or_system_generated_id=client_or_system_generated_id + ) diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_computations_service.py b/services/web/server/src/simcore_service_webserver/director_v2/_computations_service.py index 22667ded1c87..8611373d95d1 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/_computations_service.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/_computations_service.py @@ -1,8 +1,14 @@ from decimal import Decimal from aiohttp import web -from models_library.api_schemas_directorv2.comp_runs import ComputationRunRpcGet +from models_library.api_schemas_directorv2.comp_runs import ( + ComputationCollectionRunRpcGet, + ComputationRunRpcGet, +) from models_library.computations import ( + CollectionRunID, + ComputationCollectionRunTaskWithAttributes, + ComputationCollectionRunWithAttributes, ComputationRunWithAttributes, ComputationTaskWithAttributes, ) @@ -35,6 +41,7 @@ get_project_uuids_by_root_parent_project_id, ) from ..rabbitmq import get_rabbitmq_rpc_client +from ._comp_runs_collections_service import get_comp_run_collection_or_none_by_id async def _get_projects_metadata( @@ -289,3 +296,177 @@ async def list_computations_latest_iteration_tasks( ) ] return _tasks_get.total, _tasks_get_output + + +async def _get_root_project_names_v2( + app: web.Application, items: list[ComputationCollectionRunRpcGet] +) -> list[str]: + root_uuids: list[ProjectID] = [] + for item in items: + if root_id := 
item.info.get("project_metadata", {}).get( + "root_parent_project_id" + ): + root_uuids.append(ProjectID(root_id)) + else: + assert len(item.project_ids) > 0 # nosec + root_uuids.append(ProjectID(item.project_ids[0])) + + return await batch_get_project_name(app, projects_uuids=root_uuids) + + +async def list_computation_collection_runs( + app: web.Application, + *, + product_name: ProductName, + user_id: UserID, + # filters + filter_by_root_project_id: ProjectID | None = None, + filter_only_running: bool = False, + # pagination + offset: int, + limit: NonNegativeInt, +) -> tuple[int, list[ComputationCollectionRunWithAttributes]]: + child_projects_with_root = None + if filter_by_root_project_id: + await check_user_project_permission( + app, + project_id=filter_by_root_project_id, + user_id=user_id, + product_name=product_name, + ) + # NOTE: Can be improved with checking if the provided project is a root project + child_projects = await get_project_uuids_by_root_parent_project_id( + app, root_parent_project_uuid=filter_by_root_project_id + ) + child_projects_with_root = [*child_projects, filter_by_root_project_id] + + rpc_client = get_rabbitmq_rpc_client(app) + _runs_get = await computations.list_computation_collection_runs_page( + rpc_client, + product_name=product_name, + user_id=user_id, + project_ids=child_projects_with_root, + filter_only_running=filter_only_running, + offset=offset, + limit=limit, + ) + + # NOTE: MD: can be improved with a single batch call + _comp_runs_collections = await limited_gather( + *[ + get_comp_run_collection_or_none_by_id( + app, collection_run_id=_run.collection_run_id + ) + for _run in _runs_get.items + ], + limit=20, + ) + # Get Root project names + _projects_root_names = await _get_root_project_names_v2(app, _runs_get.items) + + _computational_runs_output = [ + ComputationCollectionRunWithAttributes( + collection_run_id=item.collection_run_id, + project_ids=item.project_ids, + state=item.state, + info=item.info, + submitted_at=item.submitted_at, + started_at=item.started_at, + ended_at=item.ended_at, + name=( + run_collection.client_or_system_generated_display_name + if run_collection and run_collection.is_generated_by_system is False + else project_root_name + ), + ) + for item, run_collection, project_root_name in zip( + _runs_get.items, _comp_runs_collections, _projects_root_names, strict=True + ) + ] + + return _runs_get.total, _computational_runs_output + + +async def list_computation_collection_run_tasks( + app: web.Application, + *, + product_name: ProductName, + user_id: UserID, + collection_run_id: CollectionRunID, + # pagination + offset: int, + limit: NonNegativeInt, +) -> tuple[int, list[ComputationCollectionRunTaskWithAttributes]]: + rpc_client = get_rabbitmq_rpc_client(app) + _tasks_get = await computations.list_computation_collection_run_tasks_page( + rpc_client, + product_name=product_name, + user_id=user_id, + collection_run_id=collection_run_id, + offset=offset, + limit=limit, + ) + + # Get unique set of all project_uuids from comp_tasks + unique_project_uuids = {task.project_uuid for task in _tasks_get.items} + # NOTE: MD: can be improved with a single batch call + project_dicts = await limited_gather( + *[ + get_project_dict_legacy(app, project_uuid=project_uuid) + for project_uuid in unique_project_uuids + ], + limit=20, + ) + # Build a dict: project_uuid -> workbench + project_uuid_to_workbench = {prj["uuid"]: prj["workbench"] for prj in project_dicts} + + # Fetch projects metadata concurrently + _projects_metadata = await 
_get_projects_metadata( + app, project_uuids=[item.project_uuid for item in _tasks_get.items] + ) + + _service_run_ids = [item.service_run_id for item in _tasks_get.items] + _is_product_billable = await is_product_billable(app, product_name=product_name) + _service_run_osparc_credits: list[Decimal | None] + if _is_product_billable: + # NOTE: MD: can be improved with a single batch call + _service_run_osparc_credits = await limited_gather( + *[ + _get_credits_or_zero_by_service_run_id( + rpc_client, service_run_id=_run_id + ) + for _run_id in _service_run_ids + ], + limit=20, + ) + else: + _service_run_osparc_credits = [None for _ in _service_run_ids] + + # Final output + _tasks_get_output = [ + ComputationCollectionRunTaskWithAttributes( + project_uuid=item.project_uuid, + node_id=item.node_id, + state=item.state, + progress=item.progress, + image=item.image, + started_at=item.started_at, + ended_at=item.ended_at, + log_download_link=item.log_download_link, + name=( + custom_metadata.get("job_name") + or project_uuid_to_workbench[f"{item.project_uuid}"][ + f"{item.node_id}" + ].get("label") + or "Unknown" + ), + osparc_credits=credits_or_none, + ) + for item, credits_or_none, custom_metadata in zip( + _tasks_get.items, + _service_run_osparc_credits, + _projects_metadata, + strict=True, + ) + ] + return _tasks_get.total, _tasks_get_output diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_controller/_rest_exceptions.py b/services/web/server/src/simcore_service_webserver/director_v2/_controller/_rest_exceptions.py index 81f9197fc3c1..89888cb31757 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/_controller/_rest_exceptions.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/_controller/_rest_exceptions.py @@ -2,10 +2,11 @@ from aiohttp import web from common_library.error_codes import create_error_code +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs +from common_library.user_messages import user_message from models_library.rest_error import ErrorGet from servicelib import status_codes_utils from servicelib.aiohttp import status -from servicelib.logging_errors import create_troubleshotting_log_kwargs from ...constants import MSG_TRY_AGAIN_OR_SUPPORT from ...exception_handling import ( @@ -43,16 +44,17 @@ async def _handler_director_service_error_as_503_or_4xx( if status_codes_utils.is_5xx_server_error(exception.status): # NOTE: All directorv2 5XX are mapped to 503 status_code = status.HTTP_503_SERVICE_UNAVAILABLE - user_msg = ( + user_msg = user_message( # Most likely the director service is down or misconfigured so the user is asked to try again later. - "This service is temporarily unavailable. The incident was logged and will be investigated. " - + MSG_TRY_AGAIN_OR_SUPPORT + "This service is temporarily unavailable. The incident has been logged and will be investigated. 
" + + MSG_TRY_AGAIN_OR_SUPPORT, + _version=1, ) # Log for further investigation oec = create_error_code(exception) _logger.exception( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( user_msg, error=exception, error_code=oec, @@ -72,7 +74,7 @@ async def _handler_director_service_error_as_503_or_4xx( exception.status ), f"DirectorV2ServiceError must be a client error, got {exception=}" # nosec - error = ErrorGet(status=exception.status, message="{exception}") + error = ErrorGet(status=exception.status, message=f"{exception}") return create_error_response(error, status_code=error.status) @@ -85,11 +87,17 @@ async def _handler_director_service_error_as_503_or_4xx( _TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { UserDefaultWalletNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Default wallet not found but necessary for computations", + user_message( + "A default wallet is required for running computations but could not be found.", + _version=1, + ), ), WalletNotEnoughCreditsError: HttpErrorInfo( status.HTTP_402_PAYMENT_REQUIRED, - "Wallet does not have enough credits for computations. {reason}", + user_message( + "Your wallet does not have sufficient credits to run this computation: {details}", + _version=1, + ), ), } diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_controller/computations_rest.py b/services/web/server/src/simcore_service_webserver/director_v2/_controller/computations_rest.py index 96766fe97af7..1b204b21e4b1 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/_controller/computations_rest.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/_controller/computations_rest.py @@ -2,6 +2,11 @@ from aiohttp import web from models_library.api_schemas_webserver.computations import ( + ComputationCollectionRunListQueryParams, + ComputationCollectionRunPathParams, + ComputationCollectionRunRestGet, + ComputationCollectionRunTaskListQueryParams, + ComputationCollectionRunTaskRestGet, ComputationRunIterationsLatestListQueryParams, ComputationRunIterationsListQueryParams, ComputationRunPathParams, @@ -16,13 +21,14 @@ from models_library.rest_pagination_utils import paginate_data from models_library.users import UserID from pydantic import Field +from servicelib.aiohttp.request_keys import RQT_USERID_KEY from servicelib.aiohttp.requests_validation import ( parse_request_path_parameters_as, parse_request_query_parameters_as, ) from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON -from servicelib.request_keys import RQT_USERID_KEY from servicelib.rest_constants import RESPONSE_MODEL_POLICY +from servicelib.tracing import with_profiled_span from ..._meta import API_VTAG as VTAG from ...constants import RQ_PRODUCT_KEY @@ -187,3 +193,104 @@ async def list_computations_latest_iteration_tasks( text=page.model_dump_json(**RESPONSE_MODEL_POLICY), content_type=MIMETYPE_APPLICATION_JSON, ) + + +#### NEW: + + +@routes.get( + f"/{VTAG}/computation-collection-runs", + name="list_computation_collection_runs", +) +@login_required +@with_profiled_span +@permission_required("services.pipeline.*") +@permission_required("project.read") +async def list_computation_collection_runs(request: web.Request) -> web.Response: + req_ctx = ComputationsRequestContext.model_validate(request) + query_params: ComputationCollectionRunListQueryParams = ( + parse_request_query_parameters_as( + ComputationCollectionRunListQueryParams, request + ) + ) + + total, items = await 
_computations_service.list_computation_collection_runs( + request.app, + product_name=req_ctx.product_name, + user_id=req_ctx.user_id, + # filters + filter_by_root_project_id=query_params.filter_by_root_project_id, + filter_only_running=query_params.filter_only_running, + # pagination + offset=query_params.offset, + limit=query_params.limit, + ) + + page = Page[ComputationCollectionRunRestGet].model_validate( + paginate_data( + chunk=[ + ComputationCollectionRunRestGet.model_validate( + run, from_attributes=True + ) + for run in items + ], + total=total, + limit=query_params.limit, + offset=query_params.offset, + request_url=request.url, + ) + ) + + return web.Response( + text=page.model_dump_json(**RESPONSE_MODEL_POLICY), + content_type=MIMETYPE_APPLICATION_JSON, + ) + + +@routes.get( + f"/{VTAG}/computation-collection-runs/{{collection_run_id}}/tasks", + name="list_computation_collection_run_tasks", +) +@login_required +@permission_required("services.pipeline.*") +@permission_required("project.read") +async def list_computation_collection_run_tasks(request: web.Request) -> web.Response: + req_ctx = ComputationsRequestContext.model_validate(request) + path_params = parse_request_path_parameters_as( + ComputationCollectionRunPathParams, request + ) + query_params: ComputationCollectionRunTaskListQueryParams = ( + parse_request_query_parameters_as( + ComputationCollectionRunTaskListQueryParams, request + ) + ) + + total, items = await _computations_service.list_computation_collection_run_tasks( + request.app, + product_name=req_ctx.product_name, + user_id=req_ctx.user_id, + collection_run_id=path_params.collection_run_id, + # pagination + offset=query_params.offset, + limit=query_params.limit, + ) + + page = Page[ComputationCollectionRunTaskRestGet].model_validate( + paginate_data( + chunk=[ + ComputationCollectionRunTaskRestGet.model_validate( + run, from_attributes=True + ) + for run in items + ], + total=total, + limit=query_params.limit, + offset=query_params.offset, + request_url=request.url, + ) + ) + + return web.Response( + text=page.model_dump_json(**RESPONSE_MODEL_POLICY), + content_type=MIMETYPE_APPLICATION_JSON, + ) diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_controller/rest.py b/services/web/server/src/simcore_service_webserver/director_v2/_controller/rest.py index 12ab71bd96fd..197ff4a50da9 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/_controller/rest.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/_controller/rest.py @@ -1,5 +1,7 @@ import asyncio import logging +import uuid +from datetime import UTC, datetime, timedelta from typing import Any from aiohttp import web @@ -11,6 +13,7 @@ ) from models_library.projects import CommitID, ProjectID from servicelib.aiohttp import status +from servicelib.aiohttp.request_keys import RQT_USERID_KEY from servicelib.aiohttp.requests_validation import ( parse_request_body_as, parse_request_path_parameters_as, @@ -19,16 +22,19 @@ UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, X_SIMCORE_USER_AGENT, ) -from servicelib.request_keys import RQT_USERID_KEY from ..._meta import API_VTAG as VTAG from ...login.decorators import login_required -from ...models import RequestContext +from ...models import AuthenticatedRequestContext from ...products import products_web +from ...projects.projects_metadata_service import ( + get_project_custom_metadata_or_empty_dict, +) from ...security.decorators import permission_required from ...utils_aiohttp import 
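
Both new endpoints wrap their items in the offset/limit page envelope produced by `Page[...]` and `paginate_data`. A rough, self-contained sketch of what such an envelope contains (field names are illustrative, not the exact models_library schema):

```python
def paginate(items: list, *, total: int, offset: int, limit: int, base_url: str) -> dict:
    """Build a simplified offset/limit page envelope with self/next links."""
    next_offset = offset + limit if offset + limit < total else None
    return {
        "_meta": {"total": total, "offset": offset, "limit": limit, "count": len(items)},
        "_links": {
            "self": f"{base_url}?offset={offset}&limit={limit}",
            "next": (
                f"{base_url}?offset={next_offset}&limit={limit}"
                if next_offset is not None
                else None
            ),
        },
        "data": items,
    }


page = paginate(
    [{"id": 1}, {"id": 2}],
    total=5,
    offset=0,
    limit=2,
    base_url="/v0/computation-collection-runs",
)
assert page["_links"]["next"] == "/v0/computation-collection-runs?offset=2&limit=2"
```
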
envelope_json_response, get_api_base_url -from .. import _director_v2_service +from .. import _comp_runs_collections_service, _director_v2_service from .._client import DirectorV2RestClient +from .._comp_runs_collections_models import CompRunCollectionDBGet from .._director_v2_abc_service import get_project_run_policy from ._rest_exceptions import handle_rest_requests_exceptions @@ -47,7 +53,7 @@ async def start_computation(request: web.Request) -> web.Response: simcore_user_agent = request.headers.get( X_SIMCORE_USER_AGENT, UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE ) - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ComputationPathParams, request) subgraph: set[str] = set() @@ -72,6 +78,46 @@ async def start_computation(request: web.Request) -> web.Response: product_name=req_ctx.product_name, ) + # Get Project custom metadata information + # inject the collection_id to the options + custom_metadata = await get_project_custom_metadata_or_empty_dict( + request.app, project_uuid=path_params.project_id + ) + group_id_or_none = custom_metadata.get("group_id") + + comp_run_collection: CompRunCollectionDBGet | None = None + if group_id_or_none: + comp_run_collection = await _comp_runs_collections_service.get_comp_run_collection_or_none_by_client_generated_id( + request.app, client_or_system_generated_id=str(group_id_or_none) + ) + if comp_run_collection is not None: + created_at: datetime = comp_run_collection.created + now = datetime.now(UTC) + if now - created_at > timedelta(minutes=5): + raise web.HTTPBadRequest( + text=( + "This client generated collection is not new, " + "it was created more than 5 minutes ago. " + "Therefore, the client is probably wrongly generating it." 
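
The `start_computation` changes above derive a collection id for the run: an empty or all-zero `group_id` in the project's custom metadata makes the webserver mint a system-generated id, while a client-provided `group_id` is reused only if any existing collection with that id is younger than five minutes. A condensed, standalone sketch of that decision (the repository lookup is replaced by a plain datetime argument):

```python
import uuid
from datetime import UTC, datetime, timedelta

_NIL_UUID = "00000000-0000-0000-0000-000000000000"
_MAX_CLIENT_COLLECTION_AGE = timedelta(minutes=5)


def build_collection_id(
    project_id: str,
    group_id: str | None,
    existing_collection_created_at: datetime | None,
) -> tuple[str, bool]:
    """Returns (client_or_system_generated_id, is_generated_by_system)."""
    if group_id in (None, "", _NIL_UUID):
        # no usable client id: the backend generates one per project/run
        return f"system-generated/{project_id}/{uuid.uuid4()}", True
    if (
        existing_collection_created_at is not None
        and datetime.now(UTC) - existing_collection_created_at > _MAX_CLIENT_COLLECTION_AGE
    ):
        msg = "client-generated collection is older than 5 minutes"
        raise ValueError(msg)
    return str(group_id), False


collection_id, is_system = build_collection_id("project-1", None, None)
assert is_system and collection_id.startswith("system-generated/project-1/")
```
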
+ ) + ) + is_generated_by_system = False + if group_id_or_none in {None, "", "00000000-0000-0000-0000-000000000000"}: + is_generated_by_system = True + client_or_system_generated_id = ( + f"system-generated/{path_params.project_id}/{uuid.uuid4()}" + ) + else: + client_or_system_generated_id = f"{group_id_or_none}" + group_name = custom_metadata.get("group_name", "No Group Name") + + collection_run_id = await _comp_runs_collections_service.upsert_comp_run_collection( + request.app, + client_or_system_generated_id=client_or_system_generated_id, + client_or_system_generated_display_name=str(group_name), + is_generated_by_system=is_generated_by_system, + ) + options = { "start_pipeline": True, "subgraph": list(subgraph), # sets are not natively json serializable @@ -79,6 +125,7 @@ async def start_computation(request: web.Request) -> web.Response: "simcore_user_agent": simcore_user_agent, "use_on_demand_clusters": group_properties.use_on_demand_clusters, "wallet_info": wallet_info, + "collection_run_id": collection_run_id, } run_policy = get_project_run_policy(request.app) @@ -140,7 +187,7 @@ async def start_computation(request: web.Request) -> web.Response: @permission_required("project.read") @handle_rest_requests_exceptions async def stop_computation(request: web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) computations = DirectorV2RestClient(request.app) run_policy = get_project_run_policy(request.app) assert run_policy # nosec diff --git a/services/web/server/src/simcore_service_webserver/director_v2/_director_v2_service.py b/services/web/server/src/simcore_service_webserver/director_v2/_director_v2_service.py index a69f204c8416..0cd58f93a8f0 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/_director_v2_service.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/_director_v2_service.py @@ -2,6 +2,7 @@ from uuid import UUID from aiohttp import web +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from models_library.api_schemas_directorv2.computations import ( TasksOutputs, TasksSelection, @@ -15,21 +16,22 @@ from pydantic import TypeAdapter from pydantic.types import PositiveInt from servicelib.aiohttp import status +from servicelib.exception_utils import suppress_exceptions from servicelib.logging_utils import log_decorator from simcore_postgres_database.utils_groups_extra_properties import ( GroupExtraProperties, GroupExtraPropertiesRepo, ) -from simcore_service_webserver.director_v2._client import DirectorV2RestClient from ..application_settings import get_application_settings -from ..db.plugin import get_database_engine +from ..db.plugin import get_database_engine_legacy from ..products import products_service from ..products.models import Product from ..projects import projects_wallets_service -from ..users import preferences_api as user_preferences_service +from ..user_preferences import user_preferences_service from ..users.exceptions import UserDefaultWalletNotFoundError from ..wallets import api as wallets_service +from ._client import DirectorV2RestClient from ._client_base import DataType, request_director_v2 from .exceptions import ComputationNotFoundError, DirectorV2ServiceError from .settings import DirectorV2Settings, get_plugin_settings @@ -65,6 +67,7 @@ async def create_or_update_pipeline( user_id=user_id, project_id=project_id, product_name=product_name, + check_user_wallet_permission=False, ), } @@ 
-76,10 +79,12 @@ async def create_or_update_pipeline( return computation_task_out except DirectorV2ServiceError as exc: - _logger.error( # noqa: TRY400 - "could not create pipeline from project %s: %s", - project_id, - exc, + _logger.exception( + **create_troubleshooting_log_kwargs( + f"Could not create pipeline from project {project_id}", + error=exc, + error_context={**body, "backend_url": backend_url}, + ) ) return None @@ -108,7 +113,6 @@ async def is_pipeline_running( async def get_computation_task( app: web.Application, user_id: UserID, project_id: ProjectID ) -> ComputationTask | None: - try: dv2_computation = await DirectorV2RestClient(app).get_computation( project_id=project_id, user_id=user_id @@ -127,7 +131,17 @@ async def get_computation_task( return None +def _skip_if_pipeline_not_found(exception: BaseException) -> bool: + assert isinstance(exception, DirectorV2ServiceError) # nosec + return exception.status == status.HTTP_404_NOT_FOUND + + @log_decorator(logger=_logger) +@suppress_exceptions( + (DirectorV2ServiceError,), + reason="silence in case the pipeline does not exist", + predicate=_skip_if_pipeline_not_found, +) async def stop_pipeline( app: web.Application, *, user_id: PositiveInt, project_id: ProjectID ): @@ -200,6 +214,7 @@ async def get_wallet_info( user_id: UserID, project_id: ProjectID, product_name: ProductName, + check_user_wallet_permission: bool = True, ) -> WalletInfo | None: app_settings = get_application_settings(app) if not ( @@ -231,13 +246,24 @@ async def get_wallet_info( else: project_wallet_id = project_wallet.wallet_id - # Check whether user has access to the wallet - wallet = await wallets_service.get_wallet_with_available_credits_by_user_and_wallet( - app, - user_id=user_id, - wallet_id=project_wallet_id, - product_name=product_name, - ) + if check_user_wallet_permission: + # Check whether user has access to the wallet + wallet = ( + await wallets_service.get_wallet_with_available_credits_by_user_and_wallet( + app, + user_id=user_id, + wallet_id=project_wallet_id, + product_name=product_name, + ) + ) + else: + # This function is used also when we are synchronizing the projects/projects_nodes with the comp_pipelines/comp_tasks tables in director-v2 + # In situations where a project is connected to a wallet, but the user does not have access to it and is performing an action such as + # upgrading the service version, we still want to retrieve the wallet info and pass it to director-v2. 
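
`stop_pipeline` is now decorated with servicelib's `suppress_exceptions`, so a 404 from director-v2 (pipeline already gone) is silenced instead of bubbling up, while other `DirectorV2ServiceError`s still propagate. A minimal standard-library stand-in for that decorator pattern (the real helper's internals may differ):

```python
import asyncio
import functools
import logging
from collections.abc import Callable

_logger = logging.getLogger(__name__)


def suppress_exceptions_sketch(
    exc_types: tuple[type[BaseException], ...],
    *,
    reason: str,
    predicate: Callable[[BaseException], bool],
):
    """Swallow selected exceptions, but only when `predicate` agrees."""

    def _decorator(func):
        @functools.wraps(func)
        async def _wrapper(*args, **kwargs):
            try:
                return await func(*args, **kwargs)
            except exc_types as exc:
                if not predicate(exc):
                    raise
                _logger.debug("Suppressed %r (%s)", exc, reason)
                return None

        return _wrapper

    return _decorator


class ServiceErrorSketch(Exception):
    def __init__(self, status: int) -> None:
        super().__init__(f"status={status}")
        self.status = status


@suppress_exceptions_sketch(
    (ServiceErrorSketch,),
    reason="silence in case the pipeline does not exist",
    predicate=lambda exc: exc.status == 404,
)
async def stop_pipeline_sketch(*, pipeline_exists: bool) -> str:
    if not pipeline_exists:
        raise ServiceErrorSketch(status=404)
    return "stopped"


assert asyncio.run(stop_pipeline_sketch(pipeline_exists=False)) is None
assert asyncio.run(stop_pipeline_sketch(pipeline_exists=True)) == "stopped"
```
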
+ wallet = await wallets_service.get_wallet_with_available_credits( + app, wallet_id=project_wallet_id, product_name=product_name + ) + return WalletInfo( wallet_id=project_wallet_id, wallet_name=wallet.name, @@ -251,7 +277,7 @@ async def get_group_properties( product_name: ProductName, user_id: UserID, ) -> GroupExtraProperties: - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: return await GroupExtraPropertiesRepo.get_aggregated_properties_for_user( conn, user_id=user_id, product_name=product_name ) diff --git a/services/web/server/src/simcore_service_webserver/director_v2/exceptions.py b/services/web/server/src/simcore_service_webserver/director_v2/exceptions.py index f127c08c795a..069b2b92da74 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/exceptions.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/exceptions.py @@ -8,12 +8,12 @@ class DirectorV2ServiceError(WebServerBaseError, RuntimeError): """Basic exception for errors raised by director-v2""" - msg_template = "Unexpected error: director-v2 returned '{status}', reason '{reason}' after calling '{url}'" + msg_template = "Unexpected error: director-v2 returned '{status}', details '{details}' after calling '{url}'" - def __init__(self, *, status: int, reason: str, **ctx: Any) -> None: + def __init__(self, *, status: int, details: str, **ctx: Any) -> None: super().__init__(**ctx) self.status = status - self.reason = reason + self.details = details class ComputationNotFoundError(DirectorV2ServiceError): diff --git a/services/web/server/src/simcore_service_webserver/director_v2/plugin.py b/services/web/server/src/simcore_service_webserver/director_v2/plugin.py index d847156841b4..a971bfbb91ee 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/plugin.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/plugin.py @@ -1,13 +1,13 @@ import logging from aiohttp import web -from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY -from servicelib.aiohttp.application_setup import ( + +from ..application_setup import ( ModuleCategory, - app_module_setup, + app_setup_func, is_setup_completed, ) - +from ..constants import APP_SETTINGS_KEY from ..rest.plugin import setup_rest from . 
import _controller from ._client import DirectorV2RestClient, get_directorv2_client, set_directorv2_client @@ -17,7 +17,7 @@ _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_DIRECTOR_V2", diff --git a/services/web/server/src/simcore_service_webserver/director_v2/settings.py b/services/web/server/src/simcore_service_webserver/director_v2/settings.py index 79429dbd696b..5166468d0d75 100644 --- a/services/web/server/src/simcore_service_webserver/director_v2/settings.py +++ b/services/web/server/src/simcore_service_webserver/director_v2/settings.py @@ -1,19 +1,16 @@ -""" director v2 susystem configuration -""" +"""director v2 susystem configuration""" from functools import cached_property -from typing import cast from aiohttp import ClientSession, ClientTimeout, web from models_library.basic_types import VersionTag from pydantic import AliasChoices, Field, PositiveInt -from servicelib.aiohttp.application_keys import APP_CLIENT_SESSION_KEY from settings_library.base import BaseCustomSettings from settings_library.basic_types import PortInt from settings_library.utils_service import DEFAULT_FASTAPI_PORT, MixinServiceSettings from yarl import URL -from ..constants import APP_SETTINGS_KEY +from ..constants import APP_CLIENT_SESSION_KEY, APP_SETTINGS_KEY _MINUTE = 60 _HOUR = 60 * _MINUTE @@ -62,4 +59,4 @@ def get_plugin_settings(app: web.Application) -> DirectorV2Settings: def get_client_session(app: web.Application) -> ClientSession: - return cast(ClientSession, app[APP_CLIENT_SESSION_KEY]) + return app[APP_CLIENT_SESSION_KEY] diff --git a/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py b/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py index 83a90e286c2a..c1bfb3ebe445 100644 --- a/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py +++ b/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py @@ -24,7 +24,6 @@ from models_library.services import ServicePortKey from models_library.users import UserID from pydantic import NonNegativeInt -from pydantic.types import PositiveInt from servicelib.progress_bar import ProgressBarData from servicelib.rabbitmq import RabbitMQClient, RPCServerError from servicelib.rabbitmq.rpc_interfaces.dynamic_scheduler import services @@ -94,13 +93,13 @@ async def stop_dynamic_service( async def _post_progress_message( rabbitmq_client: RabbitMQClient, - user_id: PositiveInt, - project_id: str, + user_id: UserID, + project_id: ProjectID, report: ProgressReport, ) -> None: progress_message = ProgressRabbitMessageProject( user_id=user_id, - project_id=ProjectID(project_id), + project_id=project_id, progress_type=ProgressType.PROJECT_CLOSING, report=report, ) @@ -111,14 +110,14 @@ async def _post_progress_message( async def stop_dynamic_services_in_project( app: web.Application, *, - user_id: PositiveInt, - project_id: str, + user_id: UserID, + project_id: ProjectID, simcore_user_agent: str, save_state: bool, ) -> None: """Stops all dynamic services in the project""" running_dynamic_services = await list_dynamic_services( - app, user_id=user_id, project_id=ProjectID(project_id) + app, user_id=user_id, project_id=project_id ) async with AsyncExitStack() as stack: diff --git a/services/web/server/src/simcore_service_webserver/dynamic_scheduler/plugin.py b/services/web/server/src/simcore_service_webserver/dynamic_scheduler/plugin.py index 905026d97be2..50db735fc0b4 100644 --- 
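
Dropping the `cast(...)` in `get_client_session` is consistent with aiohttp's typed application keys (`web.AppKey`, available since aiohttp 3.9), where `app[key]` already carries the stored type. Whether `APP_CLIENT_SESSION_KEY` is defined as such a key in this codebase is an assumption; the sketch below only illustrates the mechanism with a hypothetical key:

```python
from aiohttp import ClientSession, web

# Hypothetical typed key; the project's APP_CLIENT_SESSION_KEY may be defined differently.
CLIENT_SESSION_APP_KEY: web.AppKey[ClientSession] = web.AppKey(
    "client_session", ClientSession
)


def get_client_session(app: web.Application) -> ClientSession:
    # With a typed AppKey, type checkers infer ClientSession here, so no cast is needed.
    return app[CLIENT_SESSION_APP_KEY]
```
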
a/services/web/server/src/simcore_service_webserver/dynamic_scheduler/plugin.py +++ b/services/web/server/src/simcore_service_webserver/dynamic_scheduler/plugin.py @@ -1,19 +1,19 @@ """ - Plugin to interact with the 'dynamic-scheduler' service +Plugin to interact with the 'dynamic-scheduler' service """ import logging from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func from ..constants import APP_SETTINGS_KEY from ..rabbitmq import setup_rabbitmq _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_DYNAMIC_SCHEDULER", diff --git a/services/web/server/src/simcore_service_webserver/email/_core.py b/services/web/server/src/simcore_service_webserver/email/_core.py index c9a137337449..8736a640ab96 100644 --- a/services/web/server/src/simcore_service_webserver/email/_core.py +++ b/services/web/server/src/simcore_service_webserver/email/_core.py @@ -15,6 +15,7 @@ from aiohttp_jinja2 import render_string from settings_library.email import EmailProtocol, SMTPSettings +from ..products import products_web from .settings import get_plugin_settings _logger = logging.getLogger(__name__) @@ -225,6 +226,10 @@ def _render_template( return subject, html_body +async def get_template_path(request: web.Request, filename: str) -> Path: + return await products_web.get_product_template_path(request, filename) + + async def send_email_from_template( request: web.Request, *, diff --git a/services/web/server/src/simcore_service_webserver/email/_handlers.py b/services/web/server/src/simcore_service_webserver/email/_handlers.py index 6b195dc54e84..8ba75dee3b7b 100644 --- a/services/web/server/src/simcore_service_webserver/email/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/email/_handlers.py @@ -2,6 +2,7 @@ from typing import Any, Literal from aiohttp import web +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from models_library.emails import LowerCaseEmailStr from pydantic import BaseModel, Field from servicelib.aiohttp.requests_validation import parse_request_body_as @@ -11,17 +12,11 @@ from ..products import products_web from ..products.models import Product from ..security.decorators import permission_required -from ..utils import get_traceback_string from ..utils_aiohttp import envelope_json_response from ._core import check_email_server_responsiveness, send_email_from_template from .settings import get_plugin_settings -logger = logging.getLogger(__name__) - - -# -# API schema models -# +_logger = logging.getLogger(__name__) class TestEmail(BaseModel): @@ -39,18 +34,8 @@ class TestEmail(BaseModel): class EmailTestFailed(BaseModel): test_name: str - error_type: str - error_message: str - traceback: str - - @classmethod - def create_from_exception(cls, error: Exception, test_name: str): - return cls( - test_name=test_name, - error_type=f"{type(error)}", - error_message=f"{error}", - traceback=get_traceback_string(error), - ) + error_code: str | None = None + user_message: str = "Email test failed" class EmailTestPassed(BaseModel): @@ -58,9 +43,6 @@ class EmailTestPassed(BaseModel): info: dict[str, Any] -# -# API routes -# routes = web.RouteTableDef() @@ -109,10 +91,23 @@ async def test_email(request: web.Request): ) except Exception as err: # pylint: disable=broad-except - logger.exception( - "test_email failed for %s", - 
f"{settings.model_dump_json(indent=1)}", + _logger.exception( + **create_troubleshooting_log_kwargs( + user_error_msg="Email test failed", + error=err, + error_context={ + "template_name": body.template_name, + "to": body.to, + "from_": body.from_ or product.support_email, + "settings": settings.model_dump(), + }, + tip="Check SMTP settings and network connectivity", + ) ) return envelope_json_response( - EmailTestFailed.create_from_exception(error=err, test_name="test_email") + EmailTestFailed( + test_name="test_email", + error_code=getattr(err, "error_code", None), + user_message="Email test failed. Please check the logs for more details.", + ) ) diff --git a/services/web/server/src/simcore_service_webserver/email/email_service.py b/services/web/server/src/simcore_service_webserver/email/email_service.py new file mode 100644 index 000000000000..0648210d9f14 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/email/email_service.py @@ -0,0 +1,19 @@ +import logging + +from ._core import AttachmentTuple, get_template_path, send_email_from_template + +log = logging.getLogger(__name__) + + +# prevents auto-removal by pycln +# mypy: disable-error-code=truthy-function +assert AttachmentTuple # nosec +assert send_email_from_template # nosec +assert get_template_path # nosec + + +__all__: tuple[str, ...] = ( + "AttachmentTuple", + "send_email_from_template", + "get_template_path", +) diff --git a/services/web/server/src/simcore_service_webserver/email/plugin.py b/services/web/server/src/simcore_service_webserver/email/plugin.py index cb72ea8135fd..e8f03cf749fb 100644 --- a/services/web/server/src/simcore_service_webserver/email/plugin.py +++ b/services/web/server/src/simcore_service_webserver/email/plugin.py @@ -1,8 +1,8 @@ -""" Plugin to send emails and render email templates +"""Plugin to send emails and render email templates - SMTP: Simple Mail Transfer Protocol - MIME: Multipurpose Internet Mail Extensions +SMTP: Simple Mail Transfer Protocol +MIME: Multipurpose Internet Mail Extensions """ @@ -11,15 +11,15 @@ import aiohttp_jinja2 import jinja_app_loader # type: ignore[import-untyped] from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup from .._resources import webserver_resources +from ..application_setup import ModuleCategory, app_setup_func from . 
import _handlers log = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_EMAIL", logger=log ) def setup_email(app: web.Application): diff --git a/services/web/server/src/simcore_service_webserver/email/utils.py b/services/web/server/src/simcore_service_webserver/email/utils.py deleted file mode 100644 index 9be692219271..000000000000 --- a/services/web/server/src/simcore_service_webserver/email/utils.py +++ /dev/null @@ -1,3 +0,0 @@ -from ._core import AttachmentTuple, send_email_from_template - -__all__ = ("AttachmentTuple", "send_email_from_template") diff --git a/services/web/server/src/simcore_service_webserver/exception_handling/_factory.py b/services/web/server/src/simcore_service_webserver/exception_handling/_factory.py index b8e1bb4276e2..49118ad8b8de 100644 --- a/services/web/server/src/simcore_service_webserver/exception_handling/_factory.py +++ b/services/web/server/src/simcore_service_webserver/exception_handling/_factory.py @@ -4,11 +4,15 @@ from aiohttp import web from common_library.error_codes import create_error_code from common_library.json_serialization import json_dumps +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from models_library.rest_error import ErrorGet from servicelib.aiohttp.rest_responses import safe_status_message from servicelib.aiohttp.web_exceptions_extension import get_all_aiohttp_http_exceptions -from servicelib.logging_errors import create_troubleshotting_log_kwargs -from servicelib.status_codes_utils import is_5xx_server_error, is_error +from servicelib.status_codes_utils import ( + get_code_display_name, + is_5xx_server_error, + is_error, +) from ._base import AiohttpExceptionHandler, ExceptionHandlersMap @@ -50,7 +54,7 @@ def create_error_response(error: ErrorGet, status_code: int) -> web.Response: return web.json_response( data={"error": error.model_dump(exclude_unset=True, mode="json")}, dumps=json_dumps, - reason=safe_status_message(error.message), + reason=safe_status_message(get_code_display_name(status_code)), status=status_code, ) @@ -84,7 +88,6 @@ async def _exception_handler( request: web.Request, exception: BaseException, ) -> web.Response: - # safe formatting, i.e. 
does not raise user_msg = msg_template.format_map( _DefaultDict(getattr(exception, "__dict__", {})) @@ -102,7 +105,7 @@ async def _exception_handler( } _logger.exception( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( user_msg, error=exception, error_code=oec, @@ -141,7 +144,7 @@ def create_http_error_exception_handlers_map() -> ExceptionHandlersMap: """ exc_handlers_map: ExceptionHandlersMap = { exc_type: create_exception_handler_from_http_info( - status_code=code, msg_template="{reason}" + status_code=code, msg_template="{text}" ) for code, exc_type in _STATUS_CODE_TO_HTTP_ERRORS.items() } diff --git a/services/web/server/src/simcore_service_webserver/exporter/_formatter/xlsx/code_description.py b/services/web/server/src/simcore_service_webserver/exporter/_formatter/xlsx/code_description.py index 15ee7bac3b08..e53142df73d3 100644 --- a/services/web/server/src/simcore_service_webserver/exporter/_formatter/xlsx/code_description.py +++ b/services/web/server/src/simcore_service_webserver/exporter/_formatter/xlsx/code_description.py @@ -1,6 +1,7 @@ from abc import abstractmethod -from typing import Any, ClassVar, Final, cast +from typing import Annotated, Any, ClassVar, Final, cast +from common_library.basic_types import DEFAULT_FACTORY from models_library.services import ServiceKey, ServiceVersion from pydantic import BaseModel, Field, StrictStr @@ -10,20 +11,23 @@ class RRIDEntry(BaseModel): - rrid_term: StrictStr = Field(..., description="Associated tools or resources used") - rrid_identifier: StrictStr = Field( - ..., description="Associated tools or resources identifier (with 'RRID:')" - ) + rrid_term: Annotated[ + StrictStr, Field(description="Associated tools or resources used") + ] + rrid_identifier: Annotated[ + StrictStr, + Field(description="Associated tools or resources identifier (with 'RRID:')"), + ] # the 2 items below are not enabled for now - ontological_term: StrictStr = Field( - "", description="Associated ontological term (human-readable)" - ) - ontological_identifier: StrictStr = Field( - "", - description=( - "Associated ontological identifier from SciCrunch https://scicrunch.org/sawg" + ontological_term: Annotated[ + StrictStr, Field(description="Associated ontological term (human-readable)") + ] = "" + ontological_identifier: Annotated[ + StrictStr, + Field( + description="Associated ontological identifier from SciCrunch https://scicrunch.org/sawg" ), - ) + ] = "" class TSREntry(BaseModel): @@ -33,99 +37,117 @@ class TSREntry(BaseModel): class CodeDescriptionModel(BaseModel): - rrid_entires: list[RRIDEntry] = Field( - default_factory=list, description="composed from the classifiers" - ) + rrid_entires: Annotated[ + list[RRIDEntry], + Field(default_factory=list, description="composed from the classifiers"), + ] = DEFAULT_FACTORY # TSR - tsr_entries: dict[str, TSREntry] = Field( - default_factory=dict, description="list of rules to generate tsr" - ) + tsr_entries: Annotated[ + dict[str, TSREntry], + Field(default_factory=dict, description="list of rules to generate tsr"), + ] = DEFAULT_FACTORY class InputsEntryModel(BaseModel): - service_alias: StrictStr = Field( - ..., description="Name of the service containing this input, given by the user" - ) - service_name: StrictStr = Field( - ..., description="Name of the service containing this input" - ) - service_key: ServiceKey = Field( - ..., description="Key of the service containing this input" - ) - service_version: ServiceVersion = Field( - ..., description="Version of the service 
containing this input" - ) - input_name: StrictStr = Field( - "", description="An input field to the MSoP submission" - ) - input_parameter_description: StrictStr = Field( - "", description="Description of what the parameter represents" - ) - input_data_type: StrictStr = Field( - "", description="Data type for the input field (in plain text)" - ) - input_data_units: StrictStr = Field( - "", description="Units of data for the input field, if applicable" - ) - input_data_default_value: StrictStr = Field( - "", - description="Default value for the input field, if applicable (doi or value)", - ) - input_data_constraints: StrictStr = Field( - "", - description="Range [min, max] of acceptable parameter values, or other constraints as formulas / sets", - ) + service_alias: Annotated[ + StrictStr, + Field( + description="Name of the service containing this input, given by the user" + ), + ] + service_name: Annotated[ + StrictStr, Field(description="Name of the service containing this input") + ] + service_key: Annotated[ + ServiceKey, Field(description="Key of the service containing this input") + ] + service_version: Annotated[ + ServiceVersion, + Field(description="Version of the service containing this input"), + ] + input_name: Annotated[ + StrictStr, Field(description="An input field to the MSoP submission") + ] = "" + input_parameter_description: Annotated[ + StrictStr, Field(description="Description of what the parameter represents") + ] = "" + input_data_type: Annotated[ + StrictStr, Field(description="Data type for the input field (in plain text)") + ] = "" + input_data_units: Annotated[ + StrictStr, Field(description="Units of data for the input field, if applicable") + ] = "" + input_data_default_value: Annotated[ + StrictStr, + Field( + description="Default value for the input field, if applicable (doi or value)" + ), + ] = "" + input_data_constraints: Annotated[ + StrictStr, + Field( + description="Range [min, max] of acceptable parameter values, or other constraints as formulas / sets" + ), + ] = "" class OutputsEntryModel(BaseModel): - service_alias: StrictStr = Field( - ..., description="Name of the service producing this output, given by the user" - ) - service_name: StrictStr = Field( - ..., description="Name of the service containing this output" - ) - service_key: ServiceKey = Field( - ..., description="Key of the service containing this output" - ) - service_version: ServiceVersion = Field( - ..., description="Version of the service containing this output" - ) - output_name: StrictStr = Field( - "", description="An output field to the MSoP submission" - ) - output_parameter_description: StrictStr = Field( - "", description="Description of what the parameter represents" - ) - output_data_ontology_identifier: StrictStr = Field( - "", - description=( - "Ontology identifier for the input field, if applicable , " - "https://scicrunch.org/scicrunch/interlex/search?q=NLXOEN&l=NLXOEN&types=term" + service_alias: Annotated[ + StrictStr, + Field( + description="Name of the service producing this output, given by the user" ), - ) - output_data_type: StrictStr = Field( - "", description="Data type for the output field" - ) - output_data_units: StrictStr = Field( - "", description="Units of data for the output field, if applicable" - ) - output_data_constraints: StrictStr = Field( - "", - description="Range [min, max] of acceptable parameter values, or other constraints as formulas / sets", - ) + ] + service_name: Annotated[ + StrictStr, Field(description="Name of the service containing 
this output") + ] + service_key: Annotated[ + ServiceKey, Field(description="Key of the service containing this output") + ] + service_version: Annotated[ + ServiceVersion, + Field(description="Version of the service containing this output"), + ] + output_name: Annotated[ + StrictStr, Field(description="An output field to the MSoP submission") + ] = "" + output_parameter_description: Annotated[ + StrictStr, Field(description="Description of what the parameter represents") + ] = "" + output_data_ontology_identifier: Annotated[ + StrictStr, + Field( + description="Ontology identifier for the input field, if applicable , https://scicrunch.org/scicrunch/interlex/search?q=NLXOEN&l=NLXOEN&types=term" + ), + ] = "" + output_data_type: Annotated[ + StrictStr, Field(description="Data type for the output field") + ] = "" + output_data_units: Annotated[ + StrictStr, + Field(description="Units of data for the output field, if applicable"), + ] = "" + output_data_constraints: Annotated[ + StrictStr, + Field( + description="Range [min, max] of acceptable parameter values, or other constraints as formulas / sets" + ), + ] = "" class CodeDescriptionParams(BaseModel): - code_description: CodeDescriptionModel = Field( - ..., description="code description data" - ) - inputs: list[InputsEntryModel] = Field( - default_factory=list, description="List of inputs, if any" - ) - outputs: list[OutputsEntryModel] = Field( - default_factory=list, description="List of outputs, if any" - ) + code_description: Annotated[ + CodeDescriptionModel, Field(description="code description data") + ] + inputs: Annotated[ + list[InputsEntryModel], + Field(default_factory=list, description="List of inputs, if any"), + ] = DEFAULT_FACTORY + outputs: Annotated[ + list[OutputsEntryModel], + Field(default_factory=list, description="List of outputs, if any"), + ] = DEFAULT_FACTORY def _include_ports_from_this_service(service_key: ServiceKey) -> bool: diff --git a/services/web/server/src/simcore_service_webserver/exporter/_handlers.py b/services/web/server/src/simcore_service_webserver/exporter/_handlers.py index db7466c8e73e..f57541b06800 100644 --- a/services/web/server/src/simcore_service_webserver/exporter/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/exporter/_handlers.py @@ -9,8 +9,8 @@ from models_library.projects import ProjectID from models_library.projects_access import Owner from models_library.projects_state import ProjectStatus +from servicelib.aiohttp.request_keys import RQT_USERID_KEY from servicelib.redis import with_project_locked -from servicelib.request_keys import RQT_USERID_KEY from .._meta import API_VTAG from ..constants import RQ_PRODUCT_KEY @@ -18,7 +18,6 @@ from ..projects._projects_service import create_user_notification_cb from ..redis import get_redis_lock_manager_client_sdk from ..security.decorators import permission_required -from ..users.api import get_user_fullname from ._formatter.archive import get_sds_archive_path from .exceptions import SDSException from .utils import CleanupFileResponse @@ -51,9 +50,7 @@ async def export_project(request: web.Request): get_redis_lock_manager_client_sdk(request.app), project_uuid=project_uuid, status=ProjectStatus.EXPORTING, - owner=Owner( - user_id=user_id, **await get_user_fullname(request.app, user_id=user_id) - ), + owner=Owner(user_id=user_id), notification_cb=create_user_notification_cb( user_id, ProjectID(f"{project_uuid}"), request.app ), diff --git a/services/web/server/src/simcore_service_webserver/exporter/plugin.py 
b/services/web/server/src/simcore_service_webserver/exporter/plugin.py index 1adc3d1d65d1..30f9ec6407f3 100644 --- a/services/web/server/src/simcore_service_webserver/exporter/plugin.py +++ b/services/web/server/src/simcore_service_webserver/exporter/plugin.py @@ -1,14 +1,14 @@ import logging from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func from . import _handlers _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( "simcore_service_webserver.exporter", ModuleCategory.ADDON, settings_name="WEBSERVER_EXPORTER", diff --git a/services/web/server/src/simcore_service_webserver/exporter/settings.py b/services/web/server/src/simcore_service_webserver/exporter/settings.py index b0c2b63b5fd5..9404c304977d 100644 --- a/services/web/server/src/simcore_service_webserver/exporter/settings.py +++ b/services/web/server/src/simcore_service_webserver/exporter/settings.py @@ -1,8 +1,9 @@ from aiohttp.web import Application from pydantic import Field -from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY from settings_library.base import BaseCustomSettings +from ..constants import APP_SETTINGS_KEY + class ExporterSettings(BaseCustomSettings): EXPORTER_ENABLED: bool = Field( diff --git a/services/web/server/src/simcore_service_webserver/fogbugz/__init__.py b/services/web/server/src/simcore_service_webserver/fogbugz/__init__.py new file mode 100644 index 000000000000..9ed9609c63c2 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/fogbugz/__init__.py @@ -0,0 +1,8 @@ +# mypy: disable-error-code=truthy-function +from ._client import FogbugzCaseCreate, FogbugzRestClient, get_fogbugz_rest_client + +__all__ = [ + "get_fogbugz_rest_client", + "FogbugzCaseCreate", + "FogbugzRestClient", +] diff --git a/services/web/server/src/simcore_service_webserver/fogbugz/_client.py b/services/web/server/src/simcore_service_webserver/fogbugz/_client.py new file mode 100644 index 000000000000..c92ca3f5d240 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/fogbugz/_client.py @@ -0,0 +1,199 @@ +"""Interface to communicate with Fogbugz API + +- Simple client to create cases in Fogbugz +""" + +import json +import logging +from typing import Any +from urllib.parse import urljoin + +import httpx +from aiohttp import web +from pydantic import AnyUrl, BaseModel, Field, SecretStr + +from ..products import products_service +from ..products.models import Product +from .settings import get_plugin_settings + +_logger = logging.getLogger(__name__) + +_JSON_CONTENT_TYPE = "application/json" +_UNKNOWN_ERROR_MESSAGE = "Unknown error occurred" + + +class FogbugzCaseCreate(BaseModel): + fogbugz_project_id: int = Field(description="Project ID in Fogbugz") + title: str = Field(description="Case title") + description: str = Field(description="Case description/first comment") + + +class FogbugzRestClient: + """REST client for Fogbugz API""" + + def __init__(self, api_token: SecretStr, base_url: AnyUrl) -> None: + self._client = httpx.AsyncClient() + self._api_token = api_token + self._base_url = base_url + + async def _make_api_request(self, json_payload: dict[str, Any]) -> dict[str, Any]: + """Make a request to Fogbugz API with common formatting""" + # Fogbugz requires multipart/form-data with stringified JSON + files = {"request": (None, json.dumps(json_payload), _JSON_CONTENT_TYPE)} + + url = urljoin(f"{self._base_url}", "f/api/0/jsonapi") + + response = 
await self._client.post(url, files=files) + response.raise_for_status() + response_data: dict[str, Any] = response.json() + return response_data + + async def create_case(self, data: FogbugzCaseCreate) -> str: + """Create a new case in Fogbugz""" + json_payload = { + "cmd": "new", + "token": self._api_token.get_secret_value(), + "ixProject": f"{data.fogbugz_project_id}", + "sTitle": data.title, + "sEvent": data.description, + } + + response_data = await self._make_api_request(json_payload) + + # Fogbugz API returns case ID in the response + case_id = response_data.get("data", {}).get("case", {}).get("ixBug", None) + if case_id is None: + msg = "Failed to create case in Fogbugz" + raise ValueError(msg) + + return str(case_id) + + async def resolve_case(self, case_id: str) -> None: + """Resolve a case in Fogbugz""" + json_payload = { + "cmd": "resolve", + "token": self._api_token.get_secret_value(), + "ixBug": case_id, + } + + response_data = await self._make_api_request(json_payload) + + # Check if the operation was successful + if response_data.get("error"): + error_msg = response_data.get("error", _UNKNOWN_ERROR_MESSAGE) + msg = f"Failed to resolve case in Fogbugz: {error_msg}" + raise ValueError(msg) + + async def get_case_status(self, case_id: str) -> str: + """Get the status of a case in Fogbugz""" + json_payload = { + "cmd": "search", + "token": self._api_token.get_secret_value(), + "q": case_id, + "cols": "sStatus", + } + + response_data = await self._make_api_request(json_payload) + + # Check if the operation was successful + if response_data.get("error"): + error_msg = response_data.get("error", _UNKNOWN_ERROR_MESSAGE) + msg = f"Failed to get case status from Fogbugz: {error_msg}" + raise ValueError(msg) + + # Extract the status from the search results + cases = response_data.get("data", {}).get("cases", []) + if not cases: + msg = f"Case {case_id} not found in Fogbugz" + raise ValueError(msg) + + # Find the case with matching ixBug + target_case = None + for case in cases: + if str(case.get("ixBug")) == str(case_id): + target_case = case + break + + if target_case is None: + msg = f"Case {case_id} not found in search results" + raise ValueError(msg) + + # Get the status from the found case + status: str = target_case.get("sStatus", "") + if not status: + msg = f"Status not found for case {case_id}" + raise ValueError(msg) + + return status + + async def reopen_case(self, case_id: str, assigned_fogbugz_person_id: str) -> None: + """Reopen a case in Fogbugz (uses reactivate for resolved cases, reopen for closed cases)""" + # First get the current status to determine which command to use + current_status = await self.get_case_status(case_id) + + # Determine the command based on current status + if current_status.lower().startswith("resolved"): + cmd = "reactivate" + elif current_status.lower().startswith("closed"): + cmd = "reopen" + else: + msg = f"Cannot reopen case {case_id} with status '{current_status}'. Only resolved or closed cases can be reopened." 
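
A usage sketch of the client being added in this module, with placeholder credentials and project id (the constructor signature and method names are taken from the code above; the values are illustrative only):

```python
from pydantic import AnyUrl, SecretStr, TypeAdapter

from simcore_service_webserver.fogbugz import FogbugzCaseCreate, FogbugzRestClient


async def open_support_case() -> str:
    client = FogbugzRestClient(
        api_token=SecretStr("REPLACE_ME"),  # placeholder token
        base_url=TypeAdapter(AnyUrl).validate_python("https://example.fogbugz.com"),
    )
    case_id = await client.create_case(
        FogbugzCaseCreate(
            fogbugz_project_id=123,  # placeholder Fogbugz project
            title="User reported issue",
            description="First comment describing the problem",
        )
    )
    # ... later, once the conversation is handled:
    await client.resolve_case(case_id)
    return case_id
```
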
+ raise ValueError(msg) + + json_payload = { + "cmd": cmd, + "token": self._api_token.get_secret_value(), + "ixBug": case_id, + "ixPersonAssignedTo": assigned_fogbugz_person_id, + } + + response_data = await self._make_api_request(json_payload) + + # Check if the operation was successful + if response_data.get("error"): + error_msg = response_data.get("error", _UNKNOWN_ERROR_MESSAGE) + msg = f"Failed to reopen case in Fogbugz: {error_msg}" + raise ValueError(msg) + + +_APP_KEY = f"{__name__}.{FogbugzRestClient.__name__}" + + +async def setup_fogbugz_rest_client(app: web.Application) -> None: + """Setup Fogbugz REST client""" + settings = get_plugin_settings(app) + + # Fail fast if unexpected configuration + products: list[Product] = products_service.list_products(app=app) + for product in products: + if product.support_standard_group_id is not None: + if product.support_assigned_fogbugz_person_id is None: + msg = ( + f"Product '{product.name}' has support_standard_group_id set " + "but `support_assigned_fogbugz_person_id` is not configured." + ) + raise ValueError(msg) + if product.support_assigned_fogbugz_project_id is None: + msg = ( + f"Product '{product.name}' has support_standard_group_id set " + "but `support_assigned_fogbugz_project_id` is not configured." + ) + raise ValueError(msg) + else: + _logger.info( + "Product '%s' has support conversation disabled (therefore Fogbugz integration is not necessary for this product)", + product.name, + ) + + client = FogbugzRestClient( + api_token=settings.FOGBUGZ_API_TOKEN, + base_url=settings.FOGBUGZ_URL, + ) + + app[_APP_KEY] = client + + +def get_fogbugz_rest_client(app: web.Application) -> FogbugzRestClient: + """Get Fogbugz REST client from app state""" + app_key: FogbugzRestClient = app[_APP_KEY] + return app_key diff --git a/services/web/server/src/simcore_service_webserver/fogbugz/plugin.py b/services/web/server/src/simcore_service_webserver/fogbugz/plugin.py new file mode 100644 index 000000000000..7b5c6dc7eb92 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/fogbugz/plugin.py @@ -0,0 +1,22 @@ +"""tags management subsystem""" + +import logging + +from aiohttp import web + +from ..application_setup import ModuleCategory, app_setup_func +from ..products.plugin import setup_products +from ._client import setup_fogbugz_rest_client + +_logger = logging.getLogger(__name__) + + +@app_setup_func( + __name__, + ModuleCategory.ADDON, + settings_name="WEBSERVER_FOGBUGZ", + logger=_logger, +) +def setup_fogbugz(app: web.Application): + setup_products(app) + app.on_startup.append(setup_fogbugz_rest_client) diff --git a/services/web/server/src/simcore_service_webserver/fogbugz/settings.py b/services/web/server/src/simcore_service_webserver/fogbugz/settings.py new file mode 100644 index 000000000000..78874a60a357 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/fogbugz/settings.py @@ -0,0 +1,17 @@ +from aiohttp import web +from pydantic import AnyUrl, SecretStr +from settings_library.base import BaseCustomSettings + +from ..constants import APP_SETTINGS_KEY + + +class FogbugzSettings(BaseCustomSettings): + FOGBUGZ_API_TOKEN: SecretStr + FOGBUGZ_URL: AnyUrl + + +def get_plugin_settings(app: web.Application) -> FogbugzSettings: + settings = app[APP_SETTINGS_KEY].WEBSERVER_FOGBUGZ + assert settings, "plugin.setup_fogbugz not called?" 
# nosec + assert isinstance(settings, FogbugzSettings) # nosec + return settings diff --git a/services/web/server/src/simcore_service_webserver/folders/_common/exceptions_handlers.py b/services/web/server/src/simcore_service_webserver/folders/_common/exceptions_handlers.py index d117b870c970..be8445c20135 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_common/exceptions_handlers.py +++ b/services/web/server/src/simcore_service_webserver/folders/_common/exceptions_handlers.py @@ -1,5 +1,6 @@ import logging +from common_library.user_messages import user_message from servicelib.aiohttp import status from ...exception_handling import ( @@ -31,44 +32,57 @@ _TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { FolderNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Folder was not found", + user_message("The requested folder could not be found.", _version=1), ), WorkspaceNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Workspace was not found", + user_message("The requested workspace could not be found.", _version=1), ), FolderAccessForbiddenError: HttpErrorInfo( status.HTTP_403_FORBIDDEN, - "Does not have access to this folder", + user_message("You do not have permission to access this folder.", _version=1), ), WorkspaceAccessForbiddenError: HttpErrorInfo( status.HTTP_403_FORBIDDEN, - "Does not have access to this workspace", + user_message( + "You do not have permission to access this workspace.", _version=1 + ), ), WorkspaceFolderInconsistencyError: HttpErrorInfo( status.HTTP_403_FORBIDDEN, - "This folder does not exist in this workspace", + user_message( + "This folder is not available in the selected workspace.", _version=1 + ), ), FolderValueNotPermittedError: HttpErrorInfo( status.HTTP_409_CONFLICT, - "Provided folder value is not permitted: {reason}", + user_message("The folder operation cannot be completed: {details}", _version=1), ), FoldersValueError: HttpErrorInfo( status.HTTP_409_CONFLICT, - "Invalid folder value set: {reason}", + user_message("The folder configuration is invalid: {details}", _version=1), ), ProjectInvalidRightsError: HttpErrorInfo( status.HTTP_403_FORBIDDEN, - "Access Denied: You do not have permission to move the project with UUID: {project_uuid}. Tip: Copy and paste the UUID into the search bar to locate the project.", + user_message( + "You do not have permission to move the project with UUID: {project_uuid}. To locate this project, copy and paste the UUID into the search bar.", + _version=1, + ), ), # Trashing ProjectRunningConflictError: HttpErrorInfo( status.HTTP_409_CONFLICT, - "One or more studies in this folder are in use and cannot be trashed. Please stop all services first and try again", + user_message( + "Cannot move folder to trash because it contains projects that are currently running. Please stop all running services first and try again.", + _version=2, + ), ), ProjectStoppingError: HttpErrorInfo( status.HTTP_503_SERVICE_UNAVAILABLE, - "Something went wrong while stopping running services in studies within this folder before trashing. Aborting trash.", + user_message( + "Something went wrong while stopping running services in projects within this folder before trashing. 
Aborting trash.", + _version=2, + ), ), } diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_repository.py b/services/web/server/src/simcore_service_webserver/folders/_folders_repository.py index 140f0594b779..641aceb2c8f0 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_repository.py +++ b/services/web/server/src/simcore_service_webserver/folders/_folders_repository.py @@ -338,7 +338,7 @@ async def get( row = result.first() if row is None: raise FolderAccessForbiddenError( - reason=f"Folder {folder_id} does not exist.", + details=f"Folder {folder_id} does not exist.", ) return FolderDB.model_validate(row) @@ -369,7 +369,7 @@ async def get_for_user_or_workspace( row = await result.first() if row is None: raise FolderAccessForbiddenError( - reason=f"User does not have access to the folder {folder_id}. Or folder does not exist.", + details=f"User does not have access to the folder {folder_id}. Or folder does not exist.", ) return FolderDB.model_validate(row) @@ -421,7 +421,7 @@ async def update( result = await conn.stream(query) row = await result.first() if row is None: - raise FolderNotFoundError(reason=f"Folder {folders_id_or_ids} not found.") + raise FolderNotFoundError(details=f"Folder {folders_id_or_ids} not found.") return FolderDB.model_validate(row) diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_rest.py b/services/web/server/src/simcore_service_webserver/folders/_folders_rest.py index 5219a500f1a0..b43c75ca571e 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_rest.py +++ b/services/web/server/src/simcore_service_webserver/folders/_folders_rest.py @@ -8,7 +8,7 @@ ) from models_library.folders import FolderTuple from models_library.rest_ordering import OrderBy -from models_library.rest_pagination import ItemT, Page +from models_library.rest_pagination import Page from models_library.rest_pagination_utils import paginate_data from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import ( @@ -16,13 +16,11 @@ parse_request_path_parameters_as, parse_request_query_parameters_as, ) -from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON -from servicelib.rest_constants import RESPONSE_MODEL_POLICY from .._meta import API_VTAG as VTAG from ..login.decorators import login_required from ..security.decorators import permission_required -from ..utils_aiohttp import envelope_json_response +from ..utils_aiohttp import create_json_response_from_page, envelope_json_response from . 
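
The `reason` to `details` renames above all follow the same convention: webserver error classes declare a `msg_template` and are raised with keyword context that fills it. A minimal self-contained sketch of that convention (the real `WebServerBaseError` base class may differ):

```python
class WebServerErrorSketch(Exception):
    """Stand-in for the project's WebServerBaseError."""

    msg_template: str = "{details}"

    def __init__(self, **ctx) -> None:
        self.ctx = ctx
        super().__init__(self.msg_template.format(**ctx))


class FolderNotFoundSketch(WebServerErrorSketch):
    msg_template = "Folder not found: {details}"


err = FolderNotFoundSketch(details="Folder 42 does not exist.")
assert str(err) == "Folder not found: Folder 42 does not exist."
```
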
import _folders_service from ._common.exceptions_handlers import handle_plugin_requests_exceptions from ._common.models import ( @@ -39,13 +37,6 @@ routes = web.RouteTableDef() -def _create_json_response_from_page(page: Page[ItemT]): - return web.Response( - text=page.model_dump_json(**RESPONSE_MODEL_POLICY), - content_type=MIMETYPE_APPLICATION_JSON, - ) - - @routes.post(f"/{VTAG}/folders", name="create_folder") @login_required @permission_required("folder.create") @@ -107,7 +98,7 @@ async def list_folders(request: web.Request): offset=query_params.offset, ) ) - return _create_json_response_from_page(page) + return create_json_response_from_page(page) @routes.get(f"/{VTAG}/folders:search", name="list_folders_full_search") @@ -143,7 +134,7 @@ async def list_folders_full_search(request: web.Request): offset=query_params.offset, ) ) - return _create_json_response_from_page(page) + return create_json_response_from_page(page) @routes.get(f"/{VTAG}/folders/{{folder_id}}", name="get_folder") diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_service.py b/services/web/server/src/simcore_service_webserver/folders/_folders_service.py index 8b0b63465b01..af727bde0474 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_service.py +++ b/services/web/server/src/simcore_service_webserver/folders/_folders_service.py @@ -13,7 +13,7 @@ from pydantic import NonNegativeInt from ..projects._projects_service import delete_project_by_user -from ..users.api import get_user +from ..users.users_service import get_user from ..workspaces.api import check_user_workspace_access from ..workspaces.errors import ( WorkspaceAccessForbiddenError, @@ -70,7 +70,7 @@ async def create_folder( if workspace_id and parent_folder_db.workspace_id != workspace_id: # Check parent folder id exists inside the same workspace raise WorkspaceAccessForbiddenError( - reason=f"Folder {parent_folder_id} does not exists in workspace {workspace_id}." + details=f"Folder {parent_folder_id} does not exists in workspace {workspace_id}." ) folder_db = await _folders_repository.create( @@ -291,7 +291,7 @@ async def update_folder( ) if parent_folder_id in _child_folders: raise FolderValueNotPermittedError( - reason="Parent folder id should not be one of children" + details="Parent folder id should not be one of children" ) folder_db = await _folders_repository.update( diff --git a/services/web/server/src/simcore_service_webserver/folders/_trash_service.py b/services/web/server/src/simcore_service_webserver/folders/_trash_service.py index 266e1447a112..5cb56aa53d1c 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_trash_service.py +++ b/services/web/server/src/simcore_service_webserver/folders/_trash_service.py @@ -280,7 +280,7 @@ async def delete_trashed_folder( raise FolderNotTrashedError( folder_id=folder_id, user_id=user_id, - reason="Cannot delete trashed folder since it does not fit current criteria", + details="Cannot delete trashed folder since it does not fit current criteria", ) # NOTE: this function deletes folder AND its content recursively! 
diff --git a/services/web/server/src/simcore_service_webserver/folders/_workspaces_repository.py b/services/web/server/src/simcore_service_webserver/folders/_workspaces_repository.py index 9535ec3fd7c9..9d56d0c7236d 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_workspaces_repository.py +++ b/services/web/server/src/simcore_service_webserver/folders/_workspaces_repository.py @@ -8,11 +8,12 @@ from simcore_postgres_database.utils_repos import transaction_context from ..db.plugin import get_asyncpg_engine +from ..models import ClientSessionID from ..projects import _folders_repository as projects_folders_repository from ..projects import _groups_repository as projects_groups_repository -from ..projects import _projects_repository as _projects_repository from ..projects._access_rights_service import check_user_project_permission -from ..users.api import get_user +from ..projects.api import patch_project_and_notify_users +from ..users import users_service from ..workspaces.api import check_user_workspace_access from . import _folders_repository @@ -26,6 +27,7 @@ async def move_folder_into_workspace( folder_id: FolderID, workspace_id: WorkspaceID | None, product_name: ProductName, + client_session_id: ClientSessionID | None = None, ) -> None: # 1. User needs to have delete permission on source folder folder_db = await _folders_repository.get( @@ -75,14 +77,16 @@ async def move_folder_into_workspace( # ⬆️ Here we have already guaranties that user has all the right permissions to do this operation ⬆️ + user: dict = await users_service.get_user(app, user_id) async with transaction_context(get_asyncpg_engine(app)) as conn: # 4. Update workspace ID on the project resource for project_id in project_ids: - await _projects_repository.patch_project( + await patch_project_and_notify_users( app=app, - connection=conn, project_uuid=project_id, - new_partial_project_data={"workspace_id": workspace_id}, + patch_project_data={"workspace_id": workspace_id}, + user_primary_gid=user["primary_gid"], + client_session_id=client_session_id, ) # 5. BATCH update of folders with workspace_id @@ -122,7 +126,6 @@ async def move_folder_into_workspace( ) # 9. Remove all project permissions, leave only the user who moved the project - user = await get_user(app, user_id=user_id) for project_id in project_ids: await projects_groups_repository.delete_all_project_groups( app, connection=conn, project_id=project_id diff --git a/services/web/server/src/simcore_service_webserver/folders/_workspaces_rest.py b/services/web/server/src/simcore_service_webserver/folders/_workspaces_rest.py index b327e84e5747..a7ae1e1d67de 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_workspaces_rest.py +++ b/services/web/server/src/simcore_service_webserver/folders/_workspaces_rest.py @@ -2,10 +2,14 @@ from aiohttp import web from servicelib.aiohttp import status -from servicelib.aiohttp.requests_validation import parse_request_path_parameters_as +from servicelib.aiohttp.requests_validation import ( + parse_request_headers_as, + parse_request_path_parameters_as, +) from .._meta import api_version_prefix as VTAG from ..login.decorators import login_required +from ..models import ClientSessionHeaderParams from ..security.decorators import permission_required from . 
import _workspaces_repository from ._common.exceptions_handlers import handle_plugin_requests_exceptions @@ -27,6 +31,7 @@ async def move_folder_to_workspace(request: web.Request): req_ctx = FoldersRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(FolderWorkspacesPathParams, request) + header_params = parse_request_headers_as(ClientSessionHeaderParams, request) await _workspaces_repository.move_folder_into_workspace( app=request.app, @@ -34,5 +39,6 @@ async def move_folder_to_workspace(request: web.Request): folder_id=path_params.folder_id, workspace_id=path_params.workspace_id, product_name=req_ctx.product_name, + client_session_id=header_params.client_session_id, ) return web.json_response(status=status.HTTP_204_NO_CONTENT) diff --git a/services/web/server/src/simcore_service_webserver/folders/errors.py b/services/web/server/src/simcore_service_webserver/folders/errors.py index e8f2e346868b..39789d165684 100644 --- a/services/web/server/src/simcore_service_webserver/folders/errors.py +++ b/services/web/server/src/simcore_service_webserver/folders/errors.py @@ -1,33 +1,31 @@ from ..errors import WebServerBaseError -class FoldersValueError(WebServerBaseError, ValueError): - ... +class FoldersValueError(WebServerBaseError, ValueError): ... class FolderValueNotPermittedError(FoldersValueError): - msg_template = "Provided value is not permitted. {reason}" + msg_template = "Provided value is not permitted: {details}" class FolderNotFoundError(FoldersValueError): - msg_template = "Folder not found. {reason}" + msg_template = "Folder not found: {details}" class FolderAccessForbiddenError(FoldersValueError): - msg_template = "Folder access forbidden. {reason}" + msg_template = "Folder access forbidden: {details}" class FolderGroupNotFoundError(FoldersValueError): - msg_template = "Folder group not found. {reason}" + msg_template = "Folder group not found: {details}" -class FoldersRuntimeError(WebServerBaseError, RuntimeError): - ... +class FoldersRuntimeError(WebServerBaseError, RuntimeError): ... class FolderNotTrashedError(FoldersRuntimeError): msg_template = ( - "Cannot delete folder {folder_id} since it was not trashed first: {reason}" + "Cannot delete folder {folder_id} since it was not trashed first: {details}" ) diff --git a/services/web/server/src/simcore_service_webserver/folders/plugin.py b/services/web/server/src/simcore_service_webserver/folders/plugin.py index ec1e3f80ffe4..7422e98aabc9 100644 --- a/services/web/server/src/simcore_service_webserver/folders/plugin.py +++ b/services/web/server/src/simcore_service_webserver/folders/plugin.py @@ -1,18 +1,17 @@ -""" tags management subsystem +"""tags management subsystem""" -""" import logging from aiohttp import web -from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func +from ..constants import APP_SETTINGS_KEY from . 
import _folders_rest, _trash_rest, _workspaces_rest _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_FOLDERS", diff --git a/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rest.py b/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rest.py index 534940e67e88..d1be5c5928ce 100644 --- a/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rest.py +++ b/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rest.py @@ -1,44 +1,143 @@ +from typing import Any + from aiohttp import web from models_library.api_schemas_webserver.functions import ( Function, + FunctionGroupAccessRightsGet, + FunctionGroupAccessRightsUpdate, FunctionToRegister, RegisteredFunction, RegisteredFunctionGet, + RegisteredFunctionUpdate, +) +from models_library.api_schemas_webserver.users import MyFunctionPermissionsGet +from models_library.functions import ( + FunctionClass, + FunctionGroupAccessRights, + FunctionID, + RegisteredProjectFunction, + RegisteredSolverFunction, ) +from models_library.groups import GroupID +from models_library.products import ProductName +from models_library.rest_ordering import OrderBy +from models_library.rest_pagination import Page +from models_library.rest_pagination_utils import paginate_data +from models_library.users import UserID from pydantic import TypeAdapter from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import ( handle_validation_as_http_error, parse_request_path_parameters_as, + parse_request_query_parameters_as, ) -from simcore_service_webserver.utils_aiohttp import envelope_json_response from ..._meta import API_VTAG as VTAG from ...login.decorators import login_required -from ...models import RequestContext +from ...models import AuthenticatedRequestContext +from ...projects import _projects_service +from ...projects.models import ProjectDBGet from ...security.decorators import permission_required +from ...utils_aiohttp import create_json_response_from_page, envelope_json_response from .. 
import _functions_service +from .._services_metadata import proxy as _services_metadata_proxy +from .._services_metadata.proxy import ServiceMetadata from ._functions_rest_exceptions import handle_rest_requests_exceptions -from ._functions_rest_schemas import FunctionPathParams +from ._functions_rest_schemas import ( + FunctionDeleteQueryParams, + FunctionFilters, + FunctionGetQueryParams, + FunctionGroupPathParams, + FunctionPathParams, + FunctionsListQueryParams, +) routes = web.RouteTableDef() +async def _build_function_group_access_rights( + app: web.Application, + user_id: UserID, + product_name: ProductName, + function_id: FunctionID, +) -> dict[GroupID, FunctionGroupAccessRightsGet]: + access_rights_list = await _functions_service.list_function_group_permissions( + app=app, + user_id=user_id, + product_name=product_name, + function_id=function_id, + ) + + return { + access_rights.group_id: FunctionGroupAccessRightsGet( + read=access_rights.read, + write=access_rights.write, + execute=access_rights.execute, + ) + for access_rights in access_rights_list + } + + +def _build_project_function_extras_dict( + project: ProjectDBGet, +) -> dict[str, Any]: + extras: dict[str, Any] = {} + if thumbnail := project.thumbnail: + extras["thumbnail"] = thumbnail + return extras + + +def _build_solver_function_extras_dict( + service_metadata: ServiceMetadata, +) -> dict[str, Any]: + + extras: dict[str, Any] = {} + if thumbnail := service_metadata.thumbnail: + extras["thumbnail"] = thumbnail + return extras + + +async def _build_function_extras( + app: web.Application, *, function: RegisteredFunction +) -> dict[str, Any]: + extras: dict[str, Any] = {} + match function.function_class: + case FunctionClass.PROJECT: + assert isinstance(function, RegisteredProjectFunction) + projects = await _projects_service.batch_get_projects( + app=app, + project_uuids=[function.project_id], + ) + if project := projects.get(function.project_id): + extras |= _build_project_function_extras_dict( + project=project, + ) + case FunctionClass.SOLVER: + assert isinstance(function, RegisteredSolverFunction) + services_metadata = await _services_metadata_proxy.get_service_metadata( + app, + key=function.solver_key, + version=function.solver_version, + ) + extras |= _build_solver_function_extras_dict( + service_metadata=services_metadata, + ) + return extras + + @routes.post(f"/{VTAG}/functions", name="register_function") @login_required -@permission_required("function.create") @handle_rest_requests_exceptions async def register_function(request: web.Request) -> web.Response: with handle_validation_as_http_error( error_msg_template="Invalid parameter/s '{failed}' in request path", resource_name=request.rel_url.path, - use_error_v1=True, ): function_to_register: FunctionToRegister = TypeAdapter( FunctionToRegister ).validate_python(await request.json()) - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) registered_function: RegisteredFunction = ( await _functions_service.register_function( app=request.app, @@ -48,14 +147,129 @@ async def register_function(request: web.Request) -> web.Response: ) ) + access_rights = await _build_function_group_access_rights( + request.app, + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + function_id=registered_function.uid, + ) + return envelope_json_response( TypeAdapter(RegisteredFunctionGet).validate_python( registered_function.model_dump(mode="json") + | {"access_rights": access_rights} ), web.HTTPCreated, ) 
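The handlers in this module all follow one response-building pattern: dump the registered function, union the dict with computed fields (access_rights, optional extras), and re-validate the merged payload into the REST schema with TypeAdapter. A minimal, self-contained illustration of that pydantic-v2 pattern follows; the two models are invented stand-ins for this example only, not the project's actual schemas.

from pydantic import BaseModel, TypeAdapter


class DomainItem(BaseModel):  # stand-in for a registered domain model
    uid: str
    title: str


class RestItem(DomainItem):  # stand-in for the REST schema returned to clients
    access_rights: dict[str, bool] = {}
    thumbnail: str | None = None


domain = DomainItem(uid="42", title="demo")

# dict-union the dumped model with the computed fields, then validate the
# merged payload against the richer REST schema
rest = TypeAdapter(RestItem).validate_python(
    domain.model_dump() | {"access_rights": {"read": True}}
)
assert rest.access_rights == {"read": True}
assert rest.thumbnail is None

The union keeps the domain model free of REST-only concerns while still producing a fully validated response payload.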
+@routes.get( + f"/{VTAG}/functions", + name="list_functions", +) +@login_required +@permission_required("function.read") +@handle_rest_requests_exceptions +async def list_functions(request: web.Request) -> web.Response: # noqa: C901 + query_params: FunctionsListQueryParams = parse_request_query_parameters_as( + FunctionsListQueryParams, request + ) + + if not query_params.filters: + query_params.filters = FunctionFilters() + + assert query_params.filters # nosec + + req_ctx = AuthenticatedRequestContext.model_validate(request) + functions, page_meta_info = await _functions_service.list_functions( + request.app, + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + pagination_limit=query_params.limit, + pagination_offset=query_params.offset, + order_by=OrderBy.model_construct(**query_params.order_by.model_dump()), + search_by_function_title=query_params.filters.search_by_title, + search_by_multi_columns=query_params.search, + ) + + chunk: list[RegisteredFunctionGet] = [] + + extras_map: dict[FunctionID, dict[str, Any]] = {} + + if query_params.include_extras: + if any( + function.function_class == FunctionClass.PROJECT for function in functions + ): + project_uuids = [ + function.project_id + for function in functions + if function.function_class == FunctionClass.PROJECT + ] + projects_cache = await _projects_service.batch_get_projects( + request.app, + project_uuids=project_uuids, + ) + for function in functions: + if function.function_class == FunctionClass.PROJECT: + project = projects_cache.get(function.project_id) + if not project: + continue + extras_map[function.uid] = _build_project_function_extras_dict( + project=project + ) + + if any( + function.function_class == FunctionClass.SOLVER for function in functions + ): + service_keys_and_versions = { + (function.solver_key, function.solver_version) + for function in functions + if function.function_class == FunctionClass.SOLVER + } + service_metadata_cache = ( + await _services_metadata_proxy.batch_get_service_metadata( + app=request.app, keys_and_versions=service_keys_and_versions + ) + ) + for function in functions: + if function.function_class == FunctionClass.SOLVER: + service_metadata = service_metadata_cache.get( + (function.solver_key, function.solver_version) + ) + if not service_metadata: + continue + extras_map[function.uid] = _build_solver_function_extras_dict( + service_metadata=service_metadata + ) + + for function in functions: + access_rights = await _build_function_group_access_rights( + request.app, + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + function_id=function.uid, + ) + + extras = extras_map.get(function.uid, {}) + + chunk.append( + TypeAdapter(RegisteredFunctionGet).validate_python( + function.model_dump() | {"access_rights": access_rights, **extras} + ) + ) + + page = Page[RegisteredFunctionGet].model_validate( + paginate_data( + chunk=chunk, + request_url=request.url, + total=page_meta_info.total, + limit=query_params.limit, + offset=query_params.offset, + ) + ) + return create_json_response_from_page(page) + + @routes.get( f"/{VTAG}/functions/{{function_id}}", name="get_function", @@ -67,17 +281,82 @@ async def get_function(request: web.Request) -> web.Response: path_params = parse_request_path_parameters_as(FunctionPathParams, request) function_id = path_params.function_id - req_ctx = RequestContext.model_validate(request) - registered_function: RegisteredFunction = await _functions_service.get_function( + query_params: FunctionGetQueryParams = 
parse_request_query_parameters_as( + FunctionGetQueryParams, request + ) + + req_ctx = AuthenticatedRequestContext.model_validate(request) + function = await _functions_service.get_function( app=request.app, function_id=function_id, user_id=req_ctx.user_id, product_name=req_ctx.product_name, ) + access_rights = await _build_function_group_access_rights( + request.app, + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + function_id=function_id, + ) + + extras = ( + await _build_function_extras(request.app, function=function) + if query_params.include_extras + else {} + ) + return envelope_json_response( TypeAdapter(RegisteredFunctionGet).validate_python( - registered_function.model_dump(mode="json") + function.model_dump() | {"access_rights": access_rights, **extras} + ) + ) + + +@routes.patch( + f"/{VTAG}/functions/{{function_id}}", + name="update_function", +) +@login_required +@permission_required("function.update") +@handle_rest_requests_exceptions +async def update_function(request: web.Request) -> web.Response: + path_params = parse_request_path_parameters_as(FunctionPathParams, request) + function_id = path_params.function_id + + query_params: FunctionGetQueryParams = parse_request_query_parameters_as( + FunctionGetQueryParams, request + ) + + function_update = TypeAdapter(RegisteredFunctionUpdate).validate_python( + await request.json() + ) + req_ctx = AuthenticatedRequestContext.model_validate(request) + + function = await _functions_service.update_function( + request.app, + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + function_id=function_id, + function=function_update, + ) + + access_rights = await _build_function_group_access_rights( + request.app, + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + function_id=function_id, + ) + + extras = ( + await _build_function_extras(request.app, function=function) + if query_params.include_extras + else {} + ) + + return envelope_json_response( + TypeAdapter(RegisteredFunctionGet).validate_python( + function.model_dump() | {"access_rights": access_rights, **extras} ) ) @@ -92,12 +371,150 @@ async def get_function(request: web.Request) -> web.Response: async def delete_function(request: web.Request) -> web.Response: path_params = parse_request_path_parameters_as(FunctionPathParams, request) function_id = path_params.function_id - req_ctx = RequestContext.model_validate(request) + + query_params: FunctionDeleteQueryParams = parse_request_query_parameters_as( + FunctionDeleteQueryParams, request + ) + + req_ctx = AuthenticatedRequestContext.model_validate(request) await _functions_service.delete_function( app=request.app, + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, function_id=function_id, + force=query_params.force, + ) + + return web.json_response(status=status.HTTP_204_NO_CONTENT) + + +# +# /functions/{function_id}/groups/* +# + + +@routes.get( + f"/{VTAG}/functions/{{function_id}}/groups", + name="get_function_groups", +) +@login_required +@permission_required("function.read") +@handle_rest_requests_exceptions +async def get_function_groups(request: web.Request) -> web.Response: + path_params = parse_request_path_parameters_as(FunctionPathParams, request) + function_id = path_params.function_id + + req_ctx = AuthenticatedRequestContext.model_validate(request) + access_rights_list = await _functions_service.list_function_group_permissions( + request.app, user_id=req_ctx.user_id, product_name=req_ctx.product_name, + function_id=function_id, + ) + + return 
envelope_json_response( + { + access_rights.group_id: FunctionGroupAccessRightsGet( + read=access_rights.read, + write=access_rights.write, + execute=access_rights.execute, + ) + for access_rights in access_rights_list + } + ) + + +@routes.put( + f"/{VTAG}/functions/{{function_id}}/groups/{{group_id}}", + name="create_or_update_function_group", +) +@login_required +@permission_required("function.update") +@handle_rest_requests_exceptions +async def create_or_update_function_group(request: web.Request) -> web.Response: + path_params = parse_request_path_parameters_as(FunctionGroupPathParams, request) + function_id = path_params.function_id + group_id = path_params.group_id + + req_ctx = AuthenticatedRequestContext.model_validate(request) + + function_group_update = FunctionGroupAccessRightsUpdate.model_validate( + await request.json() + ) + + updated_function_access_rights = ( + await _functions_service.set_function_group_permissions( + request.app, + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + function_id=function_id, + permissions=FunctionGroupAccessRights( + group_id=group_id, + read=function_group_update.read, + write=function_group_update.write, + execute=function_group_update.execute, + ), + ) + ) + + return envelope_json_response( + FunctionGroupAccessRightsGet( + read=updated_function_access_rights.read, + write=updated_function_access_rights.write, + execute=updated_function_access_rights.execute, + ) + ) + + +@routes.delete( + f"/{VTAG}/functions/{{function_id}}/groups/{{group_id}}", + name="delete_function_group", +) +@login_required +@permission_required("function.update") +@handle_rest_requests_exceptions +async def delete_function_group(request: web.Request) -> web.Response: + path_params = parse_request_path_parameters_as(FunctionGroupPathParams, request) + function_id = path_params.function_id + group_id = path_params.group_id + + req_ctx = AuthenticatedRequestContext.model_validate(request) + + await _functions_service.remove_function_group_permissions( + request.app, + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + function_id=function_id, + permission_group_id=group_id, ) return web.json_response(status=status.HTTP_204_NO_CONTENT) + + +# +# /me/* endpoints +# + + +@routes.get(f"/{VTAG}/me/function-permissions", name="list_user_functions_permissions") +@login_required +@handle_rest_requests_exceptions +async def list_user_functions_permissions(request: web.Request) -> web.Response: + req_ctx = AuthenticatedRequestContext.model_validate(request) + + function_permissions = ( + await _functions_service.get_functions_user_api_access_rights( + app=request.app, + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + ) + ) + + assert function_permissions.user_id == req_ctx.user_id # nosec + + return envelope_json_response( + MyFunctionPermissionsGet( + read_functions=function_permissions.read_functions, + write_functions=function_permissions.write_functions, + ) + ) diff --git a/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rest_exceptions.py b/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rest_exceptions.py index 44d7c691c574..4277cb874b5d 100644 --- a/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rest_exceptions.py +++ b/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rest_exceptions.py @@ -1,12 +1,5 @@ -from models_library.functions_errors import ( - FunctionIDNotFoundError, - 
FunctionJobCollectionIDNotFoundError, - FunctionJobIDNotFoundError, - UnsupportedFunctionClassError, - UnsupportedFunctionFunctionJobClassCombinationError, - UnsupportedFunctionJobClassError, -) -from servicelib.aiohttp import status +import inspect +import sys from ...exception_handling import ( ExceptionToHttpErrorMap, @@ -15,35 +8,24 @@ to_exceptions_handlers_map, ) +# Get all classes defined in functions_errors +function_error_classes = [ + obj + for name, obj in inspect.getmembers(sys.modules["models_library.functions_errors"]) + if inspect.isclass(obj) + and obj.__module__.startswith("models_library.functions_errors") +] + _TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { - FunctionIDNotFoundError: HttpErrorInfo( - status.HTTP_404_NOT_FOUND, - "Function id {function_id} was not found", - ), - UnsupportedFunctionClassError: HttpErrorInfo( - status.HTTP_400_BAD_REQUEST, - "Function class {function_class} is not supported. ", - ), - UnsupportedFunctionJobClassError: HttpErrorInfo( - status.HTTP_400_BAD_REQUEST, - "Function job class {function_job_class} is not supported. ", - ), - UnsupportedFunctionFunctionJobClassCombinationError: HttpErrorInfo( - status.HTTP_400_BAD_REQUEST, - "Function class {function_class} and function job class {function_job_class} " - "combination is not supported. ", - ), - FunctionJobIDNotFoundError: HttpErrorInfo( - status.HTTP_404_NOT_FOUND, - "Function job id {function_job_id} was not found", - ), - FunctionJobCollectionIDNotFoundError: HttpErrorInfo( - status.HTTP_404_NOT_FOUND, - "Function job collection id {function_job_collection_id} was not found", - ), + # Dynamically create error mappings for all function-related errors + cls: HttpErrorInfo( + status_code=cls.status_code, + msg_template=cls.msg_template, + ) + for cls in function_error_classes + if hasattr(cls, "status_code") and hasattr(cls, "msg_template") } - handle_rest_requests_exceptions = exception_handling_decorator( to_exceptions_handlers_map(_TO_HTTP_ERROR_MAP) ) diff --git a/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rest_schemas.py b/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rest_schemas.py index 1ec679477bcb..2e0e8e8b9d9e 100644 --- a/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rest_schemas.py +++ b/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rest_schemas.py @@ -1,9 +1,21 @@ +from typing import Annotated + +from models_library.basic_types import IDStr from models_library.functions import FunctionID -from pydantic import BaseModel, ConfigDict +from models_library.groups import GroupID +from models_library.rest_base import RequestParameters +from models_library.rest_filters import Filters, FiltersQueryParameters +from models_library.rest_ordering import ( + OrderBy, + OrderDirection, + create_ordering_query_model_class, +) +from models_library.rest_pagination import PageQueryParameters +from pydantic import BaseModel, ConfigDict, Field -from ...models import RequestContext +from ...models import AuthenticatedRequestContext -assert RequestContext.__name__ # nosec +assert AuthenticatedRequestContext.__name__ # nosec class FunctionPathParams(BaseModel): @@ -11,4 +23,69 @@ class FunctionPathParams(BaseModel): model_config = ConfigDict(populate_by_name=True, extra="forbid") -__all__: tuple[str, ...] 
= ("RequestContext",) +class FunctionGroupPathParams(FunctionPathParams): + group_id: GroupID + + +class FunctionQueryParams(BaseModel): + include_extras: bool = False + + +class FunctionGetQueryParams(FunctionQueryParams): ... + + +class FunctionFilters(Filters): + search_by_title: Annotated[ + str | None, + Field( + description="A search query to filter functions by their title. This field performs a case-insensitive partial match against the function title field.", + ), + ] = None + + +FunctionListOrderQueryParams: type[RequestParameters] = ( + create_ordering_query_model_class( + ordering_fields={ + "created_at", + "modified_at", + "name", + }, + default=OrderBy(field=IDStr("modified_at"), direction=OrderDirection.DESC), + ordering_fields_api_to_column_map={ + "created_at": "created", + "modified_at": "modified", + }, + ) +) + + +class FunctionsListExtraQueryParams(RequestParameters): + search: Annotated[ + str | None, + Field( + description="Multi column full text search", + max_length=100, + examples=["My Function"], + ), + ] = None + + +class FunctionsListQueryParams( + PageQueryParameters, + FunctionListOrderQueryParams, # type: ignore[misc, valid-type] + FiltersQueryParameters[FunctionFilters], + FunctionsListExtraQueryParams, + FunctionQueryParams, +): ... + + +class FunctionDeleteQueryParams(BaseModel): + force: Annotated[ + bool, + Field( + description="If true, deletes the function even if it has associated jobs; otherwise, returns HTTP_409_CONFLICT if jobs exist.", + ), + ] = False + + +__all__: tuple[str, ...] = ("AuthenticatedRequestContext",) diff --git a/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rpc.py b/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rpc.py index b678df299c37..00ae3660ff89 100644 --- a/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rpc.py +++ b/services/web/server/src/simcore_service_webserver/functions/_controller/_functions_rpc.py @@ -1,43 +1,69 @@ +from typing import Literal + from aiohttp import web -from models_library.api_schemas_webserver import WEBSERVER_RPC_NAMESPACE from models_library.functions import ( Function, + FunctionAccessRights, + FunctionClass, + FunctionGroupAccessRights, FunctionID, FunctionInputs, FunctionInputSchema, FunctionJob, FunctionJobCollection, + FunctionJobCollectionID, FunctionJobCollectionsListFilters, FunctionJobID, + FunctionJobStatus, + FunctionOutputs, FunctionOutputSchema, + FunctionUpdate, + FunctionUserApiAccessRights, RegisteredFunction, RegisteredFunctionJob, RegisteredFunctionJobCollection, + RegisteredFunctionJobPatch, + RegisteredFunctionJobWithStatus, ) from models_library.functions_errors import ( FunctionIDNotFoundError, FunctionJobCollectionIDNotFoundError, FunctionJobCollectionReadAccessDeniedError, + FunctionJobCollectionsReadApiAccessDeniedError, + FunctionJobCollectionsWriteApiAccessDeniedError, + FunctionJobCollectionWriteAccessDeniedError, FunctionJobIDNotFoundError, + FunctionJobPatchModelIncompatibleError, FunctionJobReadAccessDeniedError, + FunctionJobsReadApiAccessDeniedError, + FunctionJobsWriteApiAccessDeniedError, FunctionJobWriteAccessDeniedError, FunctionReadAccessDeniedError, + FunctionsReadApiAccessDeniedError, + FunctionsWriteApiAccessDeniedError, FunctionWriteAccessDeniedError, UnsupportedFunctionClassError, UnsupportedFunctionJobClassError, ) +from models_library.groups import GroupID from models_library.products import ProductName +from models_library.rest_ordering import 
OrderBy from models_library.rest_pagination import PageMetaInfoLimitOffset from models_library.users import UserID from servicelib.rabbitmq import RPCRouter -from ...rabbitmq import get_rabbitmq_rpc_server +from ...rabbitmq import create_register_rpc_routes_on_startup from .. import _functions_repository, _functions_service router = RPCRouter() -@router.expose(reraise_if_error_type=(UnsupportedFunctionClassError,)) +@router.expose( + reraise_if_error_type=( + UnsupportedFunctionClassError, + FunctionsWriteApiAccessDeniedError, + ) +) async def register_function( app: web.Application, *, @@ -50,7 +76,12 @@ async def register_function( ) -@router.expose(reraise_if_error_type=(UnsupportedFunctionJobClassError,)) +@router.expose( + reraise_if_error_type=( + UnsupportedFunctionJobClassError, + FunctionJobsWriteApiAccessDeniedError, + ) +) async def register_function_job( app: web.Application, *, @@ -63,7 +94,32 @@ async def register_function_job( ) -@router.expose(reraise_if_error_type=()) +@router.expose( + reraise_if_error_type=( + UnsupportedFunctionJobClassError, + FunctionJobsWriteApiAccessDeniedError, + FunctionJobPatchModelIncompatibleError, + ) +) +async def patch_registered_function_job( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, + function_job_uuid: FunctionJobID, + registered_function_job_patch: RegisteredFunctionJobPatch, +) -> RegisteredFunctionJob: + + return await _functions_service.patch_registered_function_job( + app=app, + user_id=user_id, + product_name=product_name, + function_job_uuid=function_job_uuid, + registered_function_job_patch=registered_function_job_patch, + ) + + +@router.expose(reraise_if_error_type=(FunctionJobCollectionsWriteApiAccessDeniedError,)) async def register_function_job_collection( app: web.Application, *, @@ -80,7 +136,11 @@ async def register_function_job_collection( @router.expose( - reraise_if_error_type=(FunctionIDNotFoundError, FunctionReadAccessDeniedError) + reraise_if_error_type=( + FunctionIDNotFoundError, + FunctionReadAccessDeniedError, + FunctionsReadApiAccessDeniedError, + ) ) async def get_function( app: web.Application, @@ -98,7 +158,11 @@ async def get_function( @router.expose( - reraise_if_error_type=(FunctionJobIDNotFoundError, FunctionJobReadAccessDeniedError) + reraise_if_error_type=( + FunctionJobIDNotFoundError, + FunctionJobReadAccessDeniedError, + FunctionJobsReadApiAccessDeniedError, + ) ) async def get_function_job( app: web.Application, @@ -119,6 +183,7 @@ async def get_function_job( reraise_if_error_type=( FunctionJobCollectionIDNotFoundError, FunctionJobCollectionReadAccessDeniedError, + FunctionJobCollectionsReadApiAccessDeniedError, ) ) async def get_function_job_collection( @@ -136,7 +201,7 @@ async def get_function_job_collection( ) -@router.expose() +@router.expose(reraise_if_error_type=(FunctionsReadApiAccessDeniedError,)) async def list_functions( app: web.Application, *, @@ -144,6 +209,10 @@ async def list_functions( product_name: ProductName, pagination_limit: int, pagination_offset: int, + order_by: OrderBy | None = None, + filter_by_function_class: FunctionClass | None = None, + search_by_function_title: str | None = None, + search_by_multi_columns: str | None = None, ) -> tuple[list[RegisteredFunction], PageMetaInfoLimitOffset]: return await _functions_service.list_functions( app=app, @@ -151,10 +220,19 @@ async def list_functions( product_name=product_name, pagination_limit=pagination_limit, pagination_offset=pagination_offset, + order_by=order_by, + 
filter_by_function_class=filter_by_function_class, + search_by_function_title=search_by_function_title, + search_by_multi_columns=search_by_multi_columns, ) -@router.expose() +@router.expose( + reraise_if_error_type=( + FunctionJobsReadApiAccessDeniedError, + FunctionsReadApiAccessDeniedError, + ) +) async def list_function_jobs( app: web.Application, *, @@ -163,6 +241,8 @@ async def list_function_jobs( pagination_limit: int, pagination_offset: int, filter_by_function_id: FunctionID | None = None, + filter_by_function_job_ids: list[FunctionJobID] | None = None, + filter_by_function_job_collection_id: FunctionJobCollectionID | None = None, ) -> tuple[list[RegisteredFunctionJob], PageMetaInfoLimitOffset]: return await _functions_service.list_function_jobs( app=app, @@ -171,10 +251,50 @@ async def list_function_jobs( pagination_limit=pagination_limit, pagination_offset=pagination_offset, filter_by_function_id=filter_by_function_id, + filter_by_function_job_ids=filter_by_function_job_ids, + filter_by_function_job_collection_id=filter_by_function_job_collection_id, ) -@router.expose() +@router.expose( + reraise_if_error_type=( + FunctionJobsReadApiAccessDeniedError, + FunctionsReadApiAccessDeniedError, + ) +) +async def list_function_jobs_with_status( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, + pagination_limit: int, + pagination_offset: int, + filter_by_function_id: FunctionID | None = None, + filter_by_function_job_ids: list[FunctionJobID] | None = None, + filter_by_function_job_collection_id: FunctionJobCollectionID | None = None, +) -> tuple[ + list[RegisteredFunctionJobWithStatus], + PageMetaInfoLimitOffset, +]: + return await _functions_service.list_function_jobs_with_status( + app=app, + user_id=user_id, + product_name=product_name, + pagination_limit=pagination_limit, + pagination_offset=pagination_offset, + filter_by_function_id=filter_by_function_id, + filter_by_function_job_ids=filter_by_function_job_ids, + filter_by_function_job_collection_id=filter_by_function_job_collection_id, + ) + + +@router.expose( + reraise_if_error_type=( + FunctionJobCollectionsReadApiAccessDeniedError, + FunctionJobsReadApiAccessDeniedError, + FunctionsReadApiAccessDeniedError, + ) +) async def list_function_job_collections( app: web.Application, *, @@ -199,6 +319,8 @@ async def list_function_job_collections( FunctionIDNotFoundError, FunctionReadAccessDeniedError, FunctionWriteAccessDeniedError, + FunctionsWriteApiAccessDeniedError, + FunctionsReadApiAccessDeniedError, ) ) async def delete_function( @@ -221,6 +343,7 @@ async def delete_function( FunctionJobIDNotFoundError, FunctionJobReadAccessDeniedError, FunctionJobWriteAccessDeniedError, + FunctionJobsWriteApiAccessDeniedError, ) ) async def delete_function_job( @@ -242,6 +365,8 @@ async def delete_function_job( reraise_if_error_type=( FunctionJobCollectionIDNotFoundError, FunctionJobCollectionReadAccessDeniedError, + FunctionJobCollectionWriteAccessDeniedError, + FunctionJobCollectionsWriteApiAccessDeniedError, ) ) async def delete_function_job_collection( @@ -263,6 +388,7 @@ async def delete_function_job_collection( reraise_if_error_type=( FunctionIDNotFoundError, FunctionReadAccessDeniedError, + FunctionWriteAccessDeniedError, ) ) async def update_function_title( @@ -273,17 +399,21 @@ async def update_function_title( function_id: FunctionID, title: str, ) -> RegisteredFunction: - return await _functions_service.update_function_title( + return await _functions_service.update_function( app=app, user_id=user_id, 
product_name=product_name, function_id=function_id, - title=title, + function=FunctionUpdate(title=title), ) @router.expose( - reraise_if_error_type=(FunctionIDNotFoundError, FunctionReadAccessDeniedError) + reraise_if_error_type=( + FunctionIDNotFoundError, + FunctionReadAccessDeniedError, + FunctionWriteAccessDeniedError, + ) ) async def update_function_description( app: web.Application, @@ -293,12 +423,12 @@ async def update_function_description( function_id: FunctionID, description: str, ) -> RegisteredFunction: - return await _functions_service.update_function_description( + return await _functions_service.update_function( app=app, user_id=user_id, product_name=product_name, function_id=function_id, - description=description, + function=FunctionUpdate(description=description), ) @@ -336,6 +466,90 @@ async def get_function_input_schema( ) +@router.expose(reraise_if_error_type=(FunctionJobIDNotFoundError,)) +async def get_function_job_status( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, +) -> FunctionJobStatus: + return await _functions_service.get_function_job_status( + app=app, + user_id=user_id, + product_name=product_name, + function_job_id=function_job_id, + ) + + +@router.expose(reraise_if_error_type=(FunctionJobIDNotFoundError,)) +async def get_function_job_outputs( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, +) -> FunctionOutputs: + return await _functions_service.get_function_job_outputs( + app=app, + user_id=user_id, + product_name=product_name, + function_job_id=function_job_id, + ) + + +@router.expose( + reraise_if_error_type=( + FunctionJobIDNotFoundError, + FunctionJobWriteAccessDeniedError, + FunctionJobReadAccessDeniedError, + ) +) +async def update_function_job_status( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, + job_status: FunctionJobStatus, + check_write_permissions: bool = True, +) -> FunctionJobStatus: + return await _functions_service.update_function_job_status( + app=app, + user_id=user_id, + product_name=product_name, + function_job_id=function_job_id, + job_status=job_status, + check_write_permissions=check_write_permissions, + ) + + +@router.expose( + reraise_if_error_type=( + FunctionJobIDNotFoundError, + FunctionJobWriteAccessDeniedError, + FunctionJobReadAccessDeniedError, + ) +) +async def update_function_job_outputs( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, + outputs: FunctionOutputs, + check_write_permissions: bool = True, +) -> FunctionOutputs: + return await _functions_service.update_function_job_outputs( + app=app, + user_id=user_id, + product_name=product_name, + function_job_id=function_job_id, + outputs=outputs, + check_write_permissions=check_write_permissions, + ) + + @router.expose(reraise_if_error_type=(FunctionIDNotFoundError,)) async def get_function_output_schema( app: web.Application, @@ -352,6 +566,4 @@ async def get_function_output_schema( ) -async def register_rpc_routes_on_startup(app: web.Application): - rpc_server = get_rabbitmq_rpc_server(app) - await rpc_server.register_router(router, WEBSERVER_RPC_NAMESPACE, app) +register_rpc_routes_on_startup = create_register_rpc_routes_on_startup(router) diff --git a/services/web/server/src/simcore_service_webserver/functions/_functions_exceptions.py 
b/services/web/server/src/simcore_service_webserver/functions/_functions_exceptions.py new file mode 100644 index 000000000000..e974ac625650 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/functions/_functions_exceptions.py @@ -0,0 +1,9 @@ +from common_library.user_messages import user_message + +from ..errors import WebServerBaseError + + +class FunctionGroupAccessRightsNotFoundError(WebServerBaseError, RuntimeError): + msg_template = user_message( + "Group access rights could not be found for Function '{function_id}' in product '{product_name}'." + ) diff --git a/services/web/server/src/simcore_service_webserver/functions/_functions_repository.py b/services/web/server/src/simcore_service_webserver/functions/_functions_repository.py index a4d2a55496f6..8a6543eb9a16 100644 --- a/services/web/server/src/simcore_service_webserver/functions/_functions_repository.py +++ b/services/web/server/src/simcore_service_webserver/functions/_functions_repository.py @@ -1,47 +1,70 @@ # pylint: disable=too-many-arguments import json -from typing import Literal +from typing import Final, Literal from uuid import UUID import sqlalchemy from aiohttp import web +from models_library.basic_types import IDStr from models_library.functions import ( FunctionAccessRightsDB, FunctionClass, + FunctionGroupAccessRights, FunctionID, FunctionInputs, FunctionInputSchema, FunctionJobAccessRightsDB, FunctionJobClassSpecificData, FunctionJobCollectionAccessRightsDB, + FunctionJobCollectionID, FunctionJobCollectionsListFilters, FunctionJobID, + FunctionJobStatus, FunctionOutputs, FunctionOutputSchema, + FunctionsApiAccessRights, + FunctionUpdate, + FunctionUserApiAccessRights, RegisteredFunctionDB, RegisteredFunctionJobCollectionDB, RegisteredFunctionJobDB, + RegisteredFunctionJobWithStatusDB, ) from models_library.functions_errors import ( + FunctionBaseError, FunctionExecuteAccessDeniedError, + FunctionHasJobsCannotDeleteError, FunctionIDNotFoundError, FunctionJobCollectionExecuteAccessDeniedError, FunctionJobCollectionIDNotFoundError, FunctionJobCollectionReadAccessDeniedError, + FunctionJobCollectionsExecuteApiAccessDeniedError, + FunctionJobCollectionsReadApiAccessDeniedError, + FunctionJobCollectionsWriteApiAccessDeniedError, FunctionJobCollectionWriteAccessDeniedError, FunctionJobExecuteAccessDeniedError, FunctionJobIDNotFoundError, FunctionJobReadAccessDeniedError, + FunctionJobsExecuteApiAccessDeniedError, + FunctionJobsReadApiAccessDeniedError, + FunctionJobsWriteApiAccessDeniedError, FunctionJobWriteAccessDeniedError, FunctionReadAccessDeniedError, + FunctionsExecuteApiAccessDeniedError, + FunctionsReadApiAccessDeniedError, + FunctionsWriteApiAccessDeniedError, FunctionWriteAccessDeniedError, ) from models_library.groups import GroupID from models_library.products import ProductName +from models_library.rest_ordering import OrderBy, OrderDirection from models_library.rest_pagination import PageMetaInfoLimitOffset from models_library.users import UserID from pydantic import TypeAdapter +from simcore_postgres_database.models.funcapi_api_access_rights_table import ( + funcapi_api_access_rights_table, +) from simcore_postgres_database.models.funcapi_function_job_collections_access_rights_table import ( function_job_collections_access_rights_table, ) @@ -63,15 +86,17 @@ from simcore_postgres_database.models.funcapi_functions_table import functions_table from simcore_postgres_database.utils_repos import ( get_columns_from_db_model, + pass_or_acquire_connection, transaction_context, ) -from 
simcore_service_webserver.groups.api import list_all_user_groups_ids -from simcore_service_webserver.users.api import get_user_primary_group_id -from sqlalchemy import Text, cast +from sqlalchemy import String, Text, cast +from sqlalchemy.engine.row import Row from sqlalchemy.ext.asyncio import AsyncConnection -from sqlalchemy.sql import func +from sqlalchemy.sql import ColumnElement, func from ..db.plugin import get_asyncpg_engine +from ..groups.api import list_all_user_groups_ids +from ..users import users_service _FUNCTIONS_TABLE_COLS = get_columns_from_db_model(functions_table, RegisteredFunctionDB) _FUNCTION_JOBS_TABLE_COLS = get_columns_from_db_model( @@ -90,6 +115,8 @@ function_job_collections_access_rights_table, FunctionJobCollectionAccessRightsDB ) +DEFAULT_ORDER_BY = OrderBy(field=IDStr("modified"), direction=OrderDirection.DESC) + async def create_function( # noqa: PLR0913 app: web.Application, @@ -105,9 +132,16 @@ async def create_function( # noqa: PLR0913 output_schema: FunctionOutputSchema, default_inputs: FunctionInputs, ) -> RegisteredFunctionDB: + async with transaction_context(get_asyncpg_engine(app), connection) as transaction: + await check_user_api_access_rights( + app, + connection=transaction, + user_id=user_id, + product_name=product_name, + api_access_rights=[FunctionsApiAccessRights.WRITE_FUNCTIONS], + ) - async with transaction_context(get_asyncpg_engine(app), connection) as conn: - result = await conn.stream( + result = await transaction.execute( functions_table.insert() .values( title=title, @@ -120,24 +154,26 @@ async def create_function( # noqa: PLR0913 ) .returning(*_FUNCTIONS_TABLE_COLS) ) - row = await result.one() + row = result.one() - registered_function = RegisteredFunctionDB.model_validate(dict(row)) + registered_function = RegisteredFunctionDB.model_validate(row) - user_primary_group_id = await get_user_primary_group_id(app, user_id=user_id) - await set_group_permissions( - app, - connection=connection, - group_id=user_primary_group_id, - product_name=product_name, - object_type="function", - object_ids=[registered_function.uuid], - read=True, - write=True, - execute=True, - ) + user_primary_group_id = await users_service.get_user_primary_group_id( + app, user_id=user_id + ) + await _internal_set_group_permissions( + app, + connection=transaction, + permission_group_id=user_primary_group_id, + product_name=product_name, + object_type="function", + object_ids=[registered_function.uuid], + read=True, + write=True, + execute=True, + ) - return RegisteredFunctionDB.model_validate(dict(row)) + return RegisteredFunctionDB.model_validate(row) async def create_function_job( # noqa: PLR0913 @@ -154,9 +190,17 @@ async def create_function_job( # noqa: PLR0913 outputs: FunctionOutputs, class_specific_data: FunctionJobClassSpecificData, ) -> RegisteredFunctionJobDB: - - async with transaction_context(get_asyncpg_engine(app), connection) as conn: - result = await conn.stream( + async with transaction_context(get_asyncpg_engine(app), connection) as transaction: + await check_user_api_access_rights( + app, + connection=transaction, + user_id=user_id, + product_name=product_name, + api_access_rights=[ + FunctionsApiAccessRights.WRITE_FUNCTION_JOBS, + ], + ) + result = await transaction.execute( function_jobs_table.insert() .values( function_uuid=function_uid, @@ -170,26 +214,66 @@ async def create_function_job( # noqa: PLR0913 ) .returning(*_FUNCTION_JOBS_TABLE_COLS) ) - row = await result.one() + row = result.one() - registered_function_job = 
RegisteredFunctionJobDB.model_validate(dict(row)) + registered_function_job = RegisteredFunctionJobDB.model_validate(row) - user_primary_group_id = await get_user_primary_group_id(app, user_id=user_id) - await set_group_permissions( - app, - connection=connection, - group_id=user_primary_group_id, - product_name=product_name, - object_type="function_job", - object_ids=[registered_function_job.uuid], - read=True, - write=True, - execute=True, - ) + user_primary_group_id = await users_service.get_user_primary_group_id( + app, user_id=user_id + ) + await _internal_set_group_permissions( + app, + connection=transaction, + permission_group_id=user_primary_group_id, + product_name=product_name, + object_type="function_job", + object_ids=[registered_function_job.uuid], + read=True, + write=True, + execute=True, + ) return registered_function_job +async def patch_function_job( + app: web.Application, + connection: AsyncConnection | None = None, + *, + user_id: UserID, + product_name: ProductName, + registered_function_job_db: RegisteredFunctionJobDB, +) -> RegisteredFunctionJobDB: + + async with transaction_context(get_asyncpg_engine(app), connection) as transaction: + await check_user_api_access_rights( + app, + connection=transaction, + user_id=user_id, + product_name=product_name, + api_access_rights=[ + FunctionsApiAccessRights.WRITE_FUNCTION_JOBS, + ], + ) + result = await transaction.execute( + function_jobs_table.update() + .where(function_jobs_table.c.uuid == f"{registered_function_job_db.uuid}") + .values( + inputs=registered_function_job_db.inputs, + outputs=registered_function_job_db.outputs, + function_class=registered_function_job_db.function_class, + class_specific_data=registered_function_job_db.class_specific_data, + title=registered_function_job_db.title, + description=registered_function_job_db.description, + status="created", + ) + .returning(*_FUNCTION_JOBS_TABLE_COLS) + ) + row = result.one() + + return RegisteredFunctionJobDB.model_validate(row) + + async def create_function_job_collection( app: web.Application, connection: AsyncConnection | None = None, @@ -200,20 +284,28 @@ async def create_function_job_collection( description: str, job_ids: list[FunctionJobID], ) -> tuple[RegisteredFunctionJobCollectionDB, list[FunctionJobID]]: - - for job_id in job_ids: - await check_user_permissions( + async with transaction_context(get_asyncpg_engine(app), connection) as transaction: + await check_user_api_access_rights( app, - connection=connection, + connection=transaction, user_id=user_id, product_name=product_name, - object_type="function_job", - object_id=job_id, - permissions=["read"], + api_access_rights=[ + FunctionsApiAccessRights.WRITE_FUNCTION_JOB_COLLECTIONS, + ], ) + for job_id in job_ids: + await check_user_permissions( + app, + connection=transaction, + user_id=user_id, + product_name=product_name, + object_type="function_job", + object_id=job_id, + permissions=["read"], + ) - async with transaction_context(get_asyncpg_engine(app), connection) as conn: - result = await conn.stream( + result = await transaction.execute( function_job_collections_table.insert() .values( title=title, @@ -221,7 +313,7 @@ async def create_function_job_collection( ) .returning(*_FUNCTION_JOB_COLLECTIONS_TABLE_COLS) ) - row = await result.one_or_none() + row = result.one_or_none() assert row is not None, ( "No row was returned from the database after creating function job collection." 
@@ -229,11 +321,11 @@ async def create_function_job_collection( ) # nosec function_job_collection_db = RegisteredFunctionJobCollectionDB.model_validate( - dict(row) + row ) - job_collection_entries = [] + job_collection_entries: list[Row] = [] for job_id in job_ids: - result = await conn.stream( + result = await transaction.execute( function_job_collections_to_function_jobs_table.insert() .values( function_job_collection_uuid=function_job_collection_db.uuid, @@ -244,28 +336,30 @@ async def create_function_job_collection( function_job_collections_to_function_jobs_table.c.function_job_uuid, ) ) - entry = await result.one_or_none() + entry = result.one_or_none() assert entry is not None, ( f"No row was returned from the database after creating function job collection entry {title}." f" Job ID: {job_id}" ) # nosec - job_collection_entries.append(dict(entry)) + job_collection_entries.append(entry) - user_primary_group_id = await get_user_primary_group_id(app, user_id=user_id) - await set_group_permissions( - app, - connection=connection, - group_id=user_primary_group_id, - product_name=product_name, - object_type="function_job_collection", - object_ids=[function_job_collection_db.uuid], - read=True, - write=True, - execute=True, - ) + user_primary_group_id = await users_service.get_user_primary_group_id( + app, user_id=user_id + ) + await _internal_set_group_permissions( + app, + connection=transaction, + permission_group_id=user_primary_group_id, + product_name=product_name, + object_type="function_job_collection", + object_ids=[function_job_collection_db.uuid], + read=True, + write=True, + execute=True, + ) return function_job_collection_db, [ - dict(entry)["function_job_uuid"] for entry in job_collection_entries + entry.function_job_uuid for entry in job_collection_entries ] @@ -277,28 +371,57 @@ async def get_function( product_name: ProductName, function_id: FunctionID, ) -> RegisteredFunctionDB: + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + await check_user_permissions( + app, + connection=conn, + user_id=user_id, + product_name=product_name, + object_id=function_id, + object_type="function", + permissions=["read"], + ) - async with transaction_context(get_asyncpg_engine(app), connection) as conn: - result = await conn.stream( + result = await conn.execute( functions_table.select().where(functions_table.c.uuid == function_id) ) - row = await result.one_or_none() + row = result.one_or_none() if row is None: raise FunctionIDNotFoundError(function_id=function_id) - registered_function = RegisteredFunctionDB.model_validate(dict(row)) + return RegisteredFunctionDB.model_validate(row) - await check_user_permissions( - app, - connection=connection, - user_id=user_id, - product_name=product_name, - object_id=function_id, - object_type="function", - permissions=["read"], - ) - return registered_function +def _create_list_functions_attributes_filters( + *, + filter_by_function_class: FunctionClass | None, + search_by_multi_columns: str | None, + search_by_function_title: str | None, +) -> list[ColumnElement]: + attributes_filters: list[ColumnElement] = [] + + if filter_by_function_class is not None: + attributes_filters.append( + functions_table.c.function_class == filter_by_function_class.value + ) + + if search_by_multi_columns is not None: + attributes_filters.append( + (functions_table.c.title.ilike(f"%{search_by_multi_columns}%")) + | (functions_table.c.description.ilike(f"%{search_by_multi_columns}%")) + | ( + cast(functions_table.c.uuid, 
String).ilike( + f"%{search_by_multi_columns}%" + ) + ) + ) + + if search_by_function_title is not None: + attributes_filters.append( + functions_table.c.title.ilike(f"%{search_by_function_title}%") + ) + + return attributes_filters async def list_functions( @@ -309,50 +432,81 @@ async def list_functions( product_name: ProductName, pagination_limit: int, pagination_offset: int, + order_by: OrderBy | None = None, + filter_by_function_class: FunctionClass | None = None, + search_by_multi_columns: str | None = None, + search_by_function_title: str | None = None, ) -> tuple[list[RegisteredFunctionDB], PageMetaInfoLimitOffset]: - async with transaction_context(get_asyncpg_engine(app), connection) as conn: + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + await check_user_api_access_rights( + app, + connection=conn, + user_id=user_id, + product_name=product_name, + api_access_rights=[FunctionsApiAccessRights.READ_FUNCTIONS], + ) user_groups = await list_all_user_groups_ids(app, user_id=user_id) + attributes_filters = _create_list_functions_attributes_filters( + filter_by_function_class=filter_by_function_class, + search_by_multi_columns=search_by_multi_columns, + search_by_function_title=search_by_function_title, + ) - subquery = ( - functions_access_rights_table.select() - .with_only_columns(functions_access_rights_table.c.function_uuid) + # Build the base query with join to access rights table + base_query = ( + functions_table.select() + .join( + functions_access_rights_table, + functions_table.c.uuid == functions_access_rights_table.c.function_uuid, + ) .where( functions_access_rights_table.c.group_id.in_(user_groups), functions_access_rights_table.c.product_name == product_name, functions_access_rights_table.c.read, + *attributes_filters, ) ) - total_count_result = await conn.scalar( - func.count() - .select() - .select_from(functions_table) - .where(functions_table.c.uuid.in_(subquery)) - ) - result = await conn.stream( - functions_table.select() - .where(functions_table.c.uuid.in_(subquery)) - .offset(pagination_offset) - .limit(pagination_limit) + # Get total count + total_count = await conn.scalar( + func.count().select().select_from(base_query.subquery()) ) - rows = await result.all() - if rows is None: + if total_count == 0: return [], PageMetaInfoLimitOffset( total=0, offset=pagination_offset, limit=pagination_limit, count=0 ) - return [ - RegisteredFunctionDB.model_validate(dict(row)) for row in rows - ], PageMetaInfoLimitOffset( - total=total_count_result, + if order_by is None: + order_by = DEFAULT_ORDER_BY + # Apply ordering and pagination + if order_by.direction == OrderDirection.ASC: + base_query = base_query.order_by( + sqlalchemy.asc(getattr(functions_table.c, order_by.field)), + functions_table.c.uuid, + ) + else: + base_query = base_query.order_by( + sqlalchemy.desc(getattr(functions_table.c, order_by.field)), + functions_table.c.uuid, + ) + + function_rows = [ + RegisteredFunctionDB.model_validate(row) + async for row in await conn.stream( + base_query.offset(pagination_offset).limit(pagination_limit) + ) + ] + + return function_rows, PageMetaInfoLimitOffset( + total=total_count, offset=pagination_offset, limit=pagination_limit, - count=len(rows), + count=len(function_rows), ) -async def list_function_jobs( +async def list_function_jobs_with_status( app: web.Application, connection: AsyncConnection | None = None, *, @@ -361,9 +515,17 @@ async def list_function_jobs( pagination_limit: int, pagination_offset: int, filter_by_function_id: 
FunctionID | None = None, -) -> tuple[list[RegisteredFunctionJobDB], PageMetaInfoLimitOffset]: - - async with transaction_context(get_asyncpg_engine(app), connection) as conn: + filter_by_function_job_ids: list[FunctionJobID] | None = None, + filter_by_function_job_collection_id: FunctionJobCollectionID | None = None, +) -> tuple[list[RegisteredFunctionJobWithStatusDB], PageMetaInfoLimitOffset]: + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + await check_user_api_access_rights( + app, + connection=conn, + user_id=user_id, + product_name=product_name, + api_access_rights=[FunctionsApiAccessRights.READ_FUNCTION_JOBS], + ) user_groups = await list_all_user_groups_ids(app, user_id=user_id) access_subquery = ( @@ -376,42 +538,170 @@ async def list_function_jobs( ) ) - total_count_result = await conn.scalar( - func.count() - .select() - .select_from(function_jobs_table) - .where(function_jobs_table.c.uuid.in_(access_subquery)) - .where( + filter_conditions = sqlalchemy.and_( + function_jobs_table.c.uuid.in_(access_subquery), + ( function_jobs_table.c.function_uuid == filter_by_function_id if filter_by_function_id else sqlalchemy.sql.true() - ) - ) - result = await conn.stream( - function_jobs_table.select() - .where(function_jobs_table.c.uuid.in_(access_subquery)) - .where( - function_jobs_table.c.function_uuid == filter_by_function_id - if filter_by_function_id + ), + ( + function_jobs_table.c.uuid.in_(filter_by_function_job_ids) + if filter_by_function_job_ids else sqlalchemy.sql.true() + ), + ) + + if filter_by_function_job_collection_id: + collection_subquery = ( + function_job_collections_to_function_jobs_table.select() + .with_only_columns( + function_job_collections_to_function_jobs_table.c.function_job_uuid + ) + .where( + function_job_collections_to_function_jobs_table.c.function_job_collection_uuid + == filter_by_function_job_collection_id + ) + ) + filter_conditions = sqlalchemy.and_( + filter_conditions, + function_jobs_table.c.uuid.in_(collection_subquery), ) - .offset(pagination_offset) - .limit(pagination_limit) + + total_count_result = await conn.scalar( + func.count() + .select() + .select_from(function_jobs_table) + .where(filter_conditions) ) - rows = await result.all() - if rows is None: + if total_count_result == 0: return [], PageMetaInfoLimitOffset( total=0, offset=pagination_offset, limit=pagination_limit, count=0 ) + results = [ + RegisteredFunctionJobWithStatusDB.model_validate(row) + async for row in await conn.stream( + function_jobs_table.select() + .where(filter_conditions) + .offset(pagination_offset) + .limit(pagination_limit) + ) + ] - return [ - RegisteredFunctionJobDB.model_validate(dict(row)) for row in rows - ], PageMetaInfoLimitOffset( + return results, PageMetaInfoLimitOffset( total=total_count_result, offset=pagination_offset, limit=pagination_limit, - count=len(rows), + count=len(results), + ) + + +async def get_function_job_status( + app: web.Application, + connection: AsyncConnection | None = None, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, +) -> FunctionJobStatus: + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + await check_user_permissions( + app, + connection=conn, + user_id=user_id, + product_name=product_name, + object_type="function_job", + object_id=function_job_id, + permissions=["read"], + ) + + result = await conn.execute( + function_jobs_table.select().where( + function_jobs_table.c.uuid == function_job_id + ) + ) + row = 
result.one_or_none() + + if row is None: + raise FunctionJobIDNotFoundError(function_job_id=function_job_id) + + return FunctionJobStatus(status=row.status) + + +async def get_function_job_outputs( + app: web.Application, + connection: AsyncConnection | None = None, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, +) -> FunctionOutputs: + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + await check_user_permissions( + app, + connection=conn, + user_id=user_id, + product_name=product_name, + object_type="function_job", + object_id=function_job_id, + permissions=["read"], + ) + + result = await conn.execute( + function_jobs_table.select().where( + function_jobs_table.c.uuid == function_job_id + ) + ) + row = result.one_or_none() + + if row is None: + raise FunctionJobIDNotFoundError(function_job_id=function_job_id) + + return TypeAdapter(FunctionOutputs).validate_python(row.outputs) + + +async def update_function_job_status( + app: web.Application, + connection: AsyncConnection | None = None, + *, + function_job_id: FunctionJobID, + job_status: FunctionJobStatus, +) -> FunctionJobStatus: + async with transaction_context(get_asyncpg_engine(app), connection) as transaction: + result = await transaction.execute( + function_jobs_table.update() + .where(function_jobs_table.c.uuid == function_job_id) + .values(status=job_status.status) + .returning(function_jobs_table.c.status) + ) + row = result.one_or_none() + + if row is None: + raise FunctionJobIDNotFoundError(function_job_id=function_job_id) + + return FunctionJobStatus(status=row.status) + + +async def update_function_job_outputs( + app: web.Application, + connection: AsyncConnection | None = None, + *, + function_job_id: FunctionJobID, + outputs: FunctionOutputs, +) -> FunctionOutputs: + async with transaction_context(get_asyncpg_engine(app), connection) as transaction: + result = await transaction.execute( + function_jobs_table.update() + .where(function_jobs_table.c.uuid == function_job_id) + .values(outputs=outputs) + .returning(function_jobs_table.c.outputs) ) + row = result.one_or_none() + + if row is None: + raise FunctionJobIDNotFoundError(function_job_id=function_job_id) + + return TypeAdapter(FunctionOutputs).validate_python(row.outputs) async def list_function_job_collections( @@ -431,8 +721,17 @@ async def list_function_job_collections( Returns a list of function job collections and their associated job ids. Filters the collections to include only those that have function jobs with the specified function id if filters.has_function_id is provided. 
""" + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + await check_user_api_access_rights( + app, + connection=conn, + user_id=user_id, + product_name=product_name, + api_access_rights=[ + FunctionsApiAccessRights.READ_FUNCTION_JOB_COLLECTIONS, + ], + ) - async with transaction_context(get_asyncpg_engine(app), connection) as conn: filter_condition: sqlalchemy.sql.ColumnElement = sqlalchemy.sql.true() if filters and filters.has_function_id: @@ -482,40 +781,35 @@ async def list_function_job_collections( .select_from(function_job_collections_table) .where(filter_and_access_condition) ) - query = function_job_collections_table.select().where( - filter_and_access_condition - ) - - result = await conn.stream( - query.offset(pagination_offset).limit(pagination_limit) - ) - rows = await result.all() - if rows is None: + if total_count_result == 0: return [], PageMetaInfoLimitOffset( total=0, offset=pagination_offset, limit=pagination_limit, count=0 ) + query = function_job_collections_table.select().where( + filter_and_access_condition + ) + collections = [] - for row in rows: - collection = RegisteredFunctionJobCollectionDB.model_validate(dict(row)) - job_result = await conn.stream( - function_job_collections_to_function_jobs_table.select().where( - function_job_collections_to_function_jobs_table.c.function_job_collection_uuid - == row["uuid"] + async for row in await conn.stream( + query.offset(pagination_offset).limit(pagination_limit) + ): + collection = RegisteredFunctionJobCollectionDB.model_validate(row) + job_ids = [ + job_row.function_job_uuid + async for job_row in await conn.stream( + function_job_collections_to_function_jobs_table.select().where( + function_job_collections_to_function_jobs_table.c.function_job_collection_uuid + == row.uuid + ) ) - ) - job_rows = await job_result.all() - job_ids = ( - [job_row["function_job_uuid"] for job_row in job_rows] - if job_rows - else [] - ) + ] collections.append((collection, job_ids)) return collections, PageMetaInfoLimitOffset( total=total_count_result, offset=pagination_offset, limit=pagination_limit, - count=len(rows), + count=len(collections), ) @@ -526,128 +820,112 @@ async def delete_function( user_id: UserID, product_name: ProductName, function_id: FunctionID, + force: bool = False, ) -> None: + async with transaction_context(get_asyncpg_engine(app), connection) as transaction: + await check_user_permissions( + app, + connection=transaction, + user_id=user_id, + product_name=product_name, + object_id=function_id, + object_type="function", + permissions=["write"], + ) - await check_user_permissions( - app, - connection=connection, - user_id=user_id, - product_name=product_name, - object_id=function_id, - object_type="function", - permissions=["write"], - ) - - async with transaction_context(get_asyncpg_engine(app), connection) as conn: # Check if the function exists - result = await conn.stream( + result = await transaction.execute( functions_table.select().where(functions_table.c.uuid == function_id) ) - row = await result.one_or_none() + row = result.one_or_none() if row is None: raise FunctionIDNotFoundError(function_id=function_id) + # Check for existing function jobs if force is not True + if not force: + jobs_result = await transaction.execute( + function_jobs_table.select() + .with_only_columns(func.count()) + .where(function_jobs_table.c.function_uuid == function_id) + ) + jobs_count = jobs_result.scalar() or 0 + + if jobs_count > 0: + raise FunctionHasJobsCannotDeleteError( + 
function_id=function_id, jobs_count=jobs_count + ) + # Proceed with deletion - await conn.execute( + await transaction.execute( functions_table.delete().where(functions_table.c.uuid == function_id) ) -async def update_function_title( +async def update_function( app: web.Application, + connection: AsyncConnection | None = None, *, user_id: UserID, product_name: ProductName, function_id: FunctionID, - title: str, + function: FunctionUpdate, ) -> RegisteredFunctionDB: - await check_user_permissions( - app, - user_id=user_id, - product_name=product_name, - object_id=function_id, - object_type="function", - permissions=["write"], - ) + async with transaction_context(get_asyncpg_engine(app), connection) as transaction: + await check_user_permissions( + app, + transaction, + user_id=user_id, + product_name=product_name, + object_id=function_id, + object_type="function", + permissions=["read", "write"], + ) - async with transaction_context(get_asyncpg_engine(app)) as conn: - result = await conn.stream( + result = await transaction.execute( functions_table.update() .where(functions_table.c.uuid == function_id) - .values(title=title) + .values(**function.model_dump(exclude_none=True, exclude_unset=True)) .returning(*_FUNCTIONS_TABLE_COLS) ) - row = await result.one_or_none() + row = result.one_or_none() if row is None: raise FunctionIDNotFoundError(function_id=function_id) - return RegisteredFunctionDB.model_validate(dict(row)) + return RegisteredFunctionDB.model_validate(row) -async def update_function_description( +async def get_function_job( app: web.Application, - *, - user_id: UserID, - product_name: ProductName, - function_id: FunctionID, - description: str, -) -> RegisteredFunctionDB: - await check_user_permissions( - app, - user_id=user_id, - product_name=product_name, - object_id=function_id, - object_type="function", - permissions=["write"], - ) - - async with transaction_context(get_asyncpg_engine(app)) as conn: - result = await conn.stream( - functions_table.update() - .where(functions_table.c.uuid == function_id) - .values(description=description) - .returning(*_FUNCTIONS_TABLE_COLS) - ) - row = await result.one_or_none() - - if row is None: - raise FunctionIDNotFoundError(function_id=function_id) - - return RegisteredFunctionDB.model_validate(dict(row)) - - -async def get_function_job( - app: web.Application, - connection: AsyncConnection | None = None, + connection: AsyncConnection | None = None, *, user_id: UserID, product_name: ProductName, function_job_id: FunctionID, ) -> RegisteredFunctionJobDB: - await check_user_permissions( - app, - connection=connection, - user_id=user_id, - product_name=product_name, - object_id=function_job_id, - object_type="function_job", - permissions=["read"], - ) + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + await check_user_permissions( + app, + connection=conn, + user_id=user_id, + product_name=product_name, + object_id=function_job_id, + object_type="function_job", + permissions=["read"], + ) - async with transaction_context(get_asyncpg_engine(app), connection) as conn: - result = await conn.stream( + result = await conn.execute( function_jobs_table.select().where( function_jobs_table.c.uuid == function_job_id ) ) - row = await result.one_or_none() + row = result.one_or_none() if row is None: raise FunctionJobIDNotFoundError(function_job_id=function_job_id) - return RegisteredFunctionJobDB.model_validate(dict(row)) + return RegisteredFunctionJobDB.model_validate(row) async def delete_function_job( @@ -658,29 
+936,29 @@ async def delete_function_job( product_name: ProductName, function_job_id: FunctionID, ) -> None: - await check_user_permissions( - app, - connection=connection, - user_id=user_id, - product_name=product_name, - object_id=function_job_id, - object_type="function_job", - permissions=["write"], - ) + async with transaction_context(get_asyncpg_engine(app), connection) as transaction: + await check_user_permissions( + app, + connection=transaction, + user_id=user_id, + product_name=product_name, + object_id=function_job_id, + object_type="function_job", + permissions=["write"], + ) - async with transaction_context(get_asyncpg_engine(app), connection) as conn: # Check if the function job exists - result = await conn.stream( + result = await transaction.execute( function_jobs_table.select().where( function_jobs_table.c.uuid == function_job_id ) ) - row = await result.one_or_none() + row = result.one_or_none() if row is None: raise FunctionJobIDNotFoundError(function_job_id=function_job_id) # Proceed with deletion - await conn.execute( + await transaction.execute( function_jobs_table.delete().where( function_jobs_table.c.uuid == function_job_id ) @@ -696,41 +974,34 @@ async def find_cached_function_jobs( product_name: ProductName, inputs: FunctionInputs, ) -> list[RegisteredFunctionJobDB] | None: - - async with transaction_context(get_asyncpg_engine(app), connection) as conn: - result = await conn.stream( + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + jobs: list[RegisteredFunctionJobDB] = [] + async for row in await conn.stream( function_jobs_table.select().where( function_jobs_table.c.function_uuid == function_id, cast(function_jobs_table.c.inputs, Text) == json.dumps(inputs), - ), - ) - rows = await result.all() - - if rows is None or len(rows) == 0: - return None - - jobs = [] - for row in rows: - job = RegisteredFunctionJobDB.model_validate(dict(row)) - try: - await check_user_permissions( - app, - connection=connection, - user_id=user_id, - product_name=product_name, - object_id=job.uuid, - object_type="function_job", - permissions=["read"], ) - except FunctionJobReadAccessDeniedError: - continue + ): + job = RegisteredFunctionJobDB.model_validate(row) + try: + await check_user_permissions( + app, + connection=conn, + user_id=user_id, + product_name=product_name, + object_id=job.uuid, + object_type="function_job", + permissions=["read"], + ) + except FunctionJobReadAccessDeniedError: + continue - jobs.append(job) + jobs.append(job) - if len(jobs) > 0: - return jobs + if len(jobs) > 0: + return jobs - return None + return None async def get_function_job_collection( @@ -741,23 +1012,23 @@ async def get_function_job_collection( product_name: ProductName, function_job_collection_id: FunctionID, ) -> tuple[RegisteredFunctionJobCollectionDB, list[FunctionJobID]]: - await check_user_permissions( - app, - connection=connection, - user_id=user_id, - product_name=product_name, - object_id=function_job_collection_id, - object_type="function_job_collection", - permissions=["read"], - ) + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + await check_user_permissions( + app, + connection=conn, + user_id=user_id, + product_name=product_name, + object_id=function_job_collection_id, + object_type="function_job_collection", + permissions=["read"], + ) - async with transaction_context(get_asyncpg_engine(app), connection) as conn: - result = await conn.stream( + result = await conn.execute( 
function_job_collections_table.select().where( function_job_collections_table.c.uuid == function_job_collection_id ) ) - row = await result.one_or_none() + row = result.one_or_none() if row is None: raise FunctionJobCollectionIDNotFoundError( @@ -765,19 +1036,17 @@ async def get_function_job_collection( ) # Retrieve associated job ids from the join table - job_result = await conn.stream( - function_job_collections_to_function_jobs_table.select().where( - function_job_collections_to_function_jobs_table.c.function_job_collection_uuid - == row["uuid"] + job_ids = [ + job_row.function_job_uuid + async for job_row in await conn.stream( + function_job_collections_to_function_jobs_table.select().where( + function_job_collections_to_function_jobs_table.c.function_job_collection_uuid + == row.uuid + ) ) - ) - job_rows = await job_result.all() - - job_ids = ( - [job_row["function_job_uuid"] for job_row in job_rows] if job_rows else [] - ) + ] - job_collection = RegisteredFunctionJobCollectionDB.model_validate(dict(row)) + job_collection = RegisteredFunctionJobCollectionDB.model_validate(row) return job_collection, job_ids @@ -790,35 +1059,35 @@ async def delete_function_job_collection( product_name: ProductName, function_job_collection_id: FunctionID, ) -> None: - await check_user_permissions( - app, - connection=connection, - user_id=user_id, - product_name=product_name, - object_id=function_job_collection_id, - object_type="function_job_collection", - permissions=["write"], - ) + async with transaction_context(get_asyncpg_engine(app), connection) as transaction: + await check_user_permissions( + app, + connection=transaction, + user_id=user_id, + product_name=product_name, + object_id=function_job_collection_id, + object_type="function_job_collection", + permissions=["write"], + ) - async with transaction_context(get_asyncpg_engine(app), connection) as conn: # Check if the function job collection exists - result = await conn.stream( + result = await transaction.execute( function_job_collections_table.select().where( function_job_collections_table.c.uuid == function_job_collection_id ) ) - row = await result.one_or_none() + row = result.one_or_none() if row is None: raise FunctionJobCollectionIDNotFoundError( function_job_collection_id=function_job_collection_id ) # Proceed with deletion - await conn.execute( + await transaction.execute( function_job_collections_table.delete().where( function_job_collections_table.c.uuid == function_job_collection_id ) ) - await conn.execute( + await transaction.execute( function_job_collections_to_function_jobs_table.delete().where( function_job_collections_to_function_jobs_table.c.function_job_collection_uuid == function_job_collection_id @@ -826,19 +1095,212 @@ async def delete_function_job_collection( ) -async def set_group_permissions( +async def get_group_permissions( app: web.Application, connection: AsyncConnection | None = None, *, - group_id: GroupID, + user_id: UserID, product_name: ProductName, object_type: Literal["function", "function_job", "function_job_collection"], object_ids: list[UUID], +) -> list[tuple[UUID, list[FunctionGroupAccessRights]]]: + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + for object_id in object_ids: + await check_user_permissions( + app, + connection=conn, + user_id=user_id, + product_name=product_name, + object_id=object_id, + object_type=object_type, + permissions=["read"], + ) + + return await _internal_get_group_permissions( + app, + connection=connection, + 
product_name=product_name, + object_type=object_type, + object_ids=object_ids, + ) + + +async def set_group_permissions( + app: web.Application, + connection: AsyncConnection | None = None, + *, + user_id: UserID, + permission_group_id: GroupID, + product_name: ProductName, + object_type: Literal["function", "function_job", "function_job_collection"], + object_ids: list[FunctionID | FunctionJobID | FunctionJobCollectionID], read: bool | None = None, write: bool | None = None, execute: bool | None = None, +) -> list[ + tuple[ + FunctionID | FunctionJobID | FunctionJobCollectionID, FunctionGroupAccessRights + ] +]: + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + for object_id in object_ids: + await check_user_permissions( + app, + connection=conn, + user_id=user_id, + product_name=product_name, + object_id=object_id, + object_type=object_type, + permissions=["write"], + ) + + return await _internal_set_group_permissions( + app, + connection=connection, + permission_group_id=permission_group_id, + product_name=product_name, + object_type=object_type, + object_ids=object_ids, + read=read, + write=write, + execute=execute, + ) + + +async def remove_group_permissions( + app: web.Application, + connection: AsyncConnection | None = None, + *, + user_id: UserID, + permission_group_id: GroupID, + product_name: ProductName, + object_type: Literal["function", "function_job", "function_job_collection"], + object_ids: list[UUID], ) -> None: + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + for object_id in object_ids: + await check_user_permissions( + app, + connection=conn, + user_id=user_id, + product_name=product_name, + object_id=object_id, + object_type=object_type, + permissions=["write"], + ) + await _internal_remove_group_permissions( + app, + connection=connection, + permission_group_id=permission_group_id, + product_name=product_name, + object_type=object_type, + object_ids=object_ids, + ) + + +async def _internal_remove_group_permissions( + app: web.Application, + connection: AsyncConnection | None = None, + *, + permission_group_id: GroupID, + product_name: ProductName, + object_type: Literal["function", "function_job", "function_job_collection"], + object_ids: list[UUID], +) -> None: + access_rights_table = None + field_name = None + + if object_type == "function": + access_rights_table = functions_access_rights_table + field_name = "function_uuid" + elif object_type == "function_job": + access_rights_table = function_jobs_access_rights_table + field_name = "function_job_uuid" + elif object_type == "function_job_collection": + access_rights_table = function_job_collections_access_rights_table + field_name = "function_job_collection_uuid" + + assert access_rights_table is not None # nosec + assert field_name is not None # nosec + + async with transaction_context(get_asyncpg_engine(app), connection) as transaction: + for object_id in object_ids: + await transaction.execute( + access_rights_table.delete().where( + getattr(access_rights_table.c, field_name) == object_id, + access_rights_table.c.group_id == permission_group_id, + access_rights_table.c.product_name == product_name, + ) + ) + + +async def _internal_get_group_permissions( + app: web.Application, + connection: AsyncConnection | None = None, + *, + product_name: ProductName, + object_type: Literal["function", "function_job", "function_job_collection"], + object_ids: list[FunctionID | FunctionJobID | FunctionJobCollectionID], +) -> list[ + tuple[ + 
FunctionID | FunctionJobID | FunctionJobCollectionID, + list[FunctionGroupAccessRights], + ] +]: + access_rights_table = None + field_name = None + if object_type == "function": + access_rights_table = functions_access_rights_table + field_name = "function_uuid" + elif object_type == "function_job": + access_rights_table = function_jobs_access_rights_table + field_name = "function_job_uuid" + elif object_type == "function_job_collection": + access_rights_table = function_job_collections_access_rights_table + field_name = "function_job_collection_uuid" + + assert access_rights_table is not None # nosec + assert field_name is not None # nosec + + access_rights_list: list[tuple[UUID, list[FunctionGroupAccessRights]]] = [] + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + for object_id in object_ids: + rows = [ + row + async for row in await conn.stream( + access_rights_table.select().where( + getattr(access_rights_table.c, field_name) == object_id, + access_rights_table.c.product_name == product_name, + ) + ) + ] + group_permissions = [ + FunctionGroupAccessRights( + group_id=row.group_id, + read=row.read, + write=row.write, + execute=row.execute, + ) + for row in rows + ] + access_rights_list.append((object_id, group_permissions)) + + return access_rights_list + + +async def _internal_set_group_permissions( + app: web.Application, + connection: AsyncConnection | None = None, + *, + permission_group_id: GroupID, + product_name: ProductName, + object_type: Literal["function", "function_job", "function_job_collection"], + object_ids: list[UUID], + read: bool | None = None, + write: bool | None = None, + execute: bool | None = None, +) -> list[tuple[UUID, FunctionGroupAccessRights]]: access_rights_table = None field_name = None if object_type == "function": @@ -854,29 +1316,39 @@ async def set_group_permissions( assert access_rights_table is not None # nosec assert field_name is not None # nosec - async with transaction_context(get_asyncpg_engine(app), connection) as conn: + access_rights_list: list[tuple[UUID, FunctionGroupAccessRights]] = [] + async with transaction_context(get_asyncpg_engine(app), connection) as transaction: for object_id in object_ids: # Check if the group already has access rights for the function - result = await conn.stream( + result = await transaction.execute( access_rights_table.select().where( getattr(access_rights_table.c, field_name) == object_id, - access_rights_table.c.group_id == group_id, + access_rights_table.c.group_id == permission_group_id, ) ) - row = await result.one_or_none() + row = result.one_or_none() if row is None: # Insert new access rights if the group does not have any - await conn.execute( - access_rights_table.insert().values( + result = await transaction.execute( + access_rights_table.insert() + .values( **{field_name: object_id}, - group_id=group_id, + group_id=permission_group_id, product_name=product_name, read=read if read is not None else False, write=write if write is not None else False, execute=execute if execute is not None else False, ) + .returning( + access_rights_table.c.group_id, + access_rights_table.c.read, + access_rights_table.c.write, + access_rights_table.c.execute, + ) ) + row = result.one() + access_rights_list.append((object_id, FunctionGroupAccessRights(**row))) else: # Update existing access rights only for non-None values update_values = { @@ -885,14 +1357,76 @@ async def set_group_permissions( "execute": execute if execute is not None else row["execute"], } - await conn.execute( + 
update_result = await transaction.execute( access_rights_table.update() .where( getattr(access_rights_table.c, field_name) == object_id, - access_rights_table.c.group_id == group_id, + access_rights_table.c.group_id == permission_group_id, ) .values(**update_values) + .returning( + access_rights_table.c.group_id, + access_rights_table.c.read, + access_rights_table.c.write, + access_rights_table.c.execute, + ) ) + updated_row = update_result.one() + access_rights_list.append( + (object_id, FunctionGroupAccessRights(**updated_row)) + ) + + return access_rights_list + + +async def get_user_api_access_rights( + app: web.Application, + connection: AsyncConnection | None = None, + *, + user_id: UserID, + product_name: ProductName, +) -> FunctionUserApiAccessRights: + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + user_groups = await list_all_user_groups_ids(app, user_id=user_id) + + # Initialize combined permissions with False values + combined_permissions = FunctionUserApiAccessRights( + user_id=user_id, + read_functions=False, + write_functions=False, + execute_functions=False, + read_function_jobs=False, + write_function_jobs=False, + execute_function_jobs=False, + read_function_job_collections=False, + write_function_job_collections=False, + execute_function_job_collections=False, + ) + + # Process each row only once and combine permissions + async for row in await conn.stream( + funcapi_api_access_rights_table.select().where( + funcapi_api_access_rights_table.c.group_id.in_(user_groups), + funcapi_api_access_rights_table.c.product_name == product_name, + ) + ): + combined_permissions.read_functions |= row.read_functions + combined_permissions.write_functions |= row.write_functions + combined_permissions.execute_functions |= row.execute_functions + combined_permissions.read_function_jobs |= row.read_function_jobs + combined_permissions.write_function_jobs |= row.write_function_jobs + combined_permissions.execute_function_jobs |= row.execute_function_jobs + combined_permissions.read_function_job_collections |= ( + row.read_function_job_collections + ) + combined_permissions.write_function_job_collections |= ( + row.write_function_job_collections + ) + combined_permissions.execute_function_job_collections |= ( + row.execute_function_job_collections + ) + + return combined_permissions async def get_user_permissions( @@ -904,52 +1438,49 @@ async def get_user_permissions( object_id: UUID, object_type: Literal["function", "function_job", "function_job_collection"], ) -> FunctionAccessRightsDB | None: + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + await check_exists( + app, + conn, + object_id=object_id, + object_type=object_type, + ) - await check_exists( - app, - object_id=object_id, - object_type=object_type, - ) + access_rights_table = None + cols = None + if object_type == "function": + access_rights_table = functions_access_rights_table + cols = _FUNCTIONS_ACCESS_RIGHTS_TABLE_COLS + elif object_type == "function_job": + access_rights_table = function_jobs_access_rights_table + cols = _FUNCTION_JOBS_ACCESS_RIGHTS_TABLE_COLS + elif object_type == "function_job_collection": + access_rights_table = function_job_collections_access_rights_table + cols = _FUNCTION_JOB_COLLECTIONS_ACCESS_RIGHTS_TABLE_COLS + assert access_rights_table is not None # nosec - access_rights_table = None - cols = None - if object_type == "function": - access_rights_table = functions_access_rights_table - cols = 
_FUNCTIONS_ACCESS_RIGHTS_TABLE_COLS - elif object_type == "function_job": - access_rights_table = function_jobs_access_rights_table - cols = _FUNCTION_JOBS_ACCESS_RIGHTS_TABLE_COLS - elif object_type == "function_job_collection": - access_rights_table = function_job_collections_access_rights_table - cols = _FUNCTION_JOB_COLLECTIONS_ACCESS_RIGHTS_TABLE_COLS - assert access_rights_table is not None # nosec - - async with transaction_context(get_asyncpg_engine(app), connection) as conn: user_groups = await list_all_user_groups_ids(app, user_id=user_id) - # Combine permissions for all groups the user belongs to - result = await conn.stream( + # Initialize combined permissions with False values + combined_permissions = FunctionAccessRightsDB( + read=False, write=False, execute=False + ) + + # Process each row only once and combine permissions + async for row in await conn.stream( access_rights_table.select() - .with_only_columns(cols) + .with_only_columns(*cols) .where( getattr(access_rights_table.c, f"{object_type}_uuid") == object_id, access_rights_table.c.product_name == product_name, access_rights_table.c.group_id.in_(user_groups), ) - ) - rows = await result.all() + ): + combined_permissions.read |= row.read + combined_permissions.write |= row.write + combined_permissions.execute |= row.execute - if not rows: - return None - - # Combine permissions across all rows - combined_permissions = { - "read": any(row["read"] for row in rows), - "write": any(row["write"] for row in rows), - "execute": any(row["execute"] for row in rows), - } - - return FunctionAccessRightsDB.model_validate(combined_permissions) + return combined_permissions async def check_exists( @@ -980,11 +1511,11 @@ async def check_exists( function_job_collection_id=object_id ) - async with transaction_context(get_asyncpg_engine(app), connection) as conn: - result = await conn.stream( + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + result = await conn.execute( main_table.select().where(main_table.c.uuid == object_id) ) - row = await result.one_or_none() + row = result.one_or_none() if row is None: raise error @@ -1001,6 +1532,21 @@ async def check_user_permissions( object_type: Literal["function", "function_job", "function_job_collection"], permissions: list[Literal["read", "write", "execute"]], ) -> bool: + + api_access_rights = [ + getattr( + FunctionsApiAccessRights, f"{permission.upper()}_{object_type.upper()}S" + ) + for permission in permissions + ] + await check_user_api_access_rights( + app, + connection=connection, + user_id=user_id, + product_name=product_name, + api_access_rights=api_access_rights, + ) + user_permissions = await get_user_permissions( app, connection=connection, @@ -1058,3 +1604,35 @@ async def check_user_permissions( raise errors[permission] return True + + +_ERRORS_MAP: Final[dict[FunctionsApiAccessRights, type[FunctionBaseError]]] = { + FunctionsApiAccessRights.READ_FUNCTIONS: FunctionsReadApiAccessDeniedError, + FunctionsApiAccessRights.WRITE_FUNCTIONS: FunctionsWriteApiAccessDeniedError, + FunctionsApiAccessRights.EXECUTE_FUNCTIONS: FunctionsExecuteApiAccessDeniedError, + FunctionsApiAccessRights.READ_FUNCTION_JOBS: FunctionJobsReadApiAccessDeniedError, + FunctionsApiAccessRights.WRITE_FUNCTION_JOBS: FunctionJobsWriteApiAccessDeniedError, + FunctionsApiAccessRights.EXECUTE_FUNCTION_JOBS: FunctionJobsExecuteApiAccessDeniedError, + FunctionsApiAccessRights.READ_FUNCTION_JOB_COLLECTIONS: FunctionJobCollectionsReadApiAccessDeniedError, + 
FunctionsApiAccessRights.WRITE_FUNCTION_JOB_COLLECTIONS: FunctionJobCollectionsWriteApiAccessDeniedError, + FunctionsApiAccessRights.EXECUTE_FUNCTION_JOB_COLLECTIONS: FunctionJobCollectionsExecuteApiAccessDeniedError, +} + + +async def check_user_api_access_rights( + app: web.Application, + connection: AsyncConnection | None = None, + *, + user_id: UserID, + product_name: ProductName, + api_access_rights: list[FunctionsApiAccessRights], +) -> bool: + user_api_access_rights = await get_user_api_access_rights( + app, connection=connection, user_id=user_id, product_name=product_name + ) + + for api_access_right in api_access_rights: + if not getattr(user_api_access_rights, api_access_right): + raise _ERRORS_MAP[api_access_right](user_id=user_id) + + return True diff --git a/services/web/server/src/simcore_service_webserver/functions/_functions_service.py b/services/web/server/src/simcore_service_webserver/functions/_functions_service.py index fd29f603d7cd..0b94ee4829cc 100644 --- a/services/web/server/src/simcore_service_webserver/functions/_functions_service.py +++ b/services/web/server/src/simcore_service_webserver/functions/_functions_service.py @@ -1,47 +1,62 @@ +from typing import Literal + from aiohttp import web +from models_library.basic_types import IDStr from models_library.functions import ( Function, FunctionClass, FunctionClassSpecificData, FunctionDB, + FunctionGroupAccessRights, FunctionID, FunctionInputs, FunctionInputSchema, FunctionJob, FunctionJobClassSpecificData, FunctionJobCollection, + FunctionJobCollectionID, FunctionJobCollectionsListFilters, FunctionJobDB, FunctionJobID, + FunctionJobStatus, + FunctionOutputs, FunctionOutputSchema, + FunctionUpdate, + FunctionUserAccessRights, + FunctionUserApiAccessRights, RegisteredFunction, RegisteredFunctionDB, RegisteredFunctionJob, RegisteredFunctionJobCollection, RegisteredFunctionJobDB, + RegisteredFunctionJobPatch, + RegisteredFunctionJobWithStatus, + RegisteredFunctionJobWithStatusDB, RegisteredProjectFunction, RegisteredProjectFunctionJob, + RegisteredProjectFunctionJobWithStatus, RegisteredSolverFunction, RegisteredSolverFunctionJob, + RegisteredSolverFunctionJobWithStatus, ) from models_library.functions_errors import ( - FunctionIDNotFoundError, - FunctionJobCollectionIDNotFoundError, - FunctionJobIDNotFoundError, + FunctionJobPatchModelIncompatibleError, UnsupportedFunctionClassError, UnsupportedFunctionJobClassError, ) +from models_library.groups import GroupID from models_library.products import ProductName +from models_library.rest_ordering import OrderBy from models_library.rest_pagination import PageMetaInfoLimitOffset from models_library.users import UserID from servicelib.rabbitmq import RPCRouter from . 
import _functions_repository +from ._functions_exceptions import FunctionGroupAccessRightsNotFoundError router = RPCRouter() -@router.expose(reraise_if_error_type=(UnsupportedFunctionClassError,)) async def register_function( app: web.Application, *, @@ -65,7 +80,6 @@ async def register_function( return _decode_function(saved_function) -@router.expose(reraise_if_error_type=(UnsupportedFunctionJobClassError,)) async def register_function_job( app: web.Application, *, @@ -89,7 +103,37 @@ async def register_function_job( return _decode_functionjob(created_function_job_db) -@router.expose(reraise_if_error_type=()) +async def patch_registered_function_job( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, + function_job_uuid: FunctionJobID, + registered_function_job_patch: RegisteredFunctionJobPatch, +) -> RegisteredFunctionJob: + job = await _functions_repository.get_function_job( + app=app, + user_id=user_id, + product_name=product_name, + function_job_id=function_job_uuid, + ) + if job.function_class != registered_function_job_patch.function_class: + raise FunctionJobPatchModelIncompatibleError( + function_id=job.function_uuid, + product_name=product_name, + ) + + patched_job = _patch_functionjob(job, registered_function_job_patch) + + result = await _functions_repository.patch_function_job( + app=app, + user_id=user_id, + product_name=product_name, + registered_function_job_db=patched_job, + ) + return _decode_functionjob(result) + + async def register_function_job_collection( app: web.Application, *, @@ -116,7 +160,6 @@ async def register_function_job_collection( ) -@router.expose(reraise_if_error_type=(FunctionIDNotFoundError,)) async def get_function( app: web.Application, *, @@ -135,7 +178,6 @@ async def get_function( ) -@router.expose(reraise_if_error_type=(FunctionJobIDNotFoundError,)) async def get_function_job( app: web.Application, *, @@ -154,7 +196,6 @@ async def get_function_job( return _decode_functionjob(returned_function_job) -@router.expose(reraise_if_error_type=(FunctionJobCollectionIDNotFoundError,)) async def get_function_job_collection( app: web.Application, *, @@ -179,7 +220,6 @@ async def get_function_job_collection( ) -@router.expose() async def list_functions( app: web.Application, *, @@ -187,6 +227,10 @@ async def list_functions( product_name: ProductName, pagination_limit: int, pagination_offset: int, + order_by: OrderBy | None = None, + filter_by_function_class: FunctionClass | None = None, + search_by_function_title: str | None = None, + search_by_multi_columns: str | None = None, ) -> tuple[list[RegisteredFunction], PageMetaInfoLimitOffset]: returned_functions, page = await _functions_repository.list_functions( app=app, @@ -194,13 +238,23 @@ async def list_functions( product_name=product_name, pagination_limit=pagination_limit, pagination_offset=pagination_offset, + order_by=( + OrderBy( + field=IDStr("uuid") if order_by.field == "uid" else order_by.field, + direction=order_by.direction, + ) + if order_by + else None + ), + filter_by_function_class=filter_by_function_class, + search_by_function_title=search_by_function_title, + search_by_multi_columns=search_by_multi_columns, ) return [ _decode_function(returned_function) for returned_function in returned_functions ], page -@router.expose() async def list_function_jobs( app: web.Application, *, @@ -209,14 +263,20 @@ async def list_function_jobs( pagination_limit: int, pagination_offset: int, filter_by_function_id: FunctionID | None = None, + filter_by_function_job_ids: 
list[FunctionJobID] | None = None, + filter_by_function_job_collection_id: FunctionJobCollectionID | None = None, ) -> tuple[list[RegisteredFunctionJob], PageMetaInfoLimitOffset]: - returned_function_jobs, page = await _functions_repository.list_function_jobs( - app=app, - user_id=user_id, - product_name=product_name, - pagination_limit=pagination_limit, - pagination_offset=pagination_offset, - filter_by_function_id=filter_by_function_id, + returned_function_jobs, page = ( + await _functions_repository.list_function_jobs_with_status( + app=app, + user_id=user_id, + product_name=product_name, + pagination_limit=pagination_limit, + pagination_offset=pagination_offset, + filter_by_function_id=filter_by_function_id, + filter_by_function_job_ids=filter_by_function_job_ids, + filter_by_function_job_collection_id=filter_by_function_job_collection_id, + ) ) return [ _decode_functionjob(returned_function_job) @@ -224,7 +284,38 @@ async def list_function_jobs( ], page -@router.expose() +async def list_function_jobs_with_status( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, + pagination_limit: int, + pagination_offset: int, + filter_by_function_id: FunctionID | None = None, + filter_by_function_job_ids: list[FunctionJobID] | None = None, + filter_by_function_job_collection_id: FunctionJobCollectionID | None = None, +) -> tuple[ + list[RegisteredFunctionJobWithStatus], + PageMetaInfoLimitOffset, +]: + returned_function_jobs_wso, page = ( + await _functions_repository.list_function_jobs_with_status( + app=app, + user_id=user_id, + product_name=product_name, + pagination_limit=pagination_limit, + pagination_offset=pagination_offset, + filter_by_function_id=filter_by_function_id, + filter_by_function_job_ids=filter_by_function_job_ids, + filter_by_function_job_collection_id=filter_by_function_job_collection_id, + ) + ) + return [ + _decode_functionjob_wso(returned_function_job_wso) + for returned_function_job_wso in returned_function_jobs_wso + ], page + + async def list_function_job_collections( app: web.Application, *, @@ -256,23 +347,23 @@ async def list_function_job_collections( ], page -@router.expose(reraise_if_error_type=(FunctionIDNotFoundError,)) async def delete_function( app: web.Application, *, user_id: UserID, product_name: ProductName, function_id: FunctionID, + force: bool = False, ) -> None: await _functions_repository.delete_function( app=app, user_id=user_id, product_name=product_name, function_id=function_id, + force=force, ) -@router.expose(reraise_if_error_type=(FunctionJobIDNotFoundError,)) async def delete_function_job( app: web.Application, *, @@ -288,7 +379,6 @@ async def delete_function_job( ) -@router.expose(reraise_if_error_type=(FunctionJobCollectionIDNotFoundError,)) async def delete_function_job_collection( app: web.Application, *, @@ -304,45 +394,24 @@ async def delete_function_job_collection( ) -@router.expose() -async def update_function_title( +async def update_function( app: web.Application, *, user_id: UserID, product_name: ProductName, function_id: FunctionID, - title: str, + function: FunctionUpdate, ) -> RegisteredFunction: - updated_function = await _functions_repository.update_function_title( + updated_function = await _functions_repository.update_function( app=app, user_id=user_id, product_name=product_name, function_id=function_id, - title=title, + function=function, ) return _decode_function(updated_function) -@router.expose(reraise_if_error_type=(FunctionIDNotFoundError,)) -async def update_function_description( - app: 
web.Application, - *, - user_id: UserID, - product_name: ProductName, - function_id: FunctionID, - description: str, -) -> RegisteredFunction: - updated_function = await _functions_repository.update_function_description( - app=app, - user_id=user_id, - product_name=product_name, - function_id=function_id, - description=description, - ) - return _decode_function(updated_function) - - -@router.expose() async def find_cached_function_jobs( app: web.Application, *, @@ -375,6 +444,9 @@ async def find_cached_function_jobs( project_job_id=returned_function_job.class_specific_data[ "project_job_id" ], + job_creation_task_id=returned_function_job.class_specific_data.get( + "job_creation_task_id" + ), created_at=returned_function_job.created, ) ) @@ -387,9 +459,12 @@ async def find_cached_function_jobs( function_uid=returned_function_job.function_uuid, inputs=returned_function_job.inputs, outputs=None, - solver_job_id=returned_function_job.class_specific_data[ + solver_job_id=returned_function_job.class_specific_data.get( "solver_job_id" - ], + ), + job_creation_task_id=returned_function_job.class_specific_data.get( + "job_creation_task_id" + ), created_at=returned_function_job.created, ) ) @@ -401,7 +476,6 @@ async def find_cached_function_jobs( return to_return_function_jobs -@router.expose(reraise_if_error_type=(FunctionIDNotFoundError,)) async def get_function_input_schema( app: web.Application, *, @@ -418,7 +492,6 @@ async def get_function_input_schema( return _decode_function(returned_function).input_schema -@router.expose(reraise_if_error_type=(FunctionIDNotFoundError,)) async def get_function_output_schema( app: web.Application, *, @@ -435,6 +508,237 @@ async def get_function_output_schema( return _decode_function(returned_function).output_schema +async def get_function_user_permissions( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, + function_id: FunctionID, +) -> FunctionUserAccessRights: + user_permissions = await _functions_repository.get_user_permissions( + app=app, + user_id=user_id, + product_name=product_name, + object_id=function_id, + object_type="function", + ) + return ( + FunctionUserAccessRights( + user_id=user_id, + read=user_permissions.read, + write=user_permissions.write, + execute=user_permissions.execute, + ) + if user_permissions + else FunctionUserAccessRights( + user_id=user_id, + read=False, + write=False, + execute=False, + ) + ) + + +async def list_function_group_permissions( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, + function_id: FunctionID, +) -> list[FunctionGroupAccessRights]: + access_rights_list = await _functions_repository.get_group_permissions( + app=app, + user_id=user_id, + product_name=product_name, + object_ids=[function_id], + object_type="function", + ) + + for object_id, access_rights in access_rights_list: + if object_id == function_id: + return access_rights + + raise FunctionGroupAccessRightsNotFoundError( + function_id=function_id, + product_name=product_name, + ) + + +async def set_function_group_permissions( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, + function_id: FunctionID, + permissions: FunctionGroupAccessRights, +) -> FunctionGroupAccessRights: + access_rights_list = await _functions_repository.set_group_permissions( + app=app, + user_id=user_id, + product_name=product_name, + object_ids=[function_id], + object_type="function", + permission_group_id=permissions.group_id, + read=permissions.read, + write=permissions.write, + 
execute=permissions.execute, + ) + for object_id, access_rights in access_rights_list: + if object_id == function_id: + return access_rights + + raise FunctionGroupAccessRightsNotFoundError( + product_name=product_name, + function_id=function_id, + ) + + +async def remove_function_group_permissions( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, + function_id: FunctionID, + permission_group_id: GroupID, +) -> None: + await _functions_repository.remove_group_permissions( + app=app, + user_id=user_id, + product_name=product_name, + object_ids=[function_id], + object_type="function", + permission_group_id=permission_group_id, + ) + + +async def set_group_permissions( + app: web.Application, + *, + user_id: UserID, + permission_group_id: GroupID, + product_name: ProductName, + object_type: Literal["function", "function_job", "function_job_collection"], + object_ids: list[FunctionID | FunctionJobID | FunctionJobCollectionID], + read: bool | None = None, + write: bool | None = None, + execute: bool | None = None, +) -> list[ + tuple[ + FunctionID | FunctionJobID | FunctionJobCollectionID, FunctionGroupAccessRights + ] +]: + return await _functions_repository.set_group_permissions( + app=app, + user_id=user_id, + product_name=product_name, + object_type=object_type, + object_ids=object_ids, + permission_group_id=permission_group_id, + read=read, + write=write, + execute=execute, + ) + + +async def get_function_job_status( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, +) -> FunctionJobStatus: + return await _functions_repository.get_function_job_status( + app=app, + user_id=user_id, + product_name=product_name, + function_job_id=function_job_id, + ) + + +async def get_function_job_outputs( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, +) -> FunctionOutputs: + return await _functions_repository.get_function_job_outputs( + app=app, + user_id=user_id, + product_name=product_name, + function_job_id=function_job_id, + ) + + +async def update_function_job_outputs( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, + outputs: FunctionOutputs, + check_write_permissions: bool = True, +) -> FunctionOutputs: + checked_permissions: list[Literal["read", "write", "execute"]] = ["read"] + if check_write_permissions: + checked_permissions.append("write") + await _functions_repository.check_user_permissions( + app, + user_id=user_id, + product_name=product_name, + object_type="function_job", + object_id=function_job_id, + permissions=checked_permissions, + ) + + return await _functions_repository.update_function_job_outputs( + app=app, + function_job_id=function_job_id, + outputs=outputs, + ) + + +async def update_function_job_status( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, + function_job_id: FunctionJobID, + job_status: FunctionJobStatus, + check_write_permissions: bool = True, +) -> FunctionJobStatus: + checked_permissions: list[Literal["read", "write", "execute"]] = ["read"] + + if check_write_permissions: + checked_permissions.append("write") + await _functions_repository.check_user_permissions( + app, + user_id=user_id, + product_name=product_name, + object_type="function_job", + object_id=function_job_id, + permissions=checked_permissions, + ) + return await _functions_repository.update_function_job_status( + app=app, + function_job_id=function_job_id, + 
job_status=job_status, + ) + + +async def get_functions_user_api_access_rights( + app: web.Application, + *, + user_id: UserID, + product_name: ProductName, +) -> FunctionUserApiAccessRights: + return await _functions_repository.get_user_api_access_rights( + app=app, + user_id=user_id, + product_name=product_name, + ) + + def _decode_function( function: RegisteredFunctionDB, ) -> RegisteredFunction: @@ -448,6 +752,7 @@ def _decode_function( project_id=function.class_specific_data["project_id"], default_inputs=function.default_inputs, created_at=function.created, + modified_at=function.modified, ) if function.function_class == FunctionClass.SOLVER: @@ -461,6 +766,7 @@ def _decode_function( solver_version=function.class_specific_data["solver_version"], default_inputs=function.default_inputs, created_at=function.created, + modified_at=function.modified, ) raise UnsupportedFunctionClassError(function_class=function.function_class) @@ -501,13 +807,31 @@ def _encode_functionjob( if functionjob.function_class == FunctionClass.PROJECT: class_specific_data = FunctionJobClassSpecificData( { - "project_job_id": str(functionjob.project_job_id), + "project_job_id": ( + str(functionjob.project_job_id) + if functionjob.project_job_id + else None + ), + "job_creation_task_id": ( + str(functionjob.job_creation_task_id) + if functionjob.job_creation_task_id + else None + ), } ) elif functionjob.function_class == FunctionClass.SOLVER: class_specific_data = FunctionJobClassSpecificData( { - "solver_job_id": str(functionjob.solver_job_id), + "solver_job_id": ( + str(functionjob.solver_job_id) + if functionjob.solver_job_id + else None + ), + "job_creation_task_id": ( + str(functionjob.job_creation_task_id) + if functionjob.job_creation_task_id + else None + ), } ) else: @@ -526,22 +850,64 @@ def _encode_functionjob( def _decode_functionjob( - functionjob_db: RegisteredFunctionJobDB, + functionjob_db: RegisteredFunctionJobWithStatusDB | RegisteredFunctionJobDB, ) -> RegisteredFunctionJob: if functionjob_db.function_class == FunctionClass.PROJECT: return RegisteredProjectFunctionJob( uid=functionjob_db.uuid, title=functionjob_db.title, - description="", + description=functionjob_db.description, function_uid=functionjob_db.function_uuid, inputs=functionjob_db.inputs, outputs=functionjob_db.outputs, project_job_id=functionjob_db.class_specific_data["project_job_id"], + job_creation_task_id=functionjob_db.class_specific_data.get( + "job_creation_task_id" + ), created_at=functionjob_db.created, ) if functionjob_db.function_class == FunctionClass.SOLVER: return RegisteredSolverFunctionJob( + uid=functionjob_db.uuid, + title=functionjob_db.title, + description=functionjob_db.description, + function_uid=functionjob_db.function_uuid, + inputs=functionjob_db.inputs, + outputs=functionjob_db.outputs, + solver_job_id=functionjob_db.class_specific_data["solver_job_id"], + job_creation_task_id=functionjob_db.class_specific_data.get( + "job_creation_task_id" + ), + created_at=functionjob_db.created, + ) + + raise UnsupportedFunctionJobClassError( + function_job_class=functionjob_db.function_class + ) + + +def _decode_functionjob_wso( + functionjob_db: RegisteredFunctionJobWithStatusDB, +) -> RegisteredFunctionJobWithStatus: + if functionjob_db.function_class == FunctionClass.PROJECT: + return RegisteredProjectFunctionJobWithStatus( + uid=functionjob_db.uuid, + title=functionjob_db.title, + description="", + function_uid=functionjob_db.function_uuid, + inputs=functionjob_db.inputs, + outputs=functionjob_db.outputs, + 
project_job_id=functionjob_db.class_specific_data["project_job_id"], + created_at=functionjob_db.created, + status=FunctionJobStatus(status=functionjob_db.status), + job_creation_task_id=functionjob_db.class_specific_data.get( + "job_creation_task_id" + ), + ) + + if functionjob_db.function_class == FunctionClass.SOLVER: + return RegisteredSolverFunctionJobWithStatus( uid=functionjob_db.uuid, title=functionjob_db.title, description="", @@ -550,8 +916,82 @@ def _decode_functionjob( outputs=functionjob_db.outputs, solver_job_id=functionjob_db.class_specific_data["solver_job_id"], created_at=functionjob_db.created, + status=FunctionJobStatus(status=functionjob_db.status), + job_creation_task_id=functionjob_db.class_specific_data.get( + "job_creation_task_id" + ), ) raise UnsupportedFunctionJobClassError( function_job_class=functionjob_db.function_class ) + + +def _patch_functionjob( + function_job_db: RegisteredFunctionJobDB, + patch: RegisteredFunctionJobPatch, +) -> RegisteredFunctionJobDB: + if function_job_db.function_class == FunctionClass.PROJECT: + assert patch.function_class == FunctionClass.PROJECT # nosec + return RegisteredFunctionJobDB( + function_class=FunctionClass.PROJECT, + function_uuid=function_job_db.function_uuid, + title=patch.title or function_job_db.title, + uuid=function_job_db.uuid, + description=patch.description or function_job_db.description, + inputs=patch.inputs or function_job_db.inputs, + outputs=patch.outputs or function_job_db.outputs, + created=function_job_db.created, + class_specific_data=FunctionClassSpecificData( + project_job_id=( + f"{patch.project_job_id}" + if patch.project_job_id + else function_job_db.class_specific_data.get("project_job_id") + ), + job_creation_task_id=( + f"{patch.job_creation_task_id}" + if patch.job_creation_task_id + else function_job_db.class_specific_data.get("job_creation_task_id") + ), + ), + ) + if function_job_db.function_class == FunctionClass.SOLVER: + assert patch.function_class == FunctionClass.SOLVER # nosec + return RegisteredFunctionJobDB( + function_class=FunctionClass.SOLVER, + function_uuid=function_job_db.function_uuid, + title=patch.title or function_job_db.title, + uuid=function_job_db.uuid, + description=patch.description or function_job_db.description, + inputs=patch.inputs or function_job_db.inputs, + outputs=patch.outputs or function_job_db.outputs, + created=function_job_db.created, + class_specific_data=FunctionClassSpecificData( + solver_job_id=( + f"{patch.solver_job_id}" + if patch.solver_job_id + else function_job_db.class_specific_data.get("solver_job_id") + ), + job_creation_task_id=( + f"{patch.job_creation_task_id}" + if patch.job_creation_task_id + else function_job_db.class_specific_data.get("job_creation_task_id") + ), + ), + ) + if function_job_db.function_class == FunctionClass.PYTHON_CODE: + assert patch.function_class == FunctionClass.PYTHON_CODE # nosec + return RegisteredFunctionJobDB( + function_class=FunctionClass.PYTHON_CODE, + function_uuid=function_job_db.function_uuid, + title=patch.title or function_job_db.title, + uuid=function_job_db.uuid, + description=patch.description or function_job_db.description, + inputs=patch.inputs or function_job_db.inputs, + outputs=patch.outputs or function_job_db.outputs, + created=function_job_db.created, + class_specific_data=function_job_db.class_specific_data, + ) + raise UnsupportedFunctionJobClassError( + function_job_class=function_job_db.function_class + ) diff --git 
a/services/web/server/src/simcore_service_webserver/functions/_services_metadata/__init__.py b/services/web/server/src/simcore_service_webserver/functions/_services_metadata/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/web/server/src/simcore_service_webserver/functions/_services_metadata/_errors.py b/services/web/server/src/simcore_service_webserver/functions/_services_metadata/_errors.py new file mode 100644 index 000000000000..b18455467519 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/functions/_services_metadata/_errors.py @@ -0,0 +1,5 @@ +from common_library.errors_classes import OsparcErrorMixin + + +class ServiceMetadataNotFoundError(OsparcErrorMixin, Exception): + msg_template = "Service metadata for key {key} and version {version} not found" diff --git a/services/web/server/src/simcore_service_webserver/functions/_services_metadata/_models.py b/services/web/server/src/simcore_service_webserver/functions/_services_metadata/_models.py new file mode 100644 index 000000000000..e9876b7770f2 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/functions/_services_metadata/_models.py @@ -0,0 +1,8 @@ +from models_library.services_types import ServiceKey, ServiceVersion +from pydantic import BaseModel, HttpUrl + + +class ServiceMetadata(BaseModel): + key: ServiceKey + version: ServiceVersion + thumbnail: HttpUrl | None diff --git a/services/web/server/src/simcore_service_webserver/functions/_services_metadata/_proxy.py b/services/web/server/src/simcore_service_webserver/functions/_services_metadata/_proxy.py new file mode 100644 index 000000000000..3c91989a28a4 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/functions/_services_metadata/_proxy.py @@ -0,0 +1,26 @@ +from collections.abc import Iterable + +from aiohttp import web +from models_library.services_types import ServiceKey, ServiceVersion + +from . 
import _repository +from ._models import ServiceMetadata + + +async def batch_get_service_metadata( + app: web.Application, + *, + keys_and_versions: Iterable[tuple[ServiceKey, ServiceVersion]], +) -> dict[tuple[ServiceKey, ServiceVersion], ServiceMetadata]: + return await _repository.batch_service_metadata( + app, keys_and_versions=keys_and_versions + ) + + +async def get_service_metadata( + app: web.Application, + *, + key: ServiceKey, + version: ServiceVersion, +) -> ServiceMetadata: + return await _repository.get_service_metadata(app, key=key, version=version) diff --git a/services/web/server/src/simcore_service_webserver/functions/_services_metadata/_repository.py b/services/web/server/src/simcore_service_webserver/functions/_services_metadata/_repository.py new file mode 100644 index 000000000000..99610f93287b --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/functions/_services_metadata/_repository.py @@ -0,0 +1,72 @@ +from collections.abc import Iterable + +from aiohttp import web +from models_library.services_types import ServiceKey, ServiceVersion +from simcore_postgres_database.models.services import ( + services_meta_data, +) +from simcore_postgres_database.utils_repos import pass_or_acquire_connection +from simcore_service_webserver.functions._services_metadata._errors import ( + ServiceMetadataNotFoundError, +) +from sqlalchemy import select, tuple_ +from sqlalchemy.ext.asyncio import AsyncConnection + +from ...db.plugin import get_asyncpg_engine +from ._models import ServiceMetadata + + +async def batch_service_metadata( + app: web.Application, + connection: AsyncConnection | None = None, + *, + keys_and_versions: Iterable[tuple[ServiceKey, ServiceVersion]], +) -> dict[tuple[ServiceKey, ServiceVersion], ServiceMetadata]: + keys_and_versions = list(keys_and_versions) + if not keys_and_versions: + return {} + + query = select( + services_meta_data.c.key, + services_meta_data.c.version, + services_meta_data.c.thumbnail, + ).where( + tuple_(services_meta_data.c.key, services_meta_data.c.version).in_( + keys_and_versions + ) + ) + + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + result = await conn.execute(query) + rows = result.fetchall() + + return { + (row.key, row.version): ServiceMetadata.model_validate( + row, from_attributes=True + ) + for row in rows + } + + +async def get_service_metadata( + app: web.Application, + connection: AsyncConnection | None = None, + *, + key: ServiceKey, + version: ServiceVersion, +) -> ServiceMetadata: + query = select( + services_meta_data.c.key, + services_meta_data.c.version, + services_meta_data.c.thumbnail, + ).where( + tuple_(services_meta_data.c.key, services_meta_data.c.version) == (key, version) + ) + + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + result = await conn.execute(query) + row = result.one_or_none() + if row is None: + raise ServiceMetadataNotFoundError(key=key, version=version) + + return ServiceMetadata.model_validate(row, from_attributes=True) diff --git a/services/web/server/src/simcore_service_webserver/functions/_services_metadata/proxy.py b/services/web/server/src/simcore_service_webserver/functions/_services_metadata/proxy.py new file mode 100644 index 000000000000..3556c7c36ffe --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/functions/_services_metadata/proxy.py @@ -0,0 +1,9 @@ +from ._models import ServiceMetadata +from ._proxy import batch_get_service_metadata, get_service_metadata + +__all__: 
tuple[str, ...] = ( + "ServiceMetadata", + "batch_get_service_metadata", + "get_service_metadata", +) +# nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/functions/plugin.py b/services/web/server/src/simcore_service_webserver/functions/plugin.py index 7022efa5aa49..d210f283b53b 100644 --- a/services/web/server/src/simcore_service_webserver/functions/plugin.py +++ b/services/web/server/src/simcore_service_webserver/functions/plugin.py @@ -1,14 +1,14 @@ import logging from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func from ._controller import _functions_rest, _functions_rpc _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_FUNCTIONS", diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_core.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_core.py index df192a272d90..c63f669f0986 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_core.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_core.py @@ -5,7 +5,7 @@ from aiohttp import web from servicelib.logging_utils import log_catch, log_context -from ..resource_manager.registry import RedisResourceRegistry, get_registry +from ..resource_manager.registry import get_registry from ._core_disconnected import remove_disconnected_user_resources from ._core_guests import remove_users_manually_marked_as_guests from ._core_orphans import remove_orphaned_services @@ -34,24 +34,31 @@ async def collect_garbage(app: web.Application): The field `garbage_collection_interval_seconds` defines the interval at which this function will be called. """ - registry: RedisResourceRegistry = get_registry(app) + registry = get_registry(app) - with log_catch(_logger, reraise=False), log_context( - _logger, logging.INFO, "Step 1: Removes disconnected user sessions" + with ( + log_catch(_logger, reraise=False), + log_context( + _logger, logging.INFO, "Step 1: Removes disconnected user sessions" + ), ): # Triggers signal to close possible pending opened projects # Removes disconnected GUEST users after they finished their sessions await remove_disconnected_user_resources(registry, app) - with log_catch(_logger, reraise=False), log_context( - _logger, logging.INFO, "Step 2: Removes users manually marked for removal" + with ( + log_catch(_logger, reraise=False), + log_context( + _logger, logging.INFO, "Step 2: Removes users manually marked for removal" + ), ): # if a user was manually marked as GUEST it needs to be # removed together with all the associated projects await remove_users_manually_marked_as_guests(registry, app) - with log_catch(_logger, reraise=False), log_context( - _logger, logging.INFO, "Step 3: Removes orphaned services" + with ( + log_catch(_logger, reraise=False), + log_context(_logger, logging.INFO, "Step 3: Removes orphaned services"), ): # For various reasons, some services remain pending after # the projects are closed or the user was disconencted. 
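The collect_garbage steps above share one guard pattern: log_catch(_logger, reraise=False) logs and swallows any exception so a failing step does not abort the whole cycle, while log_context brackets the step with start and done INFO messages. A minimal sketch of that pattern, assuming only the servicelib.logging_utils helpers already used in this hunk; the step name and _run_step body are illustrative placeholders, not part of the patch:

import logging

from servicelib.logging_utils import log_catch, log_context

_logger = logging.getLogger(__name__)


async def _run_step() -> None:
    # placeholder for a real step, e.g. removing orphaned services
    ...


async def collect_garbage_example() -> None:
    with (
        log_catch(_logger, reraise=False),  # log and swallow errors from this step
        log_context(_logger, logging.INFO, "Step X: example step"),  # start/done log lines
    ):
        await _run_step()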
diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_disconnected.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_disconnected.py index fb5b149cb16d..1d0929b4cbd0 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_disconnected.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_disconnected.py @@ -1,20 +1,15 @@ import logging from aiohttp import web -from redis.asyncio import Redis +from models_library.projects import ProjectID +from pydantic import TypeAdapter from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE -from servicelib.utils import logged_gather +from servicelib.logging_utils import log_catch, log_context -from ..projects._projects_service import remove_project_dynamic_services -from ..projects.exceptions import ProjectLockError, ProjectNotFoundError -from ..redis import get_redis_lock_manager_client +from ..projects import _projects_service from ..resource_manager.registry import ( RedisResourceRegistry, - ResourcesDict, - UserSessionDict, ) -from ._core_guests import remove_guest_user_with_all_its_resources -from .settings import GUEST_USER_RC_LOCK_FORMAT _logger = logging.getLogger(__name__) @@ -22,138 +17,52 @@ async def remove_disconnected_user_resources( registry: RedisResourceRegistry, app: web.Application ) -> None: - lock_manager: Redis = get_redis_lock_manager_client(app) - - # - # In redis jargon, every entry is denoted as "key" - # - A key can contain one or more fields: name-value pairs - # - A key can have a limited livespan by setting the Time-to-live (TTL) which - # is automatically decreasing - # - Every user can open multiple sessions (e.g. in different tabs and/or browser) and - # each session is hierarchically represented in the redis registry with two keys: - # - "alive" is a string that keeps a TLL of the user session - # - "resources" is a hash toto keep project and websocket ids + # NOTE: + # Each user session is represented in the redis registry with two keys: + # - "alive" is a string that keeps a TTL of the user session + # - "resources" is a redis hash to keep project and websocket ids attached to the user session + # when the alive key expires, it means the user session is disconnected + # and the resources attached to that user session shall be closed and removed # - all_session_alive, all_sessions_dead = await registry.get_all_resource_keys() - _logger.debug("potential dead keys: %s", all_sessions_dead) + _, dead_user_sessions = await registry.get_all_resource_keys() + _logger.debug("potential dead keys: %s", dead_user_sessions) # clean up all resources of expired keys - for dead_session in all_sessions_dead: - - try: - user_id = int(dead_session["user_id"]) - except (KeyError, ValueError): # noqa: PERF203 - continue - - if await lock_manager.lock( - GUEST_USER_RC_LOCK_FORMAT.format(user_id=user_id) - ).locked(): - _logger.info( - "Skipping garbage-collecting %s since it is still locked", - f"{user_id=}", - ) - continue + for dead_session in dead_user_sessions: + user_id = dead_session.user_id # (0) If key has no resources => remove from registry and continue - resources: ResourcesDict = await registry.get_resources(dead_session) + resources = await registry.get_resources(dead_session) if not resources: await registry.remove_key(dead_session) continue - # (1,2) CAREFULLY releasing every resource acquired by the expired key - _logger.info( - "%s expired. 
Checking resources to cleanup", - f"{dead_session=}", - ) - for resource_name, resource_value in resources.items(): - # Releasing a resource consists of two steps - # - (1) release actual resource (e.g. stop service, close project, deallocate memory, etc) - # - (2) remove resource field entry in expired key registry after (1) is completed. - - # collects a list of keys for (2) - keys_to_update = [ - dead_session, - ] - - # Every resource might be SHARED with other keys. - # In that case, the resource is released by THE LAST DYING KEY - # (we could call this the "last-standing-man" pattern! :-) ) - # - other_sessions_with_this_resource: list[UserSessionDict] = [ - k - for k in await registry.find_keys((resource_name, f"{resource_value}")) - if k != dead_session - ] - is_resource_still_in_use: bool = any( - k in all_session_alive for k in other_sessions_with_this_resource - ) - - if not is_resource_still_in_use: - # adds the remaining resource entries for (2) - keys_to_update.extend(other_sessions_with_this_resource) - - # (1) releasing acquired resources - _logger.info( - "(1) Releasing resource %s:%s acquired by expired %s", - f"{resource_name=}", - f"{resource_value=}", - f"{dead_session!r}", - ) - - if resource_name == "project_id": - # inform that the project can be closed on the backend side - # - try: - _logger.info( - "Closing services for project '%s'", resource_value - ) - await remove_project_dynamic_services( - user_id=user_id, - project_uuid=f"{resource_value}", - app=app, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - user_name={ - "first_name": "garbage", - "last_name": "collector", - }, - ) - - except (ProjectNotFoundError, ProjectLockError) as err: - _logger.warning( - ( - "Could not remove project interactive services user_id=%s " - "project_uuid=%s. 
Check the logs above for details [%s]" - ), - user_id, - resource_value, - err, - ) - - # ONLY GUESTS: if this user was a GUEST also remove it from the database - # with the only associated project owned - await remove_guest_user_with_all_its_resources( - app=app, - user_id=user_id, - ) - - # (2) remove resource field in collected keys since (1) is completed + # (1) releasing acquired resources (currently only projects), + # that means closing project for the disconnected user _logger.info( - "(2) Removing field for released resource %s:%s from registry keys: %s", + "(1) Releasing resource %s:%s acquired by expired %s", f"{resource_name=}", f"{resource_value=}", - keys_to_update, - ) - await logged_gather( - *[ - registry.remove_resource(key, resource_name) - for key in keys_to_update - ], - reraise=False, + f"{dead_session!r}", ) - # NOTE: - # - if releasing a resource (1) fails, the resource is not removed from the registry and it allows GC to try in next round - # - if any task in (2) fails, GC will clean them up in next round as well - # - if all resource fields are removed from a key, next GC iteration will remove the key (see (0)) + if resource_name == "project_id": + project_id = TypeAdapter(ProjectID).validate_python(resource_value) + with ( + log_catch(_logger, reraise=False), + log_context( + _logger, + logging.INFO, + f"Closing project {project_id} for user {user_id=}", + ), + ): + await _projects_service.close_project_for_user( + user_id=user_id, + project_uuid=project_id, + client_session_id=dead_session.client_session_id, + app=app, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + wait_for_service_closed=True, + ) diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_guests.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_guests.py index 20f3bb45e8ba..6fda91dbb6af 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_guests.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_guests.py @@ -19,13 +19,7 @@ from ..projects.exceptions import ProjectDeleteError, ProjectNotFoundError from ..redis import get_redis_lock_manager_client from ..resource_manager.registry import RedisResourceRegistry -from ..users import exceptions -from ..users.api import ( - delete_user_without_projects, - get_guest_user_ids_and_names, - get_user_primary_group_id, - get_user_role, -) +from ..users import exceptions, users_service from ..users.exceptions import UserNotFoundError from ._core_utils import get_new_project_owner_gid, replace_current_owner from .settings import GUEST_USER_RC_LOCK_FORMAT @@ -48,7 +42,7 @@ async def _delete_all_projects_for_user(app: web.Application, user_id: int) -> N """ # recover user's primary_gid try: - project_owner_primary_gid = await get_user_primary_group_id( + project_owner_primary_gid = await users_service.get_user_primary_group_id( app=app, user_id=user_id ) except exceptions.UserNotFoundError: @@ -149,7 +143,7 @@ async def remove_guest_user_with_all_its_resources( """Removes a GUEST user with all its associated projects and S3/MinIO files""" try: - user_role: UserRole = await get_user_role(app, user_id=user_id) + user_role: UserRole = await users_service.get_user_role(app, user_id=user_id) if user_role > UserRole.GUEST: # NOTE: This acts as a protection barrier to avoid removing resources to more # priviledge users @@ -165,7 +159,7 @@ async def remove_guest_user_with_all_its_resources( "Deleting user %s because it is a 
GUEST", f"{user_id=}", ) - await delete_user_without_projects(app, user_id) + await users_service.delete_user_without_projects(app, user_id=user_id) except ( DatabaseError, @@ -198,15 +192,15 @@ async def remove_users_manually_marked_as_guests( ) = await registry.get_all_resource_keys() skip_users = { - int(user_session["user_id"]) + user_session.user_id for user_session in itertools.chain( all_user_session_alive, all_user_sessions_dead ) } # Prevent creating this list if a guest user - guest_users: list[tuple[UserID, UserNameID]] = await get_guest_user_ids_and_names( - app + guest_users: list[tuple[UserID, UserNameID]] = ( + await users_service.get_guest_user_ids_and_names(app) ) for guest_user_id, guest_user_name in guest_users: diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_orphans.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_orphans.py index 1020f1dcb1d5..31d79dced2ed 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_orphans.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_orphans.py @@ -2,15 +2,14 @@ from typing import Final from aiohttp import web +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from common_library.users_enums import UserRole from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( DynamicServiceStop, ) -from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE -from servicelib.logging_errors import create_troubleshotting_log_kwargs from servicelib.logging_utils import log_catch, log_context from servicelib.utils import limited_as_completed, limited_gather @@ -21,7 +20,8 @@ ) from ..projects.api import has_user_project_access_rights from ..resource_manager.registry import RedisResourceRegistry -from ..users.api import get_user_role +from ..resource_manager.service import list_opened_project_ids +from ..users import users_service from ..users.exceptions import UserNotFoundError _logger = logging.getLogger(__name__) @@ -38,22 +38,29 @@ async def _remove_service( save_service_state = False else: try: - if await get_user_role(app, user_id=service.user_id) <= UserRole.GUEST: - save_service_state = False - else: - save_service_state = await has_user_project_access_rights( + user_role: UserRole = await users_service.get_user_role( + app, user_id=service.user_id + ) + except (UserNotFoundError, ValueError): + save_service_state = False + else: + save_service_state = ( + user_role > UserRole.GUEST + and await has_user_project_access_rights( app, project_id=service.project_id, user_id=service.user_id, permission="write", ) - except (UserNotFoundError, ValueError): - save_service_state = False + ) - with log_catch(_logger, reraise=False), log_context( - _logger, - logging.INFO, - f"removing {(service.node_uuid, service.host)} with {save_service_state=}", + with ( + log_catch(_logger, reraise=False), + log_context( + _logger, + logging.INFO, + f"removing {(service.node_uuid, service.host)} with {save_service_state=}", + ), ): await dynamic_scheduler_service.stop_dynamic_service( app, @@ -67,16 +74,6 @@ async def _remove_service( ) -async def _list_opened_project_ids(registry: RedisResourceRegistry) -> list[ProjectID]: - opened_projects: list[ProjectID] = [] - all_session_alive, _ 
= await registry.get_all_resource_keys() - for alive_session in all_session_alive: - resources = await registry.get_resources(alive_session) - if "project_id" in resources: - opened_projects.append(ProjectID(resources["project_id"])) - return opened_projects - - async def remove_orphaned_services( registry: RedisResourceRegistry, app: web.Application ) -> None: @@ -101,7 +98,7 @@ async def remove_orphaned_services( service.node_uuid: service for service in running_services } - known_opened_project_ids = await _list_opened_project_ids(registry) + known_opened_project_ids = await list_opened_project_ids(registry) # NOTE: Always skip orphan repmoval when `list_node_ids_in_project` raises an error. # Why? If a service is running but the nodes form the correspondign project cannot be listed, @@ -119,7 +116,7 @@ async def remove_orphaned_services( potentially_running_service_ids.append(project_nodes) except BaseException as e: # pylint:disable=broad-exception-caught _logger.warning( - create_troubleshotting_log_kwargs( + create_troubleshooting_log_kwargs( ( "Skipping orpahn services removal, call to " "`list_node_ids_in_project` raised" diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_utils.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_utils.py index 67106abddcc8..b3039394086d 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_core_utils.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_core_utils.py @@ -7,7 +7,7 @@ from models_library.users import UserID from simcore_postgres_database.aiopg_errors import DatabaseError -from ..groups.api import get_group_from_gid +from ..groups.api import get_group_by_gid from ..projects._projects_repository_legacy import ( APP_PROJECT_DBAPI, ProjectAccessRights, @@ -17,7 +17,7 @@ delete_project_group_without_checking_permissions, ) from ..projects.exceptions import ProjectNotFoundError -from ..users.api import get_user, get_user_id_from_gid, get_users_in_group +from ..users import users_service from ..users.exceptions import UserNotFoundError _logger = logging.getLogger(__name__) @@ -34,15 +34,17 @@ async def _fetch_new_project_owner_from_groups( # go through user_to_groups table and fetch all uid for matching gid for group_gid in standard_groups: # remove the current owner from the bunch - target_group_users = await get_users_in_group(app=app, gid=int(group_gid)) - { - user_id - } + target_group_users = await users_service.get_users_in_group( + app=app, gid=int(group_gid) + ) - {user_id} _logger.info("Found group users '%s'", target_group_users) for possible_user_id in target_group_users: # check if the possible_user is still present in the db try: - possible_user = await get_user(app=app, user_id=possible_user_id) + possible_user = await users_service.get_user( + app=app, user_id=possible_user_id + ) return int(possible_user["primary_gid"]) except UserNotFoundError: # noqa: PERF203 _logger.warning( @@ -81,7 +83,7 @@ async def get_new_project_owner_gid( standard_groups = {} # groups of users, multiple users can be part of this primary_groups = {} # each individual user has a unique primary group for other_gid in other_users_access_rights: - group: Group | None = await get_group_from_gid(app=app, group_id=int(other_gid)) + group: Group | None = await get_group_by_gid(app=app, group_id=int(other_gid)) # only process for users and groups with write access right if group is None: @@ -130,7 +132,7 @@ async def replace_current_owner( 
project: dict, ) -> None: try: - new_project_owner_id = await get_user_id_from_gid( + new_project_owner_id = await users_service.get_user_id_from_gid( app=app, primary_gid=new_project_owner_gid ) diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_api_keys.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_api_keys.py index b992d25b3876..a0a67a833f85 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_api_keys.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_api_keys.py @@ -3,74 +3,51 @@ """ -import asyncio import logging -from collections.abc import AsyncIterator, Callable +from collections.abc import AsyncIterator +from datetime import timedelta from aiohttp import web -from tenacity import retry -from tenacity.before_sleep import before_sleep_log -from tenacity.wait import wait_exponential +from servicelib.background_task_utils import exclusive_periodic +from servicelib.logging_utils import log_context from ..api_keys import api_keys_service +from ..redis import get_redis_lock_manager_client_sdk +from ._tasks_utils import CleanupContextFunc, periodic_task_lifespan -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) -CleanupContextFunc = Callable[[web.Application], AsyncIterator[None]] - -_PERIODIC_TASK_NAME = f"{__name__}.prune_expired_api_keys_periodically" -_APP_TASK_KEY = f"{_PERIODIC_TASK_NAME}.task" - - -@retry( - wait=wait_exponential(min=5, max=30), - before_sleep=before_sleep_log(logger, logging.WARNING), -) -async def _run_task(app: web.Application): - """Periodically check expiration dates and updates user status - - It is resilient, i.e. if update goes wrong, it waits a bit and retries - """ +async def _prune_expired_api_keys(app: web.Application): if deleted := await api_keys_service.prune_expired_api_keys(app): # broadcast force logout of user_id for api_key in deleted: - logger.info("API-key %s expired and was removed", f"{api_key=}") + _logger.info("API-key %s expired and was removed", f"{api_key=}") else: - logger.info("No API keys expired") - - -async def _run_periodically(app: web.Application, wait_period_s: float): - """Periodically check expiration dates and updates user status - - It is resilient, i.e. 
if update goes wrong, it waits a bit and retries -    """ -    while True: -        await _run_task(app) -        await asyncio.sleep(wait_period_s) +        _logger.info("No API keys expired")   def create_background_task_to_prune_api_keys( -    wait_period_s: float, task_name: str = _PERIODIC_TASK_NAME +    wait_period_s: float, ) -> CleanupContextFunc: -    async def _cleanup_ctx_fun( -        app: web.Application, -    ) -> AsyncIterator[None]: -        # setup -        task = asyncio.create_task( -            _run_periodically(app, wait_period_s), -            name=task_name, -        ) -        app[_APP_TASK_KEY] = task -        yield +    async def _cleanup_ctx_fun(app: web.Application) -> AsyncIterator[None]: +        interval = timedelta(seconds=wait_period_s) -        # tear-down -        task.cancel() -        try: -            await task -        except asyncio.CancelledError: -            assert task.cancelled()  # nosec +        @exclusive_periodic( +            # Function-exclusiveness is required to avoid multiple tasks like this one running concurrently +            get_redis_lock_manager_client_sdk(app), +            task_interval=interval, +            retry_after=min(timedelta(seconds=10), interval / 10), +        ) +        async def _prune_expired_api_keys_periodically() -> None: +            with log_context(_logger, logging.INFO, "Pruning expired API keys"): +                await _prune_expired_api_keys(app) + +        async for _ in periodic_task_lifespan( +            app, _prune_expired_api_keys_periodically +        ): +            yield      return _cleanup_ctx_fun diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_core.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_core.py index dfb7237d97f1..f29634ec7de2 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_core.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_core.py @@ -1,103 +1,46 @@ -""" Setup and running of periodic background task +"""Setup and running of periodic background task  Specifics of the gc implementation should go into garbage_collector_core.py  """ -import asyncio import logging -from collections.abc import AsyncGenerator +from collections.abc import AsyncIterator +from datetime import timedelta  from aiohttp import web +from servicelib.background_task_utils import exclusive_periodic from servicelib.logging_utils import log_context +from ..redis import get_redis_lock_manager_client_sdk from ._core import collect_garbage +from ._tasks_utils import CleanupContextFunc, periodic_task_lifespan from .settings import GarbageCollectorSettings, get_plugin_settings  _logger = logging.getLogger(__name__) +_GC_TASK_NAME = f"{__name__}._collect_garbage_periodically" -_GC_TASK_NAME = f"background-task.{__name__}.collect_garbage_periodically" -_GC_TASK_CONFIG = f"{_GC_TASK_NAME}.config" -_GC_TASK = f"{_GC_TASK_NAME}.task" +def create_background_task_for_garbage_collection() -> CleanupContextFunc: -async def run_background_task(app: web.Application) -> AsyncGenerator: -    # SETUP ------ -    # create a background task to collect garbage periodically -    assert not any(  # nosec -        t.get_name() == _GC_TASK_NAME for t in asyncio.all_tasks() -    ), "Garbage collector task already running. 
ONLY ONE expected"  # nosec +    async def _cleanup_ctx_fun(app: web.Application) -> AsyncIterator[None]: +        settings: GarbageCollectorSettings = get_plugin_settings(app) +        interval = timedelta(seconds=settings.GARBAGE_COLLECTOR_INTERVAL_S) -    gc_bg_task = asyncio.create_task( -        _collect_garbage_periodically(app), name=_GC_TASK_NAME -    ) -    # attaches variable to the app's lifetime -    app[_GC_TASK] = gc_bg_task +        @exclusive_periodic( +            # Function-exclusiveness is required to avoid multiple tasks like this one running concurrently +            get_redis_lock_manager_client_sdk(app), +            task_interval=interval, +            retry_after=min(timedelta(seconds=10), interval / 10), +        ) +        async def _collect_garbage_periodically() -> None: +            with log_context(_logger, logging.INFO, "Garbage collect cycle"): +                await collect_garbage(app) -    # FIXME: added this config to overcome the state in which the -    # task cancelation is ignored and the exceptions enter in a loop -    # that never stops the background task. This flag is an additional -    # mechanism to enforce stopping the background task -    # -    # Implemented with a mutable dict to avoid -    # DeprecationWarning: Changing state of started or joined application is deprecated -    # -    app[_GC_TASK_CONFIG] = {"force_stop": False, "name": _GC_TASK_NAME} +        async for _ in periodic_task_lifespan( +            app, _collect_garbage_periodically, task_name=_GC_TASK_NAME +        ): +            yield -    yield - -    # TEAR-DOWN ----- -    # controlled cancelation of the gc task -    try: -        _logger.info("Stopping garbage collector...") - -        ack = gc_bg_task.cancel() -        assert ack  # nosec - -        app[_GC_TASK_CONFIG]["force_stop"] = True - -        await gc_bg_task - -    except asyncio.CancelledError: -        assert gc_bg_task.cancelled()  # nosec - - -async def _collect_garbage_periodically(app: web.Application): -    settings: GarbageCollectorSettings = get_plugin_settings(app) -    interval = settings.GARBAGE_COLLECTOR_INTERVAL_S - -    while True: -        try: -            while True: -                with log_context(_logger, logging.INFO, "Garbage collect cycle"): -                    await collect_garbage(app) - -                if app[_GC_TASK_CONFIG].get("force_stop", False): -                    msg = "Forced to stop garbage collection" -                    raise RuntimeError(msg) - -                _logger.info("Garbage collect cycle pauses %ss", interval) -                await asyncio.sleep(interval) - -        except asyncio.CancelledError:  # EXIT  # noqa: PERF203 -            _logger.info( -                "Stopped: Garbage collection task was cancelled, it will not restart!" 
- ) -            # do not catch Cancellation errors -            raise - -        except Exception:  # RESILIENT restart # pylint: disable=broad-except -            _logger.warning( -                "Stopped: There was an error during garbage collection, restarting...", -                exc_info=True, -            ) - -            if app[_GC_TASK_CONFIG].get("force_stop", False): -                _logger.warning("Forced to stop garbage collection") -                break - -            # will wait 5 seconds to recover before restarting to avoid restart loops -            # - it might be that db/redis is down, etc -            # -            await asyncio.sleep(5) +    return _cleanup_ctx_fun diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_documents.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_documents.py new file mode 100644 index 000000000000..4dcbc5b0f40f --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_documents.py @@ -0,0 +1,43 @@ +""" +Scheduled tasks addressing project documents + +""" + +import logging +from collections.abc import AsyncIterator +from datetime import timedelta + +from aiohttp import web +from servicelib.background_task_utils import exclusive_periodic +from servicelib.logging_utils import log_context + +from ..projects import projects_documents_service +from ..redis import get_redis_lock_manager_client_sdk +from ._tasks_utils import CleanupContextFunc, periodic_task_lifespan + +_logger = logging.getLogger(__name__) + + +def create_background_task_to_prune_documents(wait_s: float) -> CleanupContextFunc: + +    async def _cleanup_ctx_fun(app: web.Application) -> AsyncIterator[None]: +        interval = timedelta(seconds=wait_s) + +        @exclusive_periodic( +            # Function-exclusiveness is required to avoid multiple tasks like this one running concurrently +            get_redis_lock_manager_client_sdk(app), +            task_interval=interval, +            retry_after=min(timedelta(seconds=10), interval / 10), +        ) +        async def _prune_documents_periodically() -> None: +            with log_context( +                _logger, +                logging.INFO, +                "Deleting project documents in Redis `documents` table started", +            ): +                await projects_documents_service.remove_project_documents_as_admin(app) + +        async for _ in periodic_task_lifespan(app, _prune_documents_periodically): +            yield + +    return _cleanup_ctx_fun diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_trash.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_trash.py index 46df72c0a708..2307fee336ab 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_trash.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_trash.py @@ -1,64 +1,39 @@ """ -    Scheduled tasks addressing users +Scheduled tasks addressing users  """ -import asyncio import logging -from collections.abc import AsyncIterator, Callable +from collections.abc import AsyncIterator +from datetime import timedelta  from aiohttp import web +from servicelib.background_task_utils import exclusive_periodic from servicelib.logging_utils import log_context -from tenacity import retry -from tenacity.before_sleep import before_sleep_log -from tenacity.wait import wait_exponential +from ..redis import get_redis_lock_manager_client_sdk from ..trash import trash_service +from ._tasks_utils import CleanupContextFunc, periodic_task_lifespan  _logger = logging.getLogger(__name__) -CleanupContextFunc = Callable[[web.Application], AsyncIterator[None]] +def create_background_task_to_prune_trash(wait_s: float) -> CleanupContextFunc: -_PERIODIC_TASK_NAME = f"{__name__}" -_APP_TASK_KEY = 
f"{_PERIODIC_TASK_NAME}.task" + async def _cleanup_ctx_fun(app: web.Application) -> AsyncIterator[None]: + interval = timedelta(seconds=wait_s) - -@retry( - wait=wait_exponential(min=5, max=20), - before_sleep=before_sleep_log(_logger, logging.WARNING), -) -async def _run_task(app: web.Application): - with log_context(_logger, logging.INFO, "Deleting expired trashed items"): - await trash_service.safe_delete_expired_trash_as_admin(app) - - -async def _run_periodically(app: web.Application, wait_interval_s: float): - while True: - await _run_task(app) - await asyncio.sleep(wait_interval_s) - - -def create_background_task_to_prune_trash( - wait_s: float, task_name: str = _PERIODIC_TASK_NAME -) -> CleanupContextFunc: - async def _cleanup_ctx_fun( - app: web.Application, - ) -> AsyncIterator[None]: - # setup - task = asyncio.create_task( - _run_periodically(app, wait_s), - name=task_name, + @exclusive_periodic( + # Function-exclusiveness is required to avoid multiple tasks like thisone running concurrently + get_redis_lock_manager_client_sdk(app), + task_interval=interval, + retry_after=min(timedelta(seconds=10), interval / 10), ) - app[_APP_TASK_KEY] = task - - yield + async def _prune_trash_periodically() -> None: + with log_context(_logger, logging.INFO, "Deleting expired trashed items"): + await trash_service.safe_delete_expired_trash_as_admin(app) - # tear-down - task.cancel() - try: - await task - except asyncio.CancelledError: - assert task.cancelled() # nosec + async for _ in periodic_task_lifespan(app, _prune_trash_periodically): + yield return _cleanup_ctx_fun diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_users.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_users.py index edfeb47230b3..25caef6bbd92 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_users.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_users.py @@ -3,29 +3,24 @@ """ -import asyncio import logging -from collections.abc import AsyncIterator, Callable +from collections.abc import AsyncIterator +from datetime import timedelta from aiohttp import web +from common_library.logging.logging_base import get_log_record_extra from models_library.users import UserID -from servicelib.logging_utils import get_log_record_extra, log_context -from tenacity import retry -from tenacity.before_sleep import before_sleep_log -from tenacity.wait import wait_exponential +from servicelib.background_task_utils import exclusive_periodic +from servicelib.logging_utils import log_context from ..login import login_service -from ..security.api import clean_auth_policy_cache -from ..users.api import update_expired_users +from ..redis import get_redis_lock_manager_client_sdk +from ..security import security_service +from ..users import users_service +from ._tasks_utils import CleanupContextFunc, periodic_task_lifespan _logger = logging.getLogger(__name__) -CleanupContextFunc = Callable[[web.Application], AsyncIterator[None]] - - -_PERIODIC_TASK_NAME = f"{__name__}.update_expired_users_periodically" -_APP_TASK_KEY = f"{_PERIODIC_TASK_NAME}.task" - async def notify_user_logout_all_sessions( app: web.Application, user_id: UserID @@ -49,20 +44,11 @@ async def notify_user_logout_all_sessions( ) -@retry( - wait=wait_exponential(min=5, max=20), - before_sleep=before_sleep_log(_logger, logging.WARNING), - # NOTE: this function does suppresses all exceptions and retry indefinitly -) async def 
_update_expired_users(app: web.Application): -    """ -    It is resilient, i.e. if update goes wrong, it waits a bit and retries -    """ - -    if updated := await update_expired_users(app): +    if updated := await users_service.update_expired_users(app): # expired users might be cached in the auth. If so, any request # with this user-id will get thru producing unexpected side-effects -        await clean_auth_policy_cache(app) +        await security_service.clean_auth_policy_cache(app) # broadcast force logout of user_id for user_id in updated: @@ -81,36 +67,21 @@ async def _update_expired_users(app: web.Application): _logger.info("No users expired") -async def _update_expired_users_periodically( -    app: web.Application, wait_interval_s: float -): -    """Periodically checks expiration dates and updates user status""" +def create_background_task_for_trial_accounts(wait_s: float) -> CleanupContextFunc: +    async def _cleanup_ctx_fun(app: web.Application) -> AsyncIterator[None]: +        interval = timedelta(seconds=wait_s) -    while True: -        await _update_expired_users(app) -        await asyncio.sleep(wait_interval_s) - - -def create_background_task_for_trial_accounts( -    wait_s: float, task_name: str = _PERIODIC_TASK_NAME -) -> CleanupContextFunc: -    async def _cleanup_ctx_fun( -        app: web.Application, -    ) -> AsyncIterator[None]: -        # setup -        task = asyncio.create_task( -            _update_expired_users_periodically(app, wait_s), -            name=task_name, +        @exclusive_periodic( +            # Function-exclusiveness is required to avoid multiple tasks like this one running concurrently +            get_redis_lock_manager_client_sdk(app), +            task_interval=interval, +            retry_after=min(timedelta(seconds=10), interval / 10), ) -        app[_APP_TASK_KEY] = task +        async def _update_expired_users_periodically() -> None: +            with log_context(_logger, logging.INFO, "Updating expired users"): +                await _update_expired_users(app) -        yield - -        # tear-down -        task.cancel() -        try: -            await task -        except asyncio.CancelledError: -            assert task.cancelled()  # nosec +        async for _ in periodic_task_lifespan(app, _update_expired_users_periodically): +            yield      return _cleanup_ctx_fun diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_utils.py b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_utils.py new file mode 100644 index 000000000000..4971389a73f5 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/_tasks_utils.py @@ -0,0 +1,57 @@ +""" +Common utilities for background task management in garbage collector +""" + +import asyncio +from collections.abc import AsyncIterator, Callable, Coroutine + +from aiohttp import web +from common_library.async_tools import cancel_wait_task + +CleanupContextFunc = Callable[[web.Application], AsyncIterator[None]] + + +def create_task_name(coro: Callable) -> str: +    """ +    Returns a unique name for the task based on its module and function name. +    This is useful for logging and debugging purposes. +    """ +    return f"{coro.__module__}.{coro.__name__}" + + +async def periodic_task_lifespan( +    app: web.Application, +    periodic_async_func: Callable[[], Coroutine[None, None, None]], +    *, +    task_name: str | None = None, +) -> AsyncIterator[None]: +    """ +    Generic setup and teardown for periodic background tasks. 
+ + Args: + app: The aiohttp web application + periodic_async_func: The periodic coroutine function (already decorated with @exclusive_periodic) + """ + assert getattr(periodic_async_func, "__exclusive_periodic__", False) # nosec + + # setup + task_name = task_name or create_task_name(periodic_async_func) + + task = asyncio.create_task( + periodic_async_func(), + name=task_name, + ) + + # Keeping a reference in app's state to prevent premature garbage collection of the task + app_task_key = f"gc-tasks/{task_name}" + if app_task_key in app: + msg = f"Task {task_name} is already registered in the app state" + raise ValueError(msg) + + app[app_task_key] = task + + yield + + # tear-down + await cancel_wait_task(task) + app.pop(app_task_key, None) diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/plugin.py b/services/web/server/src/simcore_service_webserver/garbage_collector/plugin.py index 3c42457ece55..a43834d14af7 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/plugin.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/plugin.py @@ -1,21 +1,22 @@ import logging from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup from servicelib.logging_utils import set_parent_module_log_level from ..application_settings import get_application_settings +from ..application_setup import ModuleCategory, app_setup_func from ..login.plugin import setup_login_storage from ..products.plugin import setup_products from ..projects._projects_repository_legacy import setup_projects_db +from ..redis import setup_redis from ..socketio.plugin import setup_socketio -from . import _tasks_api_keys, _tasks_core, _tasks_trash, _tasks_users +from . 
import _tasks_api_keys, _tasks_core, _tasks_documents, _tasks_trash, _tasks_users from .settings import get_plugin_settings  _logger = logging.getLogger(__name__)  -@app_module_setup( +@app_setup_func( "simcore_service_webserver.garbage_collector", ModuleCategory.ADDON, settings_name="WEBSERVER_GARBAGE_COLLECTOR", @@ -25,8 +26,12 @@ def setup_garbage_collector(app: web.Application) -> None: # for trashing setup_products(app) +    # distributed exclusive periodic tasks +    setup_redis(app) + # - project-api needs access to db setup_projects_db(app) + # - project needs access to socketio via notify_project_state_update setup_socketio(app) # - project needs access to user-api that is connected to login plugin @@ -34,7 +39,7 @@ def setup_garbage_collector(app: web.Application) -> None: settings = get_plugin_settings(app) -    app.cleanup_ctx.append(_tasks_core.run_background_task) +    app.cleanup_ctx.append(_tasks_core.create_background_task_for_garbage_collection()) set_parent_module_log_level( _logger.name, min(logging.INFO, get_application_settings(app).log_level) @@ -61,3 +66,8 @@ def setup_garbage_collector(app: web.Application) -> None: app.cleanup_ctx.append( _tasks_trash.create_background_task_to_prune_trash(wait_period_s) ) + +    wait_period_s = settings.GARBAGE_COLLECTOR_PRUNE_DOCUMENTS_INTERVAL_S +    app.cleanup_ctx.append( +        _tasks_documents.create_background_task_to_prune_documents(wait_period_s) +    ) diff --git a/services/web/server/src/simcore_service_webserver/garbage_collector/settings.py b/services/web/server/src/simcore_service_webserver/garbage_collector/settings.py index 46863d458640..3682ad61d4ba 100644 --- a/services/web/server/src/simcore_service_webserver/garbage_collector/settings.py +++ b/services/web/server/src/simcore_service_webserver/garbage_collector/settings.py @@ -1,8 +1,11 @@ +from typing import Annotated + from aiohttp import web from pydantic import Field, PositiveInt -from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY from settings_library.base import BaseCustomSettings +from ..constants import APP_SETTINGS_KEY + # lock names and format strings GUEST_USER_RC_LOCK_FORMAT = f"{__name__}:redlock:garbage_collect_user:{{user_id}}" @@ -13,19 +16,34 @@ class GarbageCollectorSettings(BaseCustomSettings): -    GARBAGE_COLLECTOR_INTERVAL_S: PositiveInt = Field( -        30 * _SEC, -        description="Waiting time between consecutive runs of the garbage-colector", +    GARBAGE_COLLECTOR_INTERVAL_S: Annotated[ +        PositiveInt, +        Field( +            description="Waiting time between consecutive runs of the garbage-collector" +        ), +    ] = ( +        30 * _SEC ) -    GARBAGE_COLLECTOR_EXPIRED_USERS_CHECK_INTERVAL_S: PositiveInt = Field( -        1 * _HOUR, -        description="Time period between checks of expiration dates for trial users", +    GARBAGE_COLLECTOR_EXPIRED_USERS_CHECK_INTERVAL_S: Annotated[ +        PositiveInt, +        Field( +            description="Time period between checks of expiration dates for trial users" +        ), +    ] = ( +        1 * _HOUR ) -    GARBAGE_COLLECTOR_PRUNE_APIKEYS_INTERVAL_S: PositiveInt = Field( -        _HOUR, -        description="Wait time between periodic pruning of expired API keys", +    GARBAGE_COLLECTOR_PRUNE_APIKEYS_INTERVAL_S: Annotated[ +        PositiveInt, +        Field(description="Wait time between periodic pruning of expired API keys"), +    ] = _HOUR + +    GARBAGE_COLLECTOR_PRUNE_DOCUMENTS_INTERVAL_S: Annotated[ +        PositiveInt, +        Field(description="Wait time between periodic pruning of documents"), +    ] = ( +        30 * _MINUTE ) diff --git a/services/web/server/src/simcore_service_webserver/groups/_classifiers_service.py 
b/services/web/server/src/simcore_service_webserver/groups/_classifiers_service.py index 68b9e2a5fdb7..1e1155613e25 100644 --- a/services/web/server/src/simcore_service_webserver/groups/_classifiers_service.py +++ b/services/web/server/src/simcore_service_webserver/groups/_classifiers_service.py @@ -1,11 +1,11 @@ """ - - Every group has a set of "official" classifiers and its users tag studies with them - - Classifiers can be defined in two ways: - 1. a static bundle that is stored in group_classifiers.c.bundle - 2. using research resources from scicrunch.org (see group_classifiers.c.uses_scicrunch ) - - The API Classifiers model returned in - 1. is the bundle - 2. a dynamic tree built from validated RRIDs (in ResearchResourceRepository) +- Every group has a set of "official" classifiers and its users tag studies with them +- Classifiers can be defined in two ways: + 1. a static bundle that is stored in group_classifiers.c.bundle + 2. using research resources from scicrunch.org (see group_classifiers.c.uses_scicrunch ) +- The API Classifiers model returned in + 1. is the bundle + 2. a dynamic tree built from validated RRIDs (in ResearchResourceRepository) """ import logging @@ -14,6 +14,7 @@ import sqlalchemy as sa from aiohttp import web from aiopg.sa.result import RowProxy +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from pydantic import ( BaseModel, Field, @@ -25,7 +26,7 @@ ) from simcore_postgres_database.models.classifiers import group_classifiers -from ..db.plugin import get_database_engine +from ..db.plugin import get_database_engine_legacy from ..scicrunch.db import ResearchResourceRepository from ..scicrunch.service_client import SciCrunch @@ -77,7 +78,7 @@ class Classifiers(BaseModel): class GroupClassifierRepository: def __init__(self, app: web.Application): - self.engine = get_database_engine(app) + self.engine = get_database_engine_legacy(app) async def _get_bundle(self, gid: int) -> RowProxy | None: async with self.engine.acquire() as conn: @@ -97,12 +98,15 @@ async def get_classifiers_from_bundle(self, gid: int) -> dict[str, Any]: exclude_unset=True, exclude_none=True ) except ValidationError as err: - _logger.error( - "DB corrupt data in 'groups_classifiers' table. " - "Invalid classifier for gid=%d: %s. " - "Returning empty bundle.", - gid, - err, + _logger.exception( + **create_troubleshooting_log_kwargs( + f"DB corrupt data in 'groups_classifiers' table. Invalid classifier for gid={gid}. 
Returning empty bundle.", + error=err, + error_context={ + "gid": gid, + "bundle": bundle, + }, + ) ) return {} @@ -124,7 +128,7 @@ async def build_rrids_tree_view( ) -> dict[str, Any]: if tree_view_mode != "std": raise web.HTTPNotImplemented( - reason="Currently only 'std' option for the classifiers tree view is implemented" + text="Currently only 'std' option for the classifiers tree view is implemented" ) scicrunch = SciCrunch.get_instance(app) diff --git a/services/web/server/src/simcore_service_webserver/groups/_common/exceptions_handlers.py b/services/web/server/src/simcore_service_webserver/groups/_common/exceptions_handlers.py index f0b9242fb70f..c0237b2c45fb 100644 --- a/services/web/server/src/simcore_service_webserver/groups/_common/exceptions_handlers.py +++ b/services/web/server/src/simcore_service_webserver/groups/_common/exceptions_handlers.py @@ -1,5 +1,6 @@ import logging +from common_library.user_messages import user_message from servicelib.aiohttp import status from ...exception_handling import ( @@ -23,32 +24,40 @@ _TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { UserNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "User {uid} or {email} not found", + user_message( + "The user with ID {uid} or email {email} could not be found.", _version=1 + ), ), GroupNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Group {gid} not found", + user_message("The group with ID {gid} could not be found.", _version=1), ), UserInGroupNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "User not found in group {gid}", + user_message("The user is not a member of group {gid}.", _version=1), ), UserAlreadyInGroupError: HttpErrorInfo( status.HTTP_409_CONFLICT, - "User is already in group {gid}", + user_message("The user is already a member of group {gid}.", _version=1), ), UserInsufficientRightsError: HttpErrorInfo( status.HTTP_403_FORBIDDEN, - "Insufficient rights for {permission} access to group {gid}", + user_message( + "You do not have sufficient rights for {permission} access to group {gid}.", + _version=1, + ), ), # scicrunch InvalidRRIDError: HttpErrorInfo( status.HTTP_409_CONFLICT, - "Invalid RRID {rrid}", + user_message("The RRID {rrid} is not valid.", _version=1), ), ScicrunchError: HttpErrorInfo( status.HTTP_409_CONFLICT, - "Cannot get RRID since scicrunch.org service is not reachable.", + user_message( + "Unable to retrieve RRID information because the scicrunch.org service is currently unavailable.", + _version=1, + ), ), } diff --git a/services/web/server/src/simcore_service_webserver/groups/_groups_repository.py b/services/web/server/src/simcore_service_webserver/groups/_groups_repository.py index 83740fce3920..ede591b76589 100644 --- a/services/web/server/src/simcore_service_webserver/groups/_groups_repository.py +++ b/services/web/server/src/simcore_service_webserver/groups/_groups_repository.py @@ -6,7 +6,6 @@ from aiohttp import web from common_library.groups_enums import GroupType from common_library.users_enums import UserRole -from models_library.basic_types import IDStr from models_library.groups import ( AccessRightsDict, Group, @@ -17,7 +16,7 @@ StandardGroupCreate, StandardGroupUpdate, ) -from models_library.users import UserID +from models_library.users import UserID, UserNameID from simcore_postgres_database.aiopg_errors import UniqueViolation from simcore_postgres_database.models.users import users from simcore_postgres_database.utils_products import get_or_create_product_group @@ -108,8 +107,23 @@ async def 
_get_group_and_access_rights_or_raise( *, caller_id: UserID, group_id: GroupID, - permission: Literal["read", "write", "delete"] | None, + check_permission: Literal["read", "write", "delete"] | None, ) -> Row: + """Fetches a group and its access rights for a specific user (caller_id). + + Arguments: + conn -- Database connection to use for the query. + caller_id -- the user requesting the group information + group_id -- ID of the group to fetch. + check_permission -- Permission to check for the user on the group. If None, no permission check is performed. + + Raises: + GroupNotFoundError: if the group does not exist or the caller is not part of the group. + UserInsufficientRightsError: if the user lacks the specified permission on the group. + + Returns: + Row containing the group details and access rights. + """ result = await conn.execute( sa.select( *_GROUP_COLUMNS, @@ -122,8 +136,8 @@ async def _get_group_and_access_rights_or_raise( if not row: raise GroupNotFoundError(gid=group_id) - if permission: - _check_group_permissions(row, caller_id, group_id, permission) + if check_permission: + _check_group_permissions(row, caller_id, group_id, check_permission) return row @@ -133,19 +147,19 @@ async def _get_group_and_access_rights_or_raise( # -async def get_group_from_gid( +async def get_group_by_gid( app: web.Application, connection: AsyncConnection | None = None, *, group_id: GroupID, ) -> Group | None: async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: - row = await conn.execute( + result = await conn.execute( sa.select(*_GROUP_COLUMNS).where(groups.c.gid == group_id) ) - result = row.first() - if result: - return Group.model_validate(result, from_attributes=True) + row = result.one_or_none() + if row: + return Group.model_validate(row, from_attributes=True) return None @@ -274,29 +288,29 @@ async def get_user_group( """ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: row = await _get_group_and_access_rights_or_raise( - conn, caller_id=user_id, group_id=group_id, permission="read" + conn, caller_id=user_id, group_id=group_id, check_permission="read" ) group, access_rights = _to_group_info_tuple(row) return group, access_rights -async def get_product_group_for_user( +async def get_any_group_for_user( app: web.Application, connection: AsyncConnection | None = None, *, user_id: UserID, - product_gid: GroupID, + group_gid: GroupID, ) -> tuple[Group, AccessRightsDict]: """ - Returns product's group if user belongs to it, otherwise it + Returns any group if user belongs to it (even if it has no permissions), otherwise it raises GroupNotFoundError """ async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: row = await _get_group_and_access_rights_or_raise( conn, caller_id=user_id, - group_id=product_gid, - permission=None, + group_id=group_gid, + check_permission=None, ) group, access_rights = _to_group_info_tuple(row) return group, access_rights @@ -363,7 +377,7 @@ async def update_standard_group( async with transaction_context(get_asyncpg_engine(app), connection) as conn: row = await _get_group_and_access_rights_or_raise( - conn, caller_id=user_id, group_id=group_id, permission="write" + conn, caller_id=user_id, group_id=group_id, check_permission="write" ) assert row.gid == group_id # nosec # NOTE: update does not include access-rights @@ -391,7 +405,7 @@ async def delete_standard_group( ) -> None: async with transaction_context(get_asyncpg_engine(app), connection) as conn: await 
_get_group_and_access_rights_or_raise( - conn, caller_id=user_id, group_id=group_id, permission="delete" + conn, caller_id=user_id, group_id=group_id, check_permission="delete" ) await conn.execute( @@ -465,11 +479,11 @@ async def _get_user_in_group_or_raise( return row -async def list_users_in_group( +async def list_users_in_group_with_caller_check( app: web.Application, connection: AsyncConnection | None = None, *, - caller_id: UserID, + caller_user_id: UserID, group_id: GroupID, ) -> list[GroupMember]: async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: @@ -487,7 +501,7 @@ async def list_users_in_group( .where( (user_to_groups.c.gid == group_id) & ( - (user_to_groups.c.uid == caller_id) + (user_to_groups.c.uid == caller_user_id) | ( (groups.c.type == GroupType.PRIMARY) & users.c.role.in_([r for r in UserRole if r > UserRole.GUEST]) @@ -504,14 +518,17 @@ async def list_users_in_group( # Drop access-rights if primary group if group_row.type == GroupType.PRIMARY: query = sa.select( - *_group_user_cols(caller_id), + *_group_user_cols(caller_user_id), ) else: _check_group_permissions( - group_row, caller_id=caller_id, group_id=group_id, permission="read" + group_row, + caller_id=caller_user_id, + group_id=group_id, + permission="read", ) query = sa.select( - *_group_user_cols(caller_id), + *_group_user_cols(caller_user_id), user_to_groups.c.access_rights, ) @@ -527,6 +544,46 @@ async def list_users_in_group( ] +async def list_users_in_group( + app: web.Application, + connection: AsyncConnection | None = None, + *, + group_id: GroupID, +) -> list[GroupMember]: + """ + Returns all users in a group without any permission checking. + This is a pure function that doesn't validate caller permissions. + """ + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + # Check if group exists + group_exists = await conn.scalar( + sa.select(groups.c.gid).where(groups.c.gid == group_id) + ) + if not group_exists: + raise GroupNotFoundError(gid=group_id) + + # Get all users in the group + query = ( + sa.select( + users.c.id, + users.c.name, + users.c.email, + users.c.first_name, + users.c.last_name, + users.c.primary_gid, + # user_to_groups.c.access_rights, # <-- currently not neccessary, might be added if needed + ) + .select_from(users.join(user_to_groups, users.c.id == user_to_groups.c.uid)) + .where(user_to_groups.c.gid == group_id) + ) + + result = await conn.stream(query) + return [ + GroupMember.model_validate(row, from_attributes=True) + async for row in result + ] + + async def get_user_in_group( app: web.Application, connection: AsyncConnection | None = None, @@ -538,7 +595,7 @@ async def get_user_in_group( async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: # first check if the group exists await _get_group_and_access_rights_or_raise( - conn, caller_id=caller_id, group_id=group_id, permission="read" + conn, caller_id=caller_id, group_id=group_id, check_permission="read" ) # get the user with its permissions @@ -568,7 +625,7 @@ async def update_user_in_group( # first check if the group exists await _get_group_and_access_rights_or_raise( - conn, caller_id=caller_id, group_id=group_id, permission="write" + conn, caller_id=caller_id, group_id=group_id, check_permission="write" ) # now check the user exists @@ -608,7 +665,7 @@ async def delete_user_from_group( async with transaction_context(get_asyncpg_engine(app), connection) as conn: # first check if the group exists await 
_get_group_and_access_rights_or_raise( - conn, caller_id=caller_id, group_id=group_id, permission="write" + conn, caller_id=caller_id, group_id=group_id, check_permission="write" ) # check the user exists @@ -636,6 +693,26 @@ async def delete_user_from_group( # +async def check_group_write_access( + app: web.Application, + connection: AsyncConnection | None = None, + *, + caller_id: UserID, + group_id: GroupID, +) -> None: + """ + Checks if caller has write access to the group. + + Raises: + GroupNotFoundError: if group not found or caller has no access + UserInsufficientRightsError: if caller has no write permission + """ + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + await _get_group_and_access_rights_or_raise( + conn, caller_id=caller_id, group_id=group_id, check_permission="write" + ) + + async def is_user_by_email_in_group( app: web.Application, connection: AsyncConnection | None = None, @@ -654,26 +731,38 @@ async def is_user_by_email_in_group( return user_id is not None +async def is_user_in_group( + app: web.Application, + connection: AsyncConnection | None = None, + *, + user_id: UserID, + group_id: GroupID, +) -> bool: + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + result = await conn.scalar( + sa.select(user_to_groups.c.uid).where( + (user_to_groups.c.uid == user_id) & (user_to_groups.c.gid == group_id) + ) + ) + return result is not None + + async def add_new_user_in_group( app: web.Application, connection: AsyncConnection | None = None, *, - caller_id: UserID, group_id: GroupID, # either user_id or user_name new_user_id: UserID | None = None, - new_user_name: IDStr | None = None, + new_user_name: UserNameID | None = None, access_rights: AccessRightsDict | None = None, ) -> None: """ - adds new_user (either by id or email) in group (with gid) owned by user_id + adds new_user (either by id or email) in group (with gid) + + Note: This function does not check permissions - caller must ensure permissions are checked separately """ async with transaction_context(get_asyncpg_engine(app), connection) as conn: - # first check if the group exists - await _get_group_and_access_rights_or_raise( - conn, caller_id=caller_id, group_id=group_id, permission="write" - ) - query = sa.select(users.c.id) if new_user_id is not None: query = query.where(users.c.id == new_user_id) @@ -704,7 +793,6 @@ async def add_new_user_in_group( raise UserAlreadyInGroupError( uid=new_user_id, gid=group_id, - user_id=caller_id, access_rights=access_rights, ) from exc diff --git a/services/web/server/src/simcore_service_webserver/groups/_groups_rest.py b/services/web/server/src/simcore_service_webserver/groups/_groups_rest.py index 18032f8ea376..42fbd892d3f9 100644 --- a/services/web/server/src/simcore_service_webserver/groups/_groups_rest.py +++ b/services/web/server/src/simcore_service_webserver/groups/_groups_rest.py @@ -1,5 +1,4 @@ import logging -from contextlib import suppress from aiohttp import web from models_library.api_schemas_webserver.groups import ( @@ -30,7 +29,6 @@ GroupsRequestContext, GroupsUsersPathParams, ) -from .exceptions import GroupNotFoundError _logger = logging.getLogger(__name__) @@ -49,33 +47,19 @@ async def list_groups(request: web.Request): product: Product = products_web.get_current_product(request) req_ctx = GroupsRequestContext.model_validate(request) - groups_by_type = await _groups_service.list_user_groups_with_read_access( - request.app, user_id=req_ctx.user_id + ( + groups_by_type, + 
my_product_group, + product_support_group, + ) = await _groups_service.get_user_profile_groups( + request.app, user_id=req_ctx.user_id, product=product ) - assert groups_by_type.primary - assert groups_by_type.everyone - - my_product_group = None - - if product.group_id: - with suppress(GroupNotFoundError): - # Product is optional - my_product_group = await _groups_service.get_product_group_for_user( - app=request.app, - user_id=req_ctx.user_id, - product_gid=product.group_id, - ) - - my_groups = MyGroupsGet( - me=GroupGet.from_domain_model(*groups_by_type.primary), - organizations=[ - GroupGet.from_domain_model(*gi) for gi in groups_by_type.standard - ], - all=GroupGet.from_domain_model(*groups_by_type.everyone), - product=GroupGet.from_domain_model(*my_product_group) - if my_product_group - else None, + assert groups_by_type.primary # nosec + assert groups_by_type.everyone # nosec + + my_groups = MyGroupsGet.from_domain_model( + groups_by_type, my_product_group, product_support_group ) return envelope_json_response(my_groups) @@ -173,7 +157,7 @@ async def get_all_group_users(request: web.Request): req_ctx = GroupsRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(GroupsPathParams, request) - users_in_group = await _groups_service.list_group_members( + users_in_group = await _groups_service.list_group_members_with_caller_check( request.app, req_ctx.user_id, path_params.gid ) diff --git a/services/web/server/src/simcore_service_webserver/groups/_groups_service.py b/services/web/server/src/simcore_service_webserver/groups/_groups_service.py index f53a7be17c67..501adea3df06 100644 --- a/services/web/server/src/simcore_service_webserver/groups/_groups_service.py +++ b/services/web/server/src/simcore_service_webserver/groups/_groups_service.py @@ -1,5 +1,6 @@ +from contextlib import suppress + from aiohttp import web -from models_library.basic_types import IDStr from models_library.emails import LowerCaseEmailStr from models_library.groups import ( AccessRightsDict, @@ -11,20 +12,21 @@ StandardGroupUpdate, ) from models_library.products import ProductName -from models_library.users import UserID +from models_library.users import UserID, UserNameID from pydantic import EmailStr -from ..users.api import get_user +from ..products.models import Product +from ..users import users_service from . 
import _groups_repository -from .exceptions import GroupsError +from .exceptions import GroupNotFoundError, GroupsError # # GROUPS # -async def get_group_from_gid(app: web.Application, group_id: GroupID) -> Group | None: - group_db = await _groups_repository.get_group_from_gid(app, group_id=group_id) +async def get_group_by_gid(app: web.Application, group_id: GroupID) -> Group | None: + group_db = await _groups_repository.get_group_by_gid(app, group_id=group_id) if group_db: return Group.model_construct(**group_db.model_dump()) @@ -71,11 +73,46 @@ async def get_product_group_for_user( Returns product's group if user belongs to it, otherwise it raises GroupNotFoundError """ - return await _groups_repository.get_product_group_for_user( - app, user_id=user_id, product_gid=product_gid + return await _groups_repository.get_any_group_for_user( + app, user_id=user_id, group_gid=product_gid ) +async def get_user_profile_groups( + app: web.Application, *, user_id: UserID, product: Product +) -> tuple[ + GroupsByTypeTuple, + tuple[Group, AccessRightsDict] | None, + Group | None, +]: + """ + Get all groups needed for user profile including standard groups, + product group, and support group. + + Returns: + Tuple of (groups_by_type, my_product_group, product_support_group) + """ + groups_by_type = await list_user_groups_with_read_access(app, user_id=user_id) + + my_product_group = None + if product.group_id: # Product group is optional + with suppress(GroupNotFoundError): + my_product_group = await get_product_group_for_user( + app=app, + user_id=user_id, + product_gid=product.group_id, + ) + + product_support_group = None + if product.support_standard_group_id: # Support group is optional + # NOTE: my_support_group can be part of groups_by_type.standard! + product_support_group = await get_group_by_gid( + app, product.support_standard_group_id + ) + + return groups_by_type, my_product_group, product_support_group + + # # CRUD operations on groups linked to a user # @@ -154,14 +191,20 @@ async def delete_standard_group( # -async def list_group_members( +async def list_group_members_with_caller_check( app: web.Application, user_id: UserID, group_id: GroupID ) -> list[GroupMember]: - return await _groups_repository.list_users_in_group( - app, caller_id=user_id, group_id=group_id + return await _groups_repository.list_users_in_group_with_caller_check( + app, caller_user_id=user_id, group_id=group_id ) +async def list_group_members( + app: web.Application, group_id: GroupID +) -> list[GroupMember]: + return await _groups_repository.list_users_in_group(app, group_id=group_id) + + async def get_group_member( app: web.Application, user_id: UserID, @@ -218,8 +261,16 @@ async def is_user_by_email_in_group( ) +async def is_user_in_group( + app: web.Application, *, user_id: UserID, group_id: GroupID +) -> bool: + return await _groups_repository.is_user_in_group( + app, user_id=user_id, group_id=group_id + ) + + async def auto_add_user_to_groups(app: web.Application, user_id: UserID) -> None: - user: dict = await get_user(app, user_id) + user: dict = await users_service.get_user(app, user_id) return await _groups_repository.auto_add_user_to_groups(app, user=user) @@ -244,9 +295,8 @@ async def add_user_in_group( *, # identifies new_by_user_id: UserID | None = None, - new_by_user_name: IDStr | None = None, + new_by_user_name: UserNameID | None = None, new_by_user_email: EmailStr | None = None, - # payload access_rights: AccessRightsDict | None = None, ) -> None: """Adds new_user (either by id or email) in group 
(with gid) owned by user_id @@ -254,20 +304,31 @@ async def add_user_in_group( Raises: UserInGroupNotFoundError GroupsException + GroupNotFoundError + UserInsufficientRightsError """ if not _only_one_true(new_by_user_id, new_by_user_name, new_by_user_email): msg = "Invalid method call, required one of these: user id, username or user email, none provided" raise GroupsError(msg=msg) + # First check if caller has write access to the group + await _groups_repository.check_group_write_access( + app, caller_id=user_id, group_id=group_id + ) + + # get target user to add to group if new_by_user_email: user = await _groups_repository.get_user_from_email( app, email=new_by_user_email, caller_id=user_id ) new_by_user_id = user.id + if new_by_user_id is not None: + new_user = await users_service.get_user(app, new_by_user_id) + new_by_user_name = new_user["name"] + return await _groups_repository.add_new_user_in_group( app, - caller_id=user_id, group_id=group_id, new_user_id=new_by_user_id, new_user_name=new_by_user_name, diff --git a/services/web/server/src/simcore_service_webserver/groups/api.py b/services/web/server/src/simcore_service_webserver/groups/api.py index a01fe9ef63f0..99d962a37eb4 100644 --- a/services/web/server/src/simcore_service_webserver/groups/api.py +++ b/services/web/server/src/simcore_service_webserver/groups/api.py @@ -5,10 +5,13 @@ add_user_in_group, auto_add_user_to_groups, auto_add_user_to_product_group, - get_group_from_gid, + get_group_by_gid, get_product_group_for_user, + get_user_profile_groups, is_user_by_email_in_group, + is_user_in_group, list_all_user_groups_ids, + list_group_members, list_user_groups_ids_with_read_access, list_user_groups_with_read_access, ) @@ -17,10 +20,13 @@ "add_user_in_group", "auto_add_user_to_groups", "auto_add_user_to_product_group", - "get_group_from_gid", + "get_group_by_gid", "get_product_group_for_user", + "get_user_profile_groups", "is_user_by_email_in_group", + "is_user_in_group", "list_all_user_groups_ids", + "list_group_members", "list_user_groups_ids_with_read_access", "list_user_groups_with_read_access", # nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/groups/plugin.py b/services/web/server/src/simcore_service_webserver/groups/plugin.py index e8e56413671b..9017f96cea44 100644 --- a/services/web/server/src/simcore_service_webserver/groups/plugin.py +++ b/services/web/server/src/simcore_service_webserver/groups/plugin.py @@ -1,8 +1,8 @@ import logging from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func from ..constants import APP_SETTINGS_KEY from ..products.plugin import setup_products from . 
import _classifiers_rest, _groups_rest @@ -10,7 +10,7 @@ _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_GROUPS", diff --git a/services/web/server/src/simcore_service_webserver/invitations/_client.py b/services/web/server/src/simcore_service_webserver/invitations/_client.py index 84417a759ea5..30dfaf390978 100644 --- a/services/web/server/src/simcore_service_webserver/invitations/_client.py +++ b/services/web/server/src/simcore_service_webserver/invitations/_client.py @@ -155,9 +155,9 @@ async def generate_invitation( async def invitations_service_api_cleanup_ctx(app: web.Application): - service_api = await InvitationsServiceApi.create( - settings=app[APP_SETTINGS_KEY].WEBSERVER_INVITATIONS - ) + settings = app[APP_SETTINGS_KEY].WEBSERVER_INVITATIONS + assert settings # nosec + service_api = await InvitationsServiceApi.create(settings) app[_APP_INVITATIONS_SERVICE_API_KEY] = service_api diff --git a/services/web/server/src/simcore_service_webserver/invitations/_rest.py b/services/web/server/src/simcore_service_webserver/invitations/_rest.py index ec0b8cbb1c0b..f0f38bfc0d86 100644 --- a/services/web/server/src/simcore_service_webserver/invitations/_rest.py +++ b/services/web/server/src/simcore_service_webserver/invitations/_rest.py @@ -9,15 +9,14 @@ from models_library.rest_base import RequestParameters from models_library.users import UserID from pydantic import Field +from servicelib.aiohttp.request_keys import RQT_USERID_KEY from servicelib.aiohttp.requests_validation import parse_request_body_as -from servicelib.request_keys import RQT_USERID_KEY -from yarl import URL from .._meta import API_VTAG as VTAG from ..constants import RQ_PRODUCT_KEY from ..login.decorators import login_required from ..security.decorators import permission_required -from ..users.api import get_user_name_and_email +from ..users import users_service from ..utils_aiohttp import envelope_json_response from . 
import api @@ -39,7 +38,9 @@ async def generate_invitation(request: web.Request): req_ctx = _ProductsRequestContext.model_validate(request) body = await parse_request_body_as(InvitationGenerate, request) - _, user_email = await get_user_name_and_email(request.app, user_id=req_ctx.user_id) + _, user_email = await users_service.get_user_name_and_email( + request.app, user_id=req_ctx.user_id + ) # NOTE: check if invitations are activated in this product or raise generated = await api.generate_invitation( @@ -51,14 +52,12 @@ async def generate_invitation(request: web.Request): extra_credits_in_usd=body.extra_credits_in_usd, product=req_ctx.product_name, ), + request.url, ) assert request.url.host # nosec assert generated.product == req_ctx.product_name # nosec assert generated.guest == body.guest # nosec - url = URL(f"{generated.invitation_url}") - invitation_link = request.url.with_path(url.path).with_fragment(url.raw_fragment) - invitation = InvitationGenerated( product_name=generated.product, issuer=generated.issuer, @@ -66,6 +65,6 @@ async def generate_invitation(request: web.Request): trial_account_days=generated.trial_account_days, extra_credits_in_usd=generated.extra_credits_in_usd, created=generated.created, - invitation_link=f"{invitation_link}", # type: ignore[arg-type] + invitation_link=generated.invitation_url, ) return envelope_json_response(invitation.model_dump(exclude_none=True)) diff --git a/services/web/server/src/simcore_service_webserver/invitations/_service.py b/services/web/server/src/simcore_service_webserver/invitations/_service.py index 4c585d1ed0ec..2bf59df55376 100644 --- a/services/web/server/src/simcore_service_webserver/invitations/_service.py +++ b/services/web/server/src/simcore_service_webserver/invitations/_service.py @@ -8,7 +8,8 @@ ApiInvitationInputs, ) from models_library.emails import LowerCaseEmailStr -from pydantic import AnyHttpUrl, TypeAdapter, ValidationError +from pydantic import AnyHttpUrl, HttpUrl, TypeAdapter, ValidationError +from yarl import URL from ..groups.api import is_user_by_email_in_group from ..products.models import Product @@ -51,7 +52,7 @@ async def validate_invitation_url( """ if current_product.group_id is None: raise InvitationsServiceUnavailableError( - reason="Current product is not configured for invitations", + details="Current product is not configured for invitations", current_product=current_product, guest_email=guest_email, ) @@ -60,7 +61,7 @@ async def validate_invitation_url( valid_url = TypeAdapter(AnyHttpUrl).validate_python(invitation_url) except ValidationError as err: raise InvalidInvitationError( - reason=MSG_INVALID_INVITATION_URL, + details=MSG_INVALID_INVITATION_URL, current_product=current_product, guest_email=guest_email, ) from err @@ -73,7 +74,7 @@ async def validate_invitation_url( # check email if invitation.guest.lower() != guest_email.lower(): raise InvalidInvitationError( - reason="This invitation was issued for a different email", + details="This invitation was issued for a different email", current_product=current_product, guest_email=guest_email, invitation=invitation, @@ -83,7 +84,7 @@ async def validate_invitation_url( assert current_product.group_id is not None # nosec if invitation.product is not None and invitation.product != current_product.name: raise InvalidInvitationError( - reason="This invitation was issued for a different product. " + details="This invitation was issued for a different product. 
" f"Got '{invitation.product}', expected '{current_product.name}'", guest_email=guest_email, current_product=current_product, @@ -101,7 +102,7 @@ async def validate_invitation_url( if is_user_registered_in_product: # NOTE: a user might be already registered but the invitation is for another product raise InvalidInvitationError( - reason=MSG_INVITATION_ALREADY_USED, + details=MSG_INVITATION_ALREADY_USED, guest_email=guest_email, current_product=current_product, invitation=invitation, @@ -124,7 +125,7 @@ async def extract_invitation( try: valid_url = TypeAdapter(AnyHttpUrl).validate_python(invitation_url) except ValidationError as err: - raise InvalidInvitationError(reason=MSG_INVALID_INVITATION_URL) from err + raise InvalidInvitationError(details=MSG_INVALID_INVITATION_URL) from err # check with service invitation: ApiInvitationContent = await get_invitations_service_api( @@ -134,7 +135,9 @@ async def extract_invitation( async def generate_invitation( - app: web.Application, params: ApiInvitationInputs + app: web.Application, + params: ApiInvitationInputs, + product_origin_url: URL, ) -> ApiInvitationContentAndLink: """ Raises: @@ -145,4 +148,10 @@ async def generate_invitation( invitation: ApiInvitationContentAndLink = await get_invitations_service_api( app=app ).generate_invitation(params) + + _normalized_url = URL(f"{invitation.invitation_url}") + invitation.invitation_url = HttpUrl( + f"{product_origin_url.with_path(_normalized_url.path).with_fragment(_normalized_url.raw_fragment)}" + ) + return invitation diff --git a/services/web/server/src/simcore_service_webserver/invitations/errors.py b/services/web/server/src/simcore_service_webserver/invitations/errors.py index 881b62c6df9a..27ed49213b79 100644 --- a/services/web/server/src/simcore_service_webserver/invitations/errors.py +++ b/services/web/server/src/simcore_service_webserver/invitations/errors.py @@ -1,16 +1,20 @@ """ - API plugin errors +API plugin errors """ +from common_library.user_messages import user_message from ..errors import WebServerBaseError -MSG_INVALID_INVITATION_URL = "Link seems corrupted or incomplete" -MSG_INVITATION_ALREADY_USED = "This invitation was already used" +MSG_INVALID_INVITATION_URL = user_message( + "The invitation link appears to be corrupted or incomplete.", _version=1 +) +MSG_INVITATION_ALREADY_USED = user_message( + "This invitation has already been used and cannot be used again.", _version=1 +) -class InvitationsError(WebServerBaseError, ValueError): - ... +class InvitationsError(WebServerBaseError, ValueError): ... 
class InvalidInvitationError(InvitationsError): @@ -18,4 +22,4 @@ class InvalidInvitationError(InvitationsError): class InvitationsServiceUnavailableError(InvitationsError): - msg_template = "Cannot process invitations" + msg_template = "Cannot process invitations: {details}" diff --git a/services/web/server/src/simcore_service_webserver/invitations/plugin.py b/services/web/server/src/simcore_service_webserver/invitations/plugin.py index 344f652ac83b..ce9063e74232 100644 --- a/services/web/server/src/simcore_service_webserver/invitations/plugin.py +++ b/services/web/server/src/simcore_service_webserver/invitations/plugin.py @@ -1,12 +1,12 @@ """ - Plugin to interact with the invitations service +Plugin to interact with the invitations service """ import logging from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func from ..constants import APP_SETTINGS_KEY from ..db.plugin import setup_db from ..products.plugin import setup_products @@ -16,7 +16,7 @@ _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_INVITATIONS", diff --git a/services/web/server/src/simcore_service_webserver/licenses/_common/exceptions_handlers.py b/services/web/server/src/simcore_service_webserver/licenses/_common/exceptions_handlers.py index 6c7d1255ca55..d73585875306 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/_common/exceptions_handlers.py +++ b/services/web/server/src/simcore_service_webserver/licenses/_common/exceptions_handlers.py @@ -1,7 +1,7 @@ import logging +from common_library.user_messages import user_message from servicelib.aiohttp import status -from simcore_service_webserver.wallets.errors import WalletAccessForbiddenError from ...exception_handling import ( ExceptionToHttpErrorMap, @@ -9,7 +9,7 @@ exception_handling_decorator, to_exceptions_handlers_map, ) -from ...wallets.errors import WalletNotEnoughCreditsError +from ...wallets.errors import WalletAccessForbiddenError, WalletNotEnoughCreditsError from ..errors import LicensedItemNotFoundError, LicensedItemPricingPlanMatchError _logger = logging.getLogger(__name__) @@ -18,19 +18,31 @@ _TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { LicensedItemNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Market item {licensed_item_id} not found.", + user_message( + "The requested market item '{licensed_item_id}' could not be found.", + _version=1, + ), ), WalletAccessForbiddenError: HttpErrorInfo( status.HTTP_403_FORBIDDEN, - "Credit account {wallet_id} forbidden.", + user_message( + "You do not have permission to access credit account '{wallet_id}'.", + _version=1, + ), ), WalletNotEnoughCreditsError: HttpErrorInfo( status.HTTP_402_PAYMENT_REQUIRED, - "Not enough credits in the credit account.", + user_message( + "Your credit account does not have sufficient funds to complete this purchase.", + _version=1, + ), ), LicensedItemPricingPlanMatchError: HttpErrorInfo( status.HTTP_400_BAD_REQUEST, - "The provided pricing plan does not match the one associated with the licensed item.", + user_message( + "The selected pricing plan is not valid for this licensed item. 
Please choose a different plan.", + _version=1, + ), ), } diff --git a/services/web/server/src/simcore_service_webserver/licenses/_common/models.py b/services/web/server/src/simcore_service_webserver/licenses/_common/models.py index 2469eb146140..099f0b81c7d8 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/_common/models.py +++ b/services/web/server/src/simcore_service_webserver/licenses/_common/models.py @@ -14,7 +14,7 @@ from models_library.users import UserID from models_library.wallets import WalletID from pydantic import BaseModel, ConfigDict, Field -from servicelib.request_keys import RQT_USERID_KEY +from servicelib.aiohttp.request_keys import RQT_USERID_KEY from ...constants import RQ_PRODUCT_KEY @@ -28,23 +28,22 @@ class LicensedItemsPathParams(StrictRequestParameters): licensed_item_id: LicensedItemID -_LicensedItemsListOrderQueryParams: type[ - RequestParameters -] = create_ordering_query_model_class( - ordering_fields={ - "display_name", - "modified_at", - }, - default=OrderBy(field=IDStr("display_name"), direction=OrderDirection.DESC), - ordering_fields_api_to_column_map={"modified_at": "modified"}, +_LicensedItemsListOrderQueryParams: type[RequestParameters] = ( + create_ordering_query_model_class( + ordering_fields={ + "display_name", + "modified_at", + }, + default=OrderBy(field=IDStr("display_name"), direction=OrderDirection.DESC), + ordering_fields_api_to_column_map={"modified_at": "modified"}, + ) ) class LicensedItemsListQueryParams( PageQueryParameters, _LicensedItemsListOrderQueryParams, # type: ignore[misc, valid-type] -): - ... +): ... class LicensedItemsBodyParams(BaseModel): @@ -60,21 +59,20 @@ class LicensedItemsPurchasesPathParams(StrictRequestParameters): licensed_item_purchase_id: LicensedItemPurchaseID -_LicensedItemsPurchasesListOrderQueryParams: type[ - RequestParameters -] = create_ordering_query_model_class( - ordering_fields={ - "purchased_at", - "modified_at", - "name", - }, - default=OrderBy(field=IDStr("purchased_at"), direction=OrderDirection.DESC), - ordering_fields_api_to_column_map={"modified_at": "modified"}, +_LicensedItemsPurchasesListOrderQueryParams: type[RequestParameters] = ( + create_ordering_query_model_class( + ordering_fields={ + "purchased_at", + "modified_at", + "name", + }, + default=OrderBy(field=IDStr("purchased_at"), direction=OrderDirection.DESC), + ordering_fields_api_to_column_map={"modified_at": "modified"}, + ) ) class LicensedItemsPurchasesListQueryParams( PageQueryParameters, _LicensedItemsPurchasesListOrderQueryParams, # type: ignore[misc, valid-type] -): - ... +): ... diff --git a/services/web/server/src/simcore_service_webserver/licenses/_itis_vip_syncer_service.py b/services/web/server/src/simcore_service_webserver/licenses/_itis_vip_syncer_service.py index 6bcd772cdf57..7cca49731e01 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/_itis_vip_syncer_service.py +++ b/services/web/server/src/simcore_service_webserver/licenses/_itis_vip_syncer_service.py @@ -4,17 +4,17 @@ from datetime import timedelta from aiohttp import web +from common_library.async_tools import cancel_wait_task from httpx import AsyncClient from models_library.licenses import LicensedResourceType -from servicelib.async_utils import cancel_wait_task from servicelib.background_task_utils import exclusive_periodic from servicelib.logging_utils import log_catch, log_context -from simcore_service_webserver.licenses import ( + +from ..redis import get_redis_lock_manager_client_sdk, setup_redis +from . 
import ( _itis_vip_service, _licensed_resources_service, ) - -from ..redis import get_redis_lock_manager_client_sdk, setup_redis from ._itis_vip_models import CategoryTuple, ItisVipData, ItisVipResourceData from ._licensed_resources_service import RegistrationState @@ -32,10 +32,10 @@ async def sync_licensed_resources( with log_context( _logger, logging.INFO, "Fetching %s and validating", category_url ), log_catch(_logger, reraise=True): - vip_data_items: list[ - ItisVipData - ] = await _itis_vip_service.get_category_items( - http_client, category_url + vip_data_items: list[ItisVipData] = ( + await _itis_vip_service.get_category_items( + http_client, category_url + ) ) # REGISTRATION diff --git a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_service.py b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_service.py index ed70c51bc8f2..1b4777994117 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_service.py +++ b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_checkouts_service.py @@ -16,7 +16,7 @@ ) from ..rabbitmq import get_rabbitmq_rpc_client -from ..users.api import get_user +from ..users import users_service from ..wallets.api import get_wallet_by_user from . import _licensed_items_repository from ._licensed_items_checkouts_models import ( @@ -135,7 +135,7 @@ async def checkout_licensed_item_for_wallet( product_name=product_name, ) - user = await get_user(app, user_id=user_id) + user = await users_service.get_user(app, user_id=user_id) licensed_item_db = await _licensed_items_repository.get( app, licensed_item_id=licensed_item_id, product_name=product_name diff --git a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_service.py b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_service.py index 90303ff1369f..ea616808458b 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_service.py +++ b/services/web/server/src/simcore_service_webserver/licenses/_licensed_items_service.py @@ -31,7 +31,7 @@ from ..rabbitmq import get_rabbitmq_rpc_client from ..resource_usage.service import get_pricing_plan, get_pricing_plan_unit -from ..users.api import get_user +from ..users import users_service from ..wallets.api import get_wallet_with_available_credits_by_user_and_wallet from ..wallets.errors import WalletNotEnoughCreditsError from . import _licensed_items_repository @@ -134,10 +134,10 @@ async def purchase_licensed_item( # Check whether wallet has enough credits if wallet.available_credits - pricing_unit.current_cost_per_unit < 0: raise WalletNotEnoughCreditsError( - reason=f"Wallet '{wallet.name}' has {wallet.available_credits} credits." + details=f"Wallet '{wallet.name}' has {wallet.available_credits} credits." 
) - user = await get_user(app, user_id=user_id) + user = await users_service.get_user(app, user_id=user_id) _data = LicensedItemsPurchasesCreate( product_name=product_name, diff --git a/services/web/server/src/simcore_service_webserver/licenses/_rpc.py b/services/web/server/src/simcore_service_webserver/licenses/_rpc.py index e86129ac3f33..3555f82c3595 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/_rpc.py +++ b/services/web/server/src/simcore_service_webserver/licenses/_rpc.py @@ -1,5 +1,4 @@ from aiohttp import web -from models_library.api_schemas_webserver import WEBSERVER_RPC_NAMESPACE from models_library.api_schemas_webserver.licensed_items import ( LicensedItemRpcGet, LicensedItemRpcGetPage, @@ -27,7 +26,7 @@ NotEnoughAvailableSeatsError, ) -from ..rabbitmq import get_rabbitmq_rpc_server +from ..rabbitmq import create_register_rpc_routes_on_startup from . import _licensed_items_checkouts_service, _licensed_items_service router = RPCRouter() @@ -161,6 +160,4 @@ async def release_licensed_item_for_wallet( ) -async def register_rpc_routes_on_startup(app: web.Application): - rpc_server = get_rabbitmq_rpc_server(app) - await rpc_server.register_router(router, WEBSERVER_RPC_NAMESPACE, app) +register_rpc_routes_on_startup = create_register_rpc_routes_on_startup(router) diff --git a/services/web/server/src/simcore_service_webserver/licenses/plugin.py b/services/web/server/src/simcore_service_webserver/licenses/plugin.py index 2911564007f5..2856bc80d19a 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/plugin.py +++ b/services/web/server/src/simcore_service_webserver/licenses/plugin.py @@ -1,12 +1,11 @@ -""" tags management subsystem +"""tags management subsystem""" -""" import logging from aiohttp import web -from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func +from ..constants import APP_SETTINGS_KEY from ..rabbitmq import setup_rabbitmq from ..rest.plugin import setup_rest from . 
import ( @@ -21,7 +20,7 @@ _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_LICENSES", diff --git a/services/web/server/src/simcore_service_webserver/licenses/settings.py b/services/web/server/src/simcore_service_webserver/licenses/settings.py index 3882c88da1d1..6bdf0a1be005 100644 --- a/services/web/server/src/simcore_service_webserver/licenses/settings.py +++ b/services/web/server/src/simcore_service_webserver/licenses/settings.py @@ -3,9 +3,9 @@ from aiohttp import web from pydantic import Field -from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY from settings_library.base import BaseCustomSettings +from ..constants import APP_SETTINGS_KEY from ._itis_vip_settings import ItisVipSettings, SpeagPhantomsSettings diff --git a/services/web/server/src/simcore_service_webserver/log.py b/services/web/server/src/simcore_service_webserver/log.py index f1cbbada0570..271c28bd7b95 100644 --- a/services/web/server/src/simcore_service_webserver/log.py +++ b/services/web/server/src/simcore_service_webserver/log.py @@ -1,46 +1,45 @@ -""" Configuration and utilities for service logging - -""" +"""Configuration and utilities for service logging""" import logging +from collections.abc import Awaitable, Callable +from contextlib import AsyncExitStack +from typing import Final, TypeAlias from aiodebug import log_slow_callbacks # type: ignore[import-untyped] +from aiohttp import web from aiohttp.log import access_logger -from servicelib.logging_utils import config_all_loggers -from settings_library.tracing import TracingSettings +from servicelib.logging_utils import async_loggers +from simcore_service_webserver.application_settings import ApplicationSettings -LOG_LEVEL_STEP = logging.CRITICAL - logging.ERROR -NOISY_LOGGERS = ( +_NOISY_LOGGERS: Final[tuple[str, ...]] = ( "aio_pika", "aiormq", "engineio", + "engineio.server", "inotify.adapters", - "gunicorn.access", "openapi_spec_validator", "servicelib.aiohttp.monitoring", + "socketio", + "socketio.server", "sqlalchemy.engine", "sqlalchemy", - "socketio", ) +_logger = logging.getLogger(__name__) + +CleanupEvent: TypeAlias = Callable[[web.Application], Awaitable[None]] -def setup_logging( - *, - level: str | int, - slow_duration: float | None = None, - log_format_local_dev_enabled: bool, - logger_filter_mapping: dict, - tracing_settings: TracingSettings | None -): - # service log level - logging.basicConfig(level=level) - # root - logging.root.setLevel(level) - config_all_loggers( - log_format_local_dev_enabled=log_format_local_dev_enabled, - logger_filter_mapping=logger_filter_mapping, - tracing_settings=tracing_settings, +def setup_logging(app_settings: ApplicationSettings) -> CleanupEvent: + exit_stack = AsyncExitStack() + exit_stack.enter_context( + async_loggers( + log_base_level=app_settings.log_level, + noisy_loggers=_NOISY_LOGGERS, + log_format_local_dev_enabled=app_settings.WEBSERVER_LOG_FORMAT_LOCAL_DEV_ENABLED, + logger_filter_mapping=app_settings.WEBSERVER_LOG_FILTER_MAPPING, + tracing_settings=app_settings.WEBSERVER_TRACING, + ) ) # Enforces same log-level to aiohttp & gunicorn access loggers @@ -51,17 +50,16 @@ def setup_logging( # they are not applied globally but only upon setup_logging ... 
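Because setup_logging now enters the async_loggers context through an AsyncExitStack and hands back a CleanupEvent, the caller has to keep the returned coroutine and schedule it at application shutdown. A hypothetical wiring is sketched below; the actual call site lives elsewhere in the webserver and is not part of this hunk:

    # Hypothetical usage sketch, assuming an ApplicationSettings instance is at hand
    # and setup_logging from this module is in scope.
    from aiohttp import web

    def create_app(app_settings) -> web.Application:
        cleanup_logging = setup_logging(app_settings)  # configures loggers, returns CleanupEvent

        app = web.Application()
        app.on_cleanup.append(cleanup_logging)  # closes the AsyncExitStack on shutdown
        return app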
# gunicorn_access_log = logging.getLogger("gunicorn.access") - access_logger.setLevel(level) - gunicorn_access_log.setLevel(level) + access_logger.setLevel(app_settings.log_level) + gunicorn_access_log.setLevel(app_settings.log_level) - # keep mostly quiet noisy loggers - quiet_level: int = max( - min(logging.root.level + LOG_LEVEL_STEP, logging.CRITICAL), logging.WARNING - ) + if app_settings.AIODEBUG_SLOW_DURATION_SECS: + # NOTE: Every task blocking > AIODEBUG_SLOW_DURATION_SECS secs is considered slow and logged as warning + log_slow_callbacks.enable(abs(app_settings.AIODEBUG_SLOW_DURATION_SECS)) - for name in NOISY_LOGGERS: - logging.getLogger(name).setLevel(quiet_level) + async def _cleanup_event(app: web.Application) -> None: + assert app # nosec + _logger.info("Cleaning up application resources") + await exit_stack.aclose() - if slow_duration: - # NOTE: Every task blocking > AIODEBUG_SLOW_DURATION_SECS secs is considered slow and logged as warning - log_slow_callbacks.enable(abs(slow_duration)) + return _cleanup_event diff --git a/services/web/server/src/simcore_service_webserver/login/__init__.py b/services/web/server/src/simcore_service_webserver/login/__init__.py index 7d14213b2ee6..e69de29bb2d1 100644 --- a/services/web/server/src/simcore_service_webserver/login/__init__.py +++ b/services/web/server/src/simcore_service_webserver/login/__init__.py @@ -1,5 +0,0 @@ -""" webserver's login subsystem - - - This sub-package is based on aiohttp-login https://github.com/imbolc/aiohttp-login -""" diff --git a/services/web/server/src/simcore_service_webserver/login/_auth_service.py b/services/web/server/src/simcore_service_webserver/login/_auth_service.py index 31b873051158..2d806495402e 100644 --- a/services/web/server/src/simcore_service_webserver/login/_auth_service.py +++ b/services/web/server/src/simcore_service_webserver/login/_auth_service.py @@ -1,25 +1,61 @@ from datetime import datetime -from typing import Any +from typing import TypedDict from aiohttp import web from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from simcore_postgres_database.models.users import UserStatus from simcore_postgres_database.utils_repos import transaction_context from simcore_postgres_database.utils_users import UsersRepo -from simcore_service_webserver.db.plugin import get_asyncpg_engine +from ..db.plugin import get_asyncpg_engine from ..groups import api as groups_service from ..products.models import Product -from ..security import api as security_service +from ..security import security_service from . 
import _login_service -from ._constants import MSG_UNKNOWN_EMAIL, MSG_WRONG_PASSWORD -from ._login_repository_legacy import AsyncpgStorage, get_plugin_storage +from .constants import MSG_UNKNOWN_EMAIL +from .errors import WrongPasswordError -async def get_user_by_email(app: web.Application, *, email: str) -> dict[str, Any]: - db: AsyncpgStorage = get_plugin_storage(app) - user = await db.get_user({"email": email}) - return dict(user) if user else {} +class UserInfoDict(TypedDict): + id: int + name: str + email: str + role: str + status: str + first_name: str | None + last_name: str | None + phone: str | None + + +async def get_user_or_none( + app: web.Application, *, email: str | None = None, user_id: int | None = None +) -> UserInfoDict | None: + if email is None and user_id is None: + msg = "Either email or user_id must be provided" + raise ValueError(msg) + + asyncpg_engine = get_asyncpg_engine(app) + repo = UsersRepo(asyncpg_engine) + + if email is not None: + user_row = await repo.get_user_by_email_or_none(email=email.lower()) + else: + assert user_id is not None + user_row = await repo.get_user_by_id_or_none(user_id=user_id) + + if user_row is None: + return None + + return UserInfoDict( + id=user_row.id, + name=user_row.name, + email=user_row.email, + role=user_row.role.value, + status=user_row.status.value, + first_name=user_row.first_name, + last_name=user_row.last_name, + phone=user_row.phone, + ) async def create_user( @@ -29,59 +65,131 @@ async def create_user( password: str, status_upon_creation: UserStatus, expires_at: datetime | None, -) -> dict[str, Any]: +) -> UserInfoDict: - async with transaction_context(get_asyncpg_engine(app)) as conn: - user = await UsersRepo.new_user( + asyncpg_engine = get_asyncpg_engine(app) + repo = UsersRepo(asyncpg_engine) + async with transaction_context(asyncpg_engine) as conn: + user_row = await repo.new_user( conn, email=email, password_hash=security_service.encrypt_password(password), status=status_upon_creation, expires_at=expires_at, ) - await UsersRepo.link_and_update_user_from_pre_registration( - conn, new_user_id=user.id, new_user_email=user.email + await repo.link_and_update_user_from_pre_registration( + conn, + new_user_id=user_row.id, + new_user_email=user_row.email, ) - return dict(user._mapping) # pylint: disable=protected-access # noqa: SLF001 - + return UserInfoDict( + id=user_row.id, + name=user_row.name, + email=user_row.email, + role=user_row.role.value, + status=user_row.status.value, + first_name=user_row.first_name, + last_name=user_row.last_name, + phone=user_row.phone, + ) -async def check_authorized_user_credentials_or_raise( - user: dict[str, Any], - password: str, - product: Product, -) -> dict: +def check_not_null_user(user: UserInfoDict | None) -> UserInfoDict: if not user: raise web.HTTPUnauthorized( - reason=MSG_UNKNOWN_EMAIL, content_type=MIMETYPE_APPLICATION_JSON + text=MSG_UNKNOWN_EMAIL, content_type=MIMETYPE_APPLICATION_JSON ) + return user - _login_service.validate_user_status(user=user, support_email=product.support_email) - if not security_service.check_password(password, user["password_hash"]): - raise web.HTTPUnauthorized( - reason=MSG_WRONG_PASSWORD, content_type=MIMETYPE_APPLICATION_JSON - ) +async def check_authorized_user_credentials( + app: web.Application, + user: UserInfoDict | None, + *, + password: str, + product: Product, +) -> UserInfoDict: + """ + + Raises: + WrongPasswordError: when password is invalid + web.HTTPUnauthorized: 401 + + Returns: + user info dict + """ + + user = 
check_not_null_user(user) + + _login_service.validate_user_access( + user_status=user["status"], + user_role=user["role"], + support_email=product.support_email, + ) + + repo = UsersRepo(get_asyncpg_engine(app)) + + if not security_service.check_password( + password, password_hash=await repo.get_password_hash(user_id=user["id"]) + ): + raise WrongPasswordError(user_id=user["id"], product_name=product.name) return user -async def check_authorized_user_in_product_or_raise( +async def check_authorized_user_in_product( app: web.Application, *, - user: dict, + user_email: str, product: Product, ) -> None: - """Checks whether user is registered in this product""" - email = user.get("email", "").lower() + """Checks whether user is registered in this product + + + Raises: + web.HTTPUnauthorized: 401 + """ + product_group_id = product.group_id assert product_group_id is not None # nosec if ( product_group_id is not None and not await groups_service.is_user_by_email_in_group( - app, user_email=email, group_id=product_group_id + app, user_email=user_email, group_id=product_group_id ) ): - raise web.HTTPUnauthorized( - reason=MSG_UNKNOWN_EMAIL, content_type=MIMETYPE_APPLICATION_JSON - ) + raise web.HTTPUnauthorized(text=MSG_UNKNOWN_EMAIL) + + +async def update_user_password( + app: web.Application, + *, + user_id: int, + current_password: str, + new_password: str, + verify_current_password: bool = True, +) -> None: + """Updates user password after verifying current password + + Keyword Arguments: + verify_current_password -- whether to check current_password is valid (default: {True}) + + Raises: + WrongPasswordError: when current password is invalid + """ + + repo = UsersRepo(get_asyncpg_engine(app)) + + if verify_current_password: + # Get current password hash + current_password_hash = await repo.get_password_hash(user_id=user_id) + + # Verify current password + if not security_service.check_password(current_password, current_password_hash): + raise WrongPasswordError(user_id=user_id) + + # Encrypt new password and update + new_password_hash = security_service.encrypt_password(new_password) + await repo.update_user_password_hash( + user_id=user_id, password_hash=new_password_hash + ) diff --git a/services/web/server/src/simcore_service_webserver/login/_constants.py b/services/web/server/src/simcore_service_webserver/login/_constants.py deleted file mode 100644 index cc10d6ed3407..000000000000 --- a/services/web/server/src/simcore_service_webserver/login/_constants.py +++ /dev/null @@ -1,100 +0,0 @@ -from typing import Final - -MSG_2FA_CODE_SENT: Final[str] = "A code was sent by SMS to {phone_number}." -MSG_2FA_UNAVAILABLE: Final[str] = "Two-factor authentication is temporarily unavailable" -MSG_ACTIVATED: Final[str] = "Your account has been activated." -MSG_ACTIVATION_REQUIRED: Final[str] = ( - "Please activate your account via the email we sent before logging in." -) -MSG_AUTH_FAILED: Final[str] = ( - "Authorization was not successful. Please check your credentials and try again." -) -MSG_CANT_SEND_MAIL: Final[str] = ( - "Unable to send email at this time. Please try again later." -) -MSG_CHANGE_EMAIL_REQUESTED: Final[str] = ( - "Please click the verification link sent to your new email address." -) -MSG_EMAIL_CHANGED: Final[str] = "Your email address has been updated." -MSG_EMAIL_ALREADY_REGISTERED: Final[str] = ( - "This email address is already registered. Try logging in or use a different address." -) -MSG_EMAIL_SENT: Final[str] = "An email was sent to {email} with further instructions." 
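The constants deleted here are not dropped from the product: this PR relocates the login messages to login/constants.py, and user-facing strings across the diff are wrapped with user_message(..., _version=1) from common_library.user_messages so that wording changes can be versioned. The exact contents of the new module are not shown in this hunk; the pattern, as used e.g. in groups/_common/exceptions_handlers.py, looks like:

    # Illustrative only - mirrors the user_message() pattern used elsewhere in this PR;
    # the real login/constants.py may define this constant differently.
    from common_library.user_messages import user_message

    MSG_LOGGED_OUT = user_message("You have successfully logged out.", _version=1)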
-MSG_LOGGED_IN: Final[str] = "You have successfully logged in." -MSG_LOGGED_OUT: Final[str] = "You have successfully logged out." -MSG_OFTEN_RESET_PASSWORD: Final[str] = ( - "You've requested a password reset recently. Please use the link we sent you or wait before requesting again." -) -MSG_PASSWORD_CHANGE_NOT_ALLOWED: Final[str] = ( - "Unable to reset password. Permissions may have expired or been removed. " - "Please try again, or contact support if the problem continues: {support_email}" -) -MSG_PASSWORD_CHANGED: Final[str] = "Your password has been updated." -MSG_PASSWORD_MISMATCH: Final[str] = ( - "Password and confirmation do not match. Please try again." -) -MSG_PHONE_MISSING: Final[str] = "No phone number is associated with this account." -MSG_UNAUTHORIZED_CODE_RESEND_2FA: Final[str] = ( - "You can no longer resend the code. Please restart the verification process." -) -MSG_UNAUTHORIZED_LOGIN_2FA: Final[str] = ( - "You can no longer submit a code. Please restart the login process." -) -MSG_UNAUTHORIZED_REGISTER_PHONE: Final[str] = ( - "Phone registration is no longer allowed. Please restart the registration process." -) -MSG_UNAUTHORIZED_PHONE_CONFIRMATION: Final[str] = ( - "You can no longer submit a code. Please restart the confirmation process." -) -MSG_UNKNOWN_EMAIL: Final[str] = "This email address is not registered." -MSG_USER_DELETED: Final[str] = ( - "This account is scheduled for deletion. To reactivate it or for more information, please contact support: {support_email}" -) -MSG_USER_BANNED: Final[str] = ( - "Access to this account is no longer available. Please contact support for more information: {support_email}" -) -MSG_USER_EXPIRED: Final[str] = ( - "This account has expired and access is no longer available. Please contact support for assistance: {support_email}" -) -MSG_USER_DISABLED: Final[str] = ( - "This account has been disabled and cannot be registered again. Please contact support for details: {support_email}" -) -MSG_WRONG_2FA_CODE__INVALID: Final[str] = ( - "The code entered is not valid. Please enter a valid code or generate a new one." -) -MSG_WRONG_2FA_CODE__EXPIRED: Final[str] = ( - "The code has expired. Please generate a new code." -) -MSG_WRONG_CAPTCHA__INVALID: Final[str] = ( - "The CAPTCHA entered is incorrect. Please try again." -) -MSG_WRONG_PASSWORD: Final[str] = "The password is incorrect. Please try again." -MSG_WEAK_PASSWORD: Final[str] = ( - "Password must be at least {LOGIN_PASSWORD_MIN_LENGTH} characters long." -) -MSG_INVITATIONS_CONTACT_SUFFIX: Final[str] = ( - "Please contact our support team to request a new invitation." -) - -# Login Accepted Response Codes: -# - These string codes are used to identify next step in the login (e.g. login_2fa or register_phone?) 
-# - The frontend uses them also to determine what page/form has to display to the user for next step -CODE_PHONE_NUMBER_REQUIRED: Final[str] = "PHONE_NUMBER_REQUIRED" -CODE_2FA_SMS_CODE_REQUIRED: Final[str] = "SMS_CODE_REQUIRED" -CODE_2FA_EMAIL_CODE_REQUIRED: Final[str] = "EMAIL_CODE_REQUIRED" - - -# App keys for login plugin -# Naming convention: APP_LOGIN_...KEY -APP_LOGIN_SETTINGS_PER_PRODUCT_KEY: Final[str] = ( - f"{__name__}.LOGIN_SETTINGS_PER_PRODUCT" -) - - -# maximum amount the user can resend the code via email or phone -MAX_2FA_CODE_RESEND: Final[int] = 5 - -# maximum number of trials to validate the passcode -MAX_2FA_CODE_TRIALS: Final[int] = 5 - -CAPTCHA_SESSION_KEY: Final[str] = "captcha" diff --git a/services/web/server/src/simcore_service_webserver/login/_controller/rest/_rest_exceptions.py b/services/web/server/src/simcore_service_webserver/login/_controller/rest/_rest_exceptions.py new file mode 100644 index 000000000000..7e6e58887656 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/login/_controller/rest/_rest_exceptions.py @@ -0,0 +1,61 @@ +from aiohttp import web +from common_library.user_messages import user_message +from servicelib.aiohttp import status +from servicelib.aiohttp.rest_middlewares import handle_aiohttp_web_http_error + +from ....exception_handling import ( + ExceptionToHttpErrorMap, + HttpErrorInfo, + exception_handling_decorator, + to_exceptions_handlers_map, +) +from ....users.exceptions import AlreadyPreRegisteredError +from ...constants import MSG_2FA_UNAVAILABLE, MSG_WRONG_PASSWORD +from ...errors import ( + SendingVerificationEmailError, + SendingVerificationSmsError, + WrongPasswordError, +) + +_TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { + AlreadyPreRegisteredError: HttpErrorInfo( + status.HTTP_409_CONFLICT, + user_message( + "An account for the email {email} has been submitted. 
If you haven't received any updates, please contact support.", + _version=1, + ), + ), + SendingVerificationSmsError: HttpErrorInfo( + status.HTTP_503_SERVICE_UNAVAILABLE, + MSG_2FA_UNAVAILABLE, + ), + SendingVerificationEmailError: HttpErrorInfo( + status.HTTP_503_SERVICE_UNAVAILABLE, + MSG_2FA_UNAVAILABLE, + ), +} + + +async def _handle_legacy_error_response(request: web.Request, exception: Exception): + """ + This handlers keeps compatibility with error responses that include deprecated + `ErrorGet.errors` field + + SEE packages/models-library/src/models_library/rest_error.py + """ + assert isinstance( # nosec + exception, WrongPasswordError + ), f"Expected WrongPasswordError, got {type(exception)}" + + return handle_aiohttp_web_http_error( + request=request, + exception=web.HTTPUnauthorized(text=MSG_WRONG_PASSWORD), + ) + + +handle_rest_requests_exceptions = exception_handling_decorator( + { + **to_exceptions_handlers_map(_TO_HTTP_ERROR_MAP), + WrongPasswordError: _handle_legacy_error_response, + }, +) diff --git a/services/web/server/src/simcore_service_webserver/login/_controller/rest/auth.py b/services/web/server/src/simcore_service_webserver/login/_controller/rest/auth.py index 2a737579bedc..d685e2f29a7b 100644 --- a/services/web/server/src/simcore_service_webserver/login/_controller/rest/auth.py +++ b/services/web/server/src/simcore_service_webserver/login/_controller/rest/auth.py @@ -2,28 +2,28 @@ from aiohttp import web from aiohttp.web import RouteTableDef +from common_library.logging.logging_base import get_log_record_extra +from common_library.user_messages import user_message from models_library.authentification import TwoFactorAuthentificationMethod -from models_library.emails import LowerCaseEmailStr -from pydantic import BaseModel, Field, PositiveInt, SecretStr, TypeAdapter +from pydantic import TypeAdapter from servicelib.aiohttp import status +from servicelib.aiohttp.request_keys import RQT_USERID_KEY from servicelib.aiohttp.requests_validation import parse_request_body_as -from servicelib.logging_utils import get_log_record_extra, log_context -from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON -from servicelib.request_keys import RQT_USERID_KEY +from servicelib.logging_utils import log_context from simcore_postgres_database.models.users import UserRole from ...._meta import API_VTAG from ....products import products_web from ....products.models import Product -from ....security import api as security_service +from ....security import security_web from ....session.access_policies import ( on_success_grant_session_access_to, session_access_required, ) -from ....users import preferences_api as user_preferences_api -from ....utils_aiohttp import NextPage +from ....user_preferences import user_preferences_service +from ....web_utils import envelope_response, flash_response from ... 
import _auth_service, _login_service, _security_service, _twofa_service -from ..._constants import ( +from ...constants import ( CODE_2FA_EMAIL_CODE_REQUIRED, CODE_2FA_SMS_CODE_REQUIRED, CODE_PHONE_NUMBER_REQUIRED, @@ -37,10 +37,10 @@ MSG_WRONG_2FA_CODE__EXPIRED, MSG_WRONG_2FA_CODE__INVALID, ) -from ..._models import InputSchema from ...decorators import login_required -from ...errors import handle_login_exceptions from ...settings import LoginSettingsForProduct, get_plugin_settings +from ._rest_exceptions import handle_rest_requests_exceptions +from .auth_schemas import LoginBody, LoginTwoFactorAuthBody, LogoutBody log = logging.getLogger(__name__) @@ -48,20 +48,6 @@ routes = RouteTableDef() -class LoginBody(InputSchema): - email: LowerCaseEmailStr - password: SecretStr - - -class CodePageParams(BaseModel): - message: str - expiration_2fa: PositiveInt | None = None - next_url: str | None = None - - -class LoginNextPage(NextPage[CodePageParams]): ... - - @routes.post(f"/{API_VTAG}/auth/login", name="auth_login") @on_success_grant_session_access_to( name="auth_register_phone", @@ -75,7 +61,7 @@ class LoginNextPage(NextPage[CodePageParams]): ... name="auth_resend_2fa_code", max_access_count=MAX_2FA_CODE_RESEND, ) -@handle_login_exceptions +@handle_rest_requests_exceptions async def login(request: web.Request): """Login: user submits an email (identification) and a password @@ -88,13 +74,18 @@ async def login(request: web.Request): login_data = await parse_request_body_as(LoginBody, request) # Authenticate user and verify access to the product - user = await _auth_service.check_authorized_user_credentials_or_raise( - user=await _auth_service.get_user_by_email(request.app, email=login_data.email), + user = await _auth_service.get_user_or_none(request.app, email=login_data.email) + + user = _auth_service.check_not_null_user(user) + + user = await _auth_service.check_authorized_user_credentials( + request.app, + user, password=login_data.password.get_secret_value(), product=product, ) - await _auth_service.check_authorized_user_in_product_or_raise( - request.app, user=user, product=product + await _auth_service.check_authorized_user_in_product( + request.app, user_email=user["email"], product=product ) # Check if user role allows skipping 2FA or if 2FA is not required @@ -103,18 +94,18 @@ async def login(request: web.Request): return await _security_service.login_granted_response(request, user=user) # 2FA login process continuation - user_2fa_preference = await user_preferences_api.get_frontend_user_preference( + user_2fa_preference = await user_preferences_service.get_frontend_user_preference( request.app, user_id=user["id"], product_name=product.name, - preference_class=user_preferences_api.TwoFAFrontendUserPreference, + preference_class=user_preferences_service.TwoFAFrontendUserPreference, ) if not user_2fa_preference: user_2fa_authentification_method = TwoFactorAuthentificationMethod.SMS preference_id = ( - user_preferences_api.TwoFAFrontendUserPreference().preference_identifier + user_preferences_service.TwoFAFrontendUserPreference().preference_identifier ) - await user_preferences_api.set_frontend_user_preference( + await user_preferences_service.set_frontend_user_preference( request.app, user_id=user["id"], product_name=product.name, @@ -134,7 +125,7 @@ async def login(request: web.Request): user_2fa_authentification_method == TwoFactorAuthentificationMethod.SMS and not user["phone"] ): - return _login_service.envelope_response( + return envelope_response( # LoginNextPage { "name": 
CODE_PHONE_NUMBER_REQUIRED, @@ -165,11 +156,11 @@ async def login(request: web.Request): twilio_auth=settings.LOGIN_TWILIO, twilio_messaging_sid=product.twilio_messaging_sid, twilio_alpha_numeric_sender=product.twilio_alpha_numeric_sender_id, - first_name=user["first_name"], + first_name=user["first_name"] or user["name"], user_id=user["id"], ) - return _login_service.envelope_response( + return envelope_response( # LoginNextPage { "name": CODE_2FA_SMS_CODE_REQUIRED, @@ -196,7 +187,7 @@ async def login(request: web.Request): product=product, user_id=user["id"], ) - return _login_service.envelope_response( + return envelope_response( { "name": CODE_2FA_EMAIL_CODE_REQUIRED, "parameters": { @@ -208,16 +199,12 @@ async def login(request: web.Request): ) -class LoginTwoFactorAuthBody(InputSchema): - email: LowerCaseEmailStr - code: SecretStr - - @routes.post(f"/{API_VTAG}/auth/validate-code-login", name="auth_login_2fa") @session_access_required( "auth_login_2fa", unauthorized_reason=MSG_UNAUTHORIZED_LOGIN_2FA, ) +@handle_rest_requests_exceptions async def login_2fa(request: web.Request): """Login (continuation): Submits 2FA code""" product: Product = products_web.get_current_product(request) @@ -226,8 +213,7 @@ async def login_2fa(request: web.Request): ) if not settings.LOGIN_2FA_REQUIRED: raise web.HTTPServiceUnavailable( - reason="2FA login is not available", - content_type=MIMETYPE_APPLICATION_JSON, + text=user_message("2FA login is not available"), ) # validates input params @@ -238,16 +224,13 @@ async def login_2fa(request: web.Request): request.app, login_2fa_.email ) if not _expected_2fa_code: - raise web.HTTPUnauthorized( - reason=MSG_WRONG_2FA_CODE__EXPIRED, content_type=MIMETYPE_APPLICATION_JSON - ) + raise web.HTTPUnauthorized(text=MSG_WRONG_2FA_CODE__EXPIRED) if login_2fa_.code.get_secret_value() != _expected_2fa_code: - raise web.HTTPUnauthorized( - reason=MSG_WRONG_2FA_CODE__INVALID, content_type=MIMETYPE_APPLICATION_JSON - ) + raise web.HTTPUnauthorized(text=MSG_WRONG_2FA_CODE__INVALID) - user = await _auth_service.get_user_by_email(request.app, email=login_2fa_.email) - assert user is not None # nosec + user = _auth_service.check_not_null_user( + await _auth_service.get_user_or_none(request.app, email=login_2fa_.email) + ) # NOTE: a priviledge user should not have called this entrypoint assert UserRole(user["role"]) <= UserRole.USER # nosec @@ -255,17 +238,12 @@ async def login_2fa(request: web.Request): # dispose since code was used await _twofa_service.delete_2fa_code(request.app, login_2fa_.email) - return await _security_service.login_granted_response(request, user=dict(user)) - - -class LogoutBody(InputSchema): - client_session_id: str | None = Field( - None, examples=["5ac57685-c40f-448f-8711-70be1936fd63"] - ) + return await _security_service.login_granted_response(request, user=user) @routes.post(f"/{API_VTAG}/auth/logout", name="auth_logout") @login_required +@handle_rest_requests_exceptions async def logout(request: web.Request) -> web.Response: user_id = request.get(RQT_USERID_KEY, -1) @@ -280,26 +258,10 @@ async def logout(request: web.Request) -> web.Response: f"{logout_.client_session_id=}", extra=get_log_record_extra(user_id=user_id), ): - response = _login_service.flash_response(MSG_LOGGED_OUT, "INFO") + response = flash_response(MSG_LOGGED_OUT, "INFO") await _login_service.notify_user_logout( request.app, user_id, logout_.client_session_id ) - await security_service.forget_identity(request, response) + await security_web.forget_identity(request, response) 
return response - - -@routes.get(f"/{API_VTAG}/auth:check", name="check_authentication") -@login_required -async def check_auth(request: web.Request) -> web.Response: - # lightweight endpoint for checking if users are authenticated - # used primarily by Traefik auth middleware to verify session cookies - - # NOTE: for future development - # if database access is added here, services like jupyter-math - # which load a lot of resources will have a big performance hit - # consider caching some properties required by this endpoint or rely on Redis - - assert request # nosec - - return web.json_response(status=status.HTTP_204_NO_CONTENT) diff --git a/services/web/server/src/simcore_service_webserver/login/_controller/rest/auth_schemas.py b/services/web/server/src/simcore_service_webserver/login/_controller/rest/auth_schemas.py new file mode 100644 index 000000000000..a06d22596559 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/login/_controller/rest/auth_schemas.py @@ -0,0 +1,33 @@ +from aiohttp.web import RouteTableDef +from models_library.emails import LowerCaseEmailStr +from pydantic import BaseModel, Field, PositiveInt, SecretStr + +from ....utils_aiohttp import NextPage +from ..._models import InputSchema + +routes = RouteTableDef() + + +class LoginBody(InputSchema): + email: LowerCaseEmailStr + password: SecretStr + + +class CodePageParams(BaseModel): + message: str + expiration_2fa: PositiveInt | None = None + next_url: str | None = None + + +class LoginNextPage(NextPage[CodePageParams]): ... + + +class LoginTwoFactorAuthBody(InputSchema): + email: LowerCaseEmailStr + code: SecretStr + + +class LogoutBody(InputSchema): + client_session_id: str | None = Field( + None, examples=["5ac57685-c40f-448f-8711-70be1936fd63"] + ) diff --git a/services/web/server/src/simcore_service_webserver/login/_controller/rest/change.py b/services/web/server/src/simcore_service_webserver/login/_controller/rest/change.py index 14a61d428438..1fe1e28662bc 100644 --- a/services/web/server/src/simcore_service_webserver/login/_controller/rest/change.py +++ b/services/web/server/src/simcore_service_webserver/login/_controller/rest/change.py @@ -2,25 +2,28 @@ from aiohttp import web from aiohttp.web import RouteTableDef -from models_library.emails import LowerCaseEmailStr -from pydantic import SecretStr, field_validator +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs +from servicelib.aiohttp.request_keys import RQT_USERID_KEY from servicelib.aiohttp.requests_validation import parse_request_body_as -from servicelib.logging_errors import create_troubleshotting_log_kwargs -from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON -from servicelib.request_keys import RQT_USERID_KEY -from simcore_postgres_database.utils_repos import pass_or_acquire_connection from simcore_postgres_database.utils_users import UsersRepo -from simcore_service_webserver.db.plugin import get_asyncpg_engine from ...._meta import API_VTAG +from ....db.plugin import get_asyncpg_engine from ....products import products_web from ....products.models import Product -from ....security.api import check_password, encrypt_password -from ....users import api as users_service +from ....users import users_service from ....utils import HOUR from ....utils_rate_limiting import global_rate_limit_route -from ... import _confirmation_service, _confirmation_web -from ..._constants import ( +from ....web_utils import flash_response +from ... 
import _auth_service, _confirmation_service, _confirmation_web +from ..._emails_service import get_template_path, send_email_from_template +from ..._login_repository_legacy import AsyncpgStorage, get_plugin_storage +from ..._login_service import ( + ACTIVE, + CHANGE_EMAIL, + validate_user_access, +) +from ...constants import ( MSG_CANT_SEND_MAIL, MSG_CHANGE_EMAIL_REQUESTED, MSG_EMAIL_SENT, @@ -28,17 +31,10 @@ MSG_PASSWORD_CHANGED, MSG_WRONG_PASSWORD, ) -from ..._emails_service import get_template_path, send_email_from_template -from ..._login_repository_legacy import AsyncpgStorage, get_plugin_storage -from ..._login_service import ( - ACTIVE, - CHANGE_EMAIL, - flash_response, - validate_user_status, -) -from ..._models import InputSchema, create_password_match_validator from ...decorators import login_required +from ...errors import WrongPasswordError from ...settings import LoginOptions, get_plugin_options +from .change_schemas import ChangeEmailBody, ChangePasswordBody, ResetPasswordBody _logger = logging.getLogger(__name__) @@ -46,10 +42,6 @@ routes = RouteTableDef() -class ResetPasswordBody(InputSchema): - email: LowerCaseEmailStr - - @routes.post(f"/{API_VTAG}/auth/reset-password", name="initiate_reset_password") @global_rate_limit_route( number_of_requests=10, interval_seconds=HOUR, error_msg=MSG_OFTEN_RESET_PASSWORD @@ -127,10 +119,10 @@ def _get_error_context( ok = True # CHECK user exists - user = await db.get_user({"email": request_body.email}) + user = await _auth_service.get_user_or_none(request.app, email=request_body.email) if not user: _logger.warning( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( f"{_error_msg_prefix} for non-existent email. {_error_msg_suffix}", error=Exception("No user found with this email"), error_context=_get_error_context(), @@ -144,15 +136,19 @@ def _get_error_context( # CHECK user state try: - validate_user_status(user=dict(user), support_email=product.support_email) + validate_user_access( + user_status=user["status"], + user_role=user["role"], + support_email=product.support_email, + ) except web.HTTPError as err: # NOTE: we abuse here (untiby reusing `validate_user_status` and catching http errors that we # do not want to forward but rather log due to the special rules in this entrypoint _logger.warning( - **create_troubleshotting_log_kwargs( - f"{_error_msg_prefix} for invalid user. {_error_msg_suffix}.", + **create_troubleshooting_log_kwargs( + f"{_error_msg_prefix} for invalid user. {err.text}. {_error_msg_suffix}", error=err, - error_context=_get_error_context(user), + error_context={**_get_error_context(user), "error.text": err.text}, ) ) ok = False @@ -167,8 +163,8 @@ def _get_error_context( request.app, user_id=user["id"], product_name=product.name ): _logger.warning( - **create_troubleshotting_log_kwargs( - f"{_error_msg_prefix} for a user with NO access to this product. {_error_msg_suffix}.", + **create_troubleshooting_log_kwargs( + f"{_error_msg_prefix} for a user with NO access to this product. 
{_error_msg_suffix}", error=Exception("User cannot access this product"), error_context=_get_error_context(user), ) @@ -210,7 +206,7 @@ def _get_error_context( ) except Exception as err: # pylint: disable=broad-except _logger.exception( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( "Unable to send email", error=err, error_context=_get_error_context(user), @@ -222,10 +218,6 @@ def _get_error_context( return flash_response(MSG_EMAIL_SENT.format(email=request_body.email), "INFO") -class ChangeEmailBody(InputSchema): - email: LowerCaseEmailStr - - async def initiate_change_email(request: web.Request): # NOTE: This code have been intentially disabled in https://github.com/ITISFoundation/osparc-simcore/pull/5472 db: AsyncpgStorage = get_plugin_storage(request.app) @@ -233,15 +225,17 @@ async def initiate_change_email(request: web.Request): request_body = await parse_request_body_as(ChangeEmailBody, request) - user = await db.get_user({"id": request[RQT_USERID_KEY]}) + user = await _auth_service.get_user_or_none( + request.app, user_id=request[RQT_USERID_KEY] + ) assert user # nosec if user["email"] == request_body.email: return flash_response("Email changed") - async with pass_or_acquire_connection(get_asyncpg_engine(request.app)) as conn: - if await UsersRepo.is_email_used(conn, email=request_body.email): - raise web.HTTPUnprocessableEntity(text="This email cannot be used") + repo = UsersRepo(get_asyncpg_engine(request.app)) + if await repo.is_email_used(email=request_body.email): + raise web.HTTPUnprocessableEntity(text="This email cannot be used") # Reset if previously requested confirmation = await db.get_confirmation({"user": user, "action": CHANGE_EMAIL}) @@ -273,34 +267,29 @@ async def initiate_change_email(request: web.Request): return flash_response(MSG_CHANGE_EMAIL_REQUESTED) -class ChangePasswordBody(InputSchema): - current: SecretStr - new: SecretStr - confirm: SecretStr - - _password_confirm_match = field_validator("confirm")( - create_password_match_validator(reference_field="new") - ) - - @routes.post(f"/{API_VTAG}/auth/change-password", name="auth_change_password") @login_required async def change_password(request: web.Request): - - db: AsyncpgStorage = get_plugin_storage(request.app) passwords = await parse_request_body_as(ChangePasswordBody, request) + user_id = request[RQT_USERID_KEY] + user = await _auth_service.get_user_or_none(request.app, user_id=user_id) - user = await db.get_user({"id": request[RQT_USERID_KEY]}) - assert user # nosec - - if not check_password(passwords.current.get_secret_value(), user["password_hash"]): - raise web.HTTPUnprocessableEntity( - reason=MSG_WRONG_PASSWORD, content_type=MIMETYPE_APPLICATION_JSON - ) # 422 - - await db.update_user( - dict(user), - {"password_hash": encrypt_password(passwords.new.get_secret_value())}, + try: + await _auth_service.check_authorized_user_credentials( + request.app, + user=user, + password=passwords.current.get_secret_value(), + product=products_web.get_current_product(request), + ) + except WrongPasswordError as err: + raise web.HTTPUnprocessableEntity(text=MSG_WRONG_PASSWORD) from err + + await _auth_service.update_user_password( + request.app, + user_id=user_id, + current_password=passwords.current.get_secret_value(), + new_password=passwords.new.get_secret_value(), + verify_current_password=False, ) return flash_response(MSG_PASSWORD_CHANGED) diff --git a/services/web/server/src/simcore_service_webserver/login/_controller/rest/change_schemas.py 
b/services/web/server/src/simcore_service_webserver/login/_controller/rest/change_schemas.py new file mode 100644 index 000000000000..0af207301faf --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/login/_controller/rest/change_schemas.py @@ -0,0 +1,22 @@ +from models_library.emails import LowerCaseEmailStr +from pydantic import SecretStr, field_validator + +from ..._models import InputSchema, create_password_match_validator + + +class ResetPasswordBody(InputSchema): + email: LowerCaseEmailStr + + +class ChangeEmailBody(InputSchema): + email: LowerCaseEmailStr + + +class ChangePasswordBody(InputSchema): + current: SecretStr + new: SecretStr + confirm: SecretStr + + _password_confirm_match = field_validator("confirm")( + create_password_match_validator(reference_field="new") + ) diff --git a/services/web/server/src/simcore_service_webserver/login/_controller/rest/confirmation.py b/services/web/server/src/simcore_service_webserver/login/_controller/rest/confirmation.py index 10a31cd29583..7003eef09a0d 100644 --- a/services/web/server/src/simcore_service_webserver/login/_controller/rest/confirmation.py +++ b/services/web/server/src/simcore_service_webserver/login/_controller/rest/confirmation.py @@ -1,45 +1,36 @@ import logging -from contextlib import suppress -from json import JSONDecodeError from aiohttp import web from aiohttp.web import RouteTableDef from common_library.error_codes import create_error_code +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from models_library.emails import LowerCaseEmailStr from models_library.products import ProductName from pydantic import ( - BaseModel, - Field, - PositiveInt, - SecretStr, TypeAdapter, - ValidationError, - field_validator, ) from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import ( parse_request_body_as, parse_request_path_parameters_as, ) -from servicelib.logging_errors import create_troubleshotting_log_kwargs from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON -from simcore_postgres_database.aiopg_errors import UniqueViolation from yarl import URL from ....products import products_web from ....products.models import Product -from ....security.api import encrypt_password from ....session.access_policies import session_access_required from ....utils import HOUR, MINUTE from ....utils_aiohttp import create_redirect_to_page_response from ....utils_rate_limiting import global_rate_limit_route -from ... import _confirmation_service, _security_service, _twofa_service -from ..._constants import ( - MSG_PASSWORD_CHANGE_NOT_ALLOWED, - MSG_PASSWORD_CHANGED, - MSG_UNAUTHORIZED_PHONE_CONFIRMATION, +from ....web_utils import flash_response +from ... 
import ( + _auth_service, + _confirmation_service, + _registration_service, + _security_service, + _twofa_service, ) -from ..._invitations_service import ConfirmedInvitationData from ..._login_repository_legacy import ( AsyncpgStorage, ConfirmationTokenDict, @@ -50,16 +41,25 @@ CHANGE_EMAIL, REGISTRATION, RESET_PASSWORD, - flash_response, notify_user_confirmation, ) -from ..._models import InputSchema, check_confirm_password_match +from ...constants import ( + MSG_PASSWORD_CHANGE_NOT_ALLOWED, + MSG_PASSWORD_CHANGED, + MSG_UNAUTHORIZED_PHONE_CONFIRMATION, +) from ...settings import ( LoginOptions, LoginSettingsForProduct, get_plugin_options, get_plugin_settings, ) +from .confirmation_schemas import ( + CodePathParam, + PhoneConfirmationBody, + ResetPasswordConfirmation, + parse_extra_credits_in_usd_or_none, +) _logger = logging.getLogger(__name__) @@ -67,20 +67,6 @@ routes = RouteTableDef() -class _PathParam(BaseModel): - code: SecretStr - - -def _parse_extra_credits_in_usd_or_none( - confirmation: ConfirmationTokenDict, -) -> PositiveInt | None: - with suppress(ValidationError, JSONDecodeError): - confirmation_data = confirmation.get("data", "EMPTY") or "EMPTY" - invitation = ConfirmedInvitationData.model_validate_json(confirmation_data) - return invitation.extra_credits_in_usd - return None - - async def _handle_confirm_registration( app: web.Application, product_name: ProductName, @@ -100,7 +86,7 @@ async def _handle_confirm_registration( app, user_id=user_id, product_name=product_name, - extra_credits_in_usd=_parse_extra_credits_in_usd_or_none(confirmation), + extra_credits_in_usd=parse_extra_credits_in_usd_or_none(confirmation), ) @@ -143,7 +129,7 @@ async def validate_confirmation_and_redirect(request: web.Request): cfg: LoginOptions = get_plugin_options(request.app) product: Product = products_web.get_current_product(request) - path_params = parse_request_path_parameters_as(_PathParam, request) + path_params = parse_request_path_parameters_as(CodePathParam, request) confirmation: ConfirmationTokenDict | None = ( await _confirmation_service.validate_confirmation_code( @@ -195,7 +181,7 @@ async def validate_confirmation_and_redirect(request: web.Request): ) _logger.exception( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( user_error_msg, error=err, error_code=error_code, @@ -213,14 +199,6 @@ async def validate_confirmation_and_redirect(request: web.Request): raise web.HTTPFound(location=redirect_to_login_url) -class PhoneConfirmationBody(InputSchema): - email: LowerCaseEmailStr - phone: str = Field( - ..., description="Phone number E.164, needed on the deployments with 2FA" - ) - code: SecretStr - - @routes.post("/v0/auth/validate-code-register", name="auth_phone_confirmation") @global_rate_limit_route(number_of_requests=5, interval_seconds=MINUTE) @session_access_required( @@ -233,11 +211,9 @@ async def phone_confirmation(request: web.Request): request.app, product_name=product.name ) - db: AsyncpgStorage = get_plugin_storage(request.app) - if not settings.LOGIN_2FA_REQUIRED: raise web.HTTPServiceUnavailable( - reason="Phone registration is not available", + text="Phone registration is not available", content_type=MIMETYPE_APPLICATION_JSON, ) @@ -249,33 +225,22 @@ async def phone_confirmation(request: web.Request): # consumes code await _twofa_service.delete_2fa_code(request.app, request_body.email) - # updates confirmed phone number - try: - user = await db.get_user({"email": request_body.email}) - assert user is not None # nosec - await 
db.update_user(dict(user), {"phone": request_body.phone}) - - except UniqueViolation as err: - raise web.HTTPUnauthorized( - reason="Invalid phone number", - content_type=MIMETYPE_APPLICATION_JSON, - ) from err + user = _auth_service.check_not_null_user( + await _auth_service.get_user_or_none(request.app, email=request_body.email) + ) + + await _registration_service.register_user_phone( + request.app, user_id=user["id"], user_phone=request_body.phone + ) - return await _security_service.login_granted_response(request, user=dict(user)) + return await _security_service.login_granted_response(request, user=user) # fails because of invalid or no code raise web.HTTPUnauthorized( - reason="Invalid 2FA code", content_type=MIMETYPE_APPLICATION_JSON + text="Invalid 2FA code", content_type=MIMETYPE_APPLICATION_JSON ) -class ResetPasswordConfirmation(InputSchema): - password: SecretStr - confirm: SecretStr - - _password_confirm_match = field_validator("confirm")(check_confirm_password_match) - - @routes.post("/v0/auth/reset-password/{code}", name="complete_reset_password") @global_rate_limit_route(number_of_requests=10, interval_seconds=HOUR) async def complete_reset_password(request: web.Request): @@ -288,7 +253,7 @@ async def complete_reset_password(request: web.Request): cfg: LoginOptions = get_plugin_options(request.app) product: Product = products_web.get_current_product(request) - path_params = parse_request_path_parameters_as(_PathParam, request) + path_params = parse_request_path_parameters_as(CodePathParam, request) request_body = await parse_request_body_as(ResetPasswordConfirmation, request) confirmation = await _confirmation_service.validate_confirmation_code( @@ -296,24 +261,25 @@ async def complete_reset_password(request: web.Request): ) if confirmation: - user = await db.get_user({"id": confirmation["user_id"]}) + user = await _auth_service.get_user_or_none( + request.app, user_id=confirmation["user_id"] + ) assert user # nosec - await db.update_user( - user={"id": user["id"]}, - updates={ - "password_hash": encrypt_password( - request_body.password.get_secret_value() - ) - }, + await _auth_service.update_user_password( + request.app, + user_id=user["id"], + current_password="", + new_password=request_body.password.get_secret_value(), + verify_current_password=False, # confirmed by code ) + await db.delete_confirmation(confirmation) return flash_response(MSG_PASSWORD_CHANGED) raise web.HTTPUnauthorized( - reason=MSG_PASSWORD_CHANGE_NOT_ALLOWED.format( + text=MSG_PASSWORD_CHANGE_NOT_ALLOWED.format( support_email=product.support_email ), - content_type=MIMETYPE_APPLICATION_JSON, ) # 401 diff --git a/services/web/server/src/simcore_service_webserver/login/_controller/rest/confirmation_schemas.py b/services/web/server/src/simcore_service_webserver/login/_controller/rest/confirmation_schemas.py new file mode 100644 index 000000000000..f1d732d4c322 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/login/_controller/rest/confirmation_schemas.py @@ -0,0 +1,47 @@ +from contextlib import suppress +from json import JSONDecodeError + +from models_library.emails import LowerCaseEmailStr +from pydantic import ( + BaseModel, + Field, + PositiveInt, + SecretStr, + ValidationError, + field_validator, +) + +from ..._invitations_service import ConfirmedInvitationData +from ..._login_repository_legacy import ( + ConfirmationTokenDict, +) +from ..._models import InputSchema, check_confirm_password_match + + +class CodePathParam(BaseModel): + code: SecretStr + + +def 
parse_extra_credits_in_usd_or_none( + confirmation: ConfirmationTokenDict, +) -> PositiveInt | None: + with suppress(ValidationError, JSONDecodeError): + confirmation_data = confirmation.get("data", "EMPTY") or "EMPTY" + invitation = ConfirmedInvitationData.model_validate_json(confirmation_data) + return invitation.extra_credits_in_usd + return None + + +class PhoneConfirmationBody(InputSchema): + email: LowerCaseEmailStr + phone: str = Field( + ..., description="Phone number E.164, needed on the deployments with 2FA" + ) + code: SecretStr + + +class ResetPasswordConfirmation(InputSchema): + password: SecretStr + confirm: SecretStr + + _password_confirm_match = field_validator("confirm")(check_confirm_password_match) diff --git a/services/web/server/src/simcore_service_webserver/login/_controller/rest/registration.py b/services/web/server/src/simcore_service_webserver/login/_controller/rest/registration.py index f6cffbdbc600..081bbe650416 100644 --- a/services/web/server/src/simcore_service_webserver/login/_controller/rest/registration.py +++ b/services/web/server/src/simcore_service_webserver/login/_controller/rest/registration.py @@ -1,22 +1,12 @@ import logging from datetime import UTC, datetime, timedelta -from typing import Literal from aiohttp import web from aiohttp.web import RouteTableDef from common_library.error_codes import create_error_code -from models_library.emails import LowerCaseEmailStr -from pydantic import ( - BaseModel, - ConfigDict, - Field, - PositiveInt, - SecretStr, - field_validator, -) +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import parse_request_body_as -from servicelib.logging_errors import create_troubleshotting_log_kwargs from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from simcore_postgres_database.models.users import UserStatus @@ -30,17 +20,15 @@ session_access_required, ) from ....utils import MINUTE -from ....utils_aiohttp import NextPage, envelope_json_response +from ....utils_aiohttp import envelope_json_response from ....utils_rate_limiting import global_rate_limit_route -from ... import _auth_service, _confirmation_web, _security_service, _twofa_service -from ..._constants import ( - CODE_2FA_SMS_CODE_REQUIRED, - MAX_2FA_CODE_RESEND, - MAX_2FA_CODE_TRIALS, - MSG_2FA_CODE_SENT, - MSG_CANT_SEND_MAIL, - MSG_UNAUTHORIZED_REGISTER_PHONE, - MSG_WEAK_PASSWORD, +from ....web_utils import envelope_response, flash_response +from ... 
import ( + _auth_service, + _confirmation_web, + _registration_service, + _security_service, + _twofa_service, ) from ..._emails_service import get_template_path, send_email_from_template from ..._invitations_service import ( @@ -55,18 +43,29 @@ get_plugin_storage, ) from ..._login_service import ( - envelope_response, - flash_response, - get_user_name_from_email, notify_user_confirmation, ) -from ..._models import InputSchema, check_confirm_password_match +from ...constants import ( + CODE_2FA_SMS_CODE_REQUIRED, + MAX_2FA_CODE_RESEND, + MAX_2FA_CODE_TRIALS, + MSG_2FA_CODE_SENT, + MSG_CANT_SEND_MAIL, + MSG_UNAUTHORIZED_REGISTER_PHONE, + MSG_WEAK_PASSWORD, +) from ...settings import ( LoginOptions, LoginSettingsForProduct, get_plugin_options, get_plugin_settings, ) +from .registration_schemas import ( + InvitationCheck, + InvitationInfo, + RegisterBody, + RegisterPhoneBody, +) _logger = logging.getLogger(__name__) @@ -74,16 +73,6 @@ routes = RouteTableDef() -class InvitationCheck(InputSchema): - invitation: str = Field(..., description="Invitation code") - - -class InvitationInfo(InputSchema): - email: LowerCaseEmailStr | None = Field( - None, description="Email associated to invitation or None" - ) - - @routes.post( f"/{API_VTAG}/auth/register/invitations:check", name="auth_check_registration_invitation", @@ -118,27 +107,6 @@ async def check_registration_invitation(request: web.Request): return envelope_json_response(InvitationInfo(email=email)) -class RegisterBody(InputSchema): - email: LowerCaseEmailStr - password: SecretStr - confirm: SecretStr | None = Field(None, description="Password confirmation") - invitation: str | None = Field(None, description="Invitation code") - - _password_confirm_match = field_validator("confirm")(check_confirm_password_match) - model_config = ConfigDict( - json_schema_extra={ - "examples": [ - { - "email": "foo@mymail.com", - "password": "my secret", # NOSONAR - "confirm": "my secret", # optional - "invitation": "33c451d4-17b7-4e65-9880-694559b8ffc2", # optional only active - } - ] - } - ) - - @routes.post(f"/{API_VTAG}/auth/register", name="auth_register") async def register(request: web.Request): """ @@ -168,7 +136,7 @@ async def register(request: web.Request): < settings.LOGIN_PASSWORD_MIN_LENGTH ): raise web.HTTPUnauthorized( - reason=MSG_WEAK_PASSWORD.format( + text=MSG_WEAK_PASSWORD.format( LOGIN_PASSWORD_MIN_LENGTH=settings.LOGIN_PASSWORD_MIN_LENGTH ), content_type=MIMETYPE_APPLICATION_JSON, @@ -198,7 +166,7 @@ async def register(request: web.Request): invitation_code = registration.invitation if invitation_code is None: raise web.HTTPBadRequest( - reason="invitation field is required", + text="invitation field is required", content_type=MIMETYPE_APPLICATION_JSON, ) @@ -217,9 +185,10 @@ async def register(request: web.Request): ).replace(tzinfo=None) # get authorized user or create new - user = await _auth_service.get_user_by_email(request.app, email=registration.email) + user = await _auth_service.get_user_or_none(request.app, email=registration.email) if user: - await _auth_service.check_authorized_user_credentials_or_raise( + await _auth_service.check_authorized_user_credentials( + request.app, user, password=registration.password.get_secret_value(), product=product, @@ -237,6 +206,8 @@ async def register(request: web.Request): expires_at=expires_at, ) + assert user is not None # nosec + # setup user groups assert ( # nosec product.name == invitation.product @@ -284,7 +255,7 @@ async def register(request: web.Request): user_error_msg = 
MSG_CANT_SEND_MAIL _logger.exception( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( user_error_msg, error=err, error_code=error_code, @@ -299,7 +270,7 @@ async def register(request: web.Request): ) ) - await db.delete_confirmation_and_user(user, _confirmation) + await db.delete_confirmation_and_user(user["id"], _confirmation) raise web.HTTPServiceUnavailable(text=user_error_msg) from err @@ -331,23 +302,6 @@ async def register(request: web.Request): return await _security_service.login_granted_response(request=request, user=user) -class RegisterPhoneBody(InputSchema): - email: LowerCaseEmailStr - phone: str = Field( - ..., description="Phone number E.164, needed on the deployments with 2FA" - ) - - -class _PageParams(BaseModel): - expiration_2fa: PositiveInt | None = None - - -class RegisterPhoneNextPage(NextPage[_PageParams]): - logger: str = Field("user", deprecated=True) - level: Literal["INFO", "WARNING", "ERROR"] = "INFO" - message: str - - @routes.post(f"/{API_VTAG}/auth/verify-phone-number", name="auth_register_phone") @session_access_required( name="auth_register_phone", @@ -374,7 +328,7 @@ async def register_phone(request: web.Request): if not settings.LOGIN_2FA_REQUIRED: raise web.HTTPServiceUnavailable( - reason="Phone registration is not available", + text="Phone registration is not available", content_type=MIMETYPE_APPLICATION_JSON, ) @@ -398,7 +352,9 @@ async def register_phone(request: web.Request): twilio_auth=settings.LOGIN_TWILIO, twilio_messaging_sid=product.twilio_messaging_sid, twilio_alpha_numeric_sender=product.twilio_alpha_numeric_sender_id, - first_name=get_user_name_from_email(registration.email), + first_name=_registration_service.get_user_name_from_email( + registration.email + ), ) return envelope_response( @@ -426,7 +382,7 @@ async def register_phone(request: web.Request): user_error_msg = "Currently we cannot register phone numbers" _logger.exception( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( user_error_msg, error=err, error_code=error_code, @@ -436,6 +392,6 @@ async def register_phone(request: web.Request): ) raise web.HTTPServiceUnavailable( - reason=user_error_msg, + text=user_error_msg, content_type=MIMETYPE_APPLICATION_JSON, ) from err diff --git a/services/web/server/src/simcore_service_webserver/login/_controller/rest/registration_schemas.py b/services/web/server/src/simcore_service_webserver/login/_controller/rest/registration_schemas.py new file mode 100644 index 000000000000..a22b36be172a --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/login/_controller/rest/registration_schemas.py @@ -0,0 +1,70 @@ +import logging +from typing import Annotated, Literal + +from models_library.emails import LowerCaseEmailStr +from pydantic import ( + BaseModel, + ConfigDict, + Field, + PositiveInt, + SecretStr, + field_validator, +) + +from ....models import PhoneNumberStr +from ....utils_aiohttp import NextPage +from ..._models import InputSchema, check_confirm_password_match + +_logger = logging.getLogger(__name__) + + +class InvitationCheck(InputSchema): + invitation: Annotated[str, Field(description="Invitation code")] + + +class InvitationInfo(InputSchema): + email: Annotated[ + LowerCaseEmailStr | None, + Field(description="Email associated to invitation or None"), + ] = None + + +class RegisterBody(InputSchema): + email: LowerCaseEmailStr + password: SecretStr + confirm: Annotated[SecretStr | None, Field(description="Password confirmation")] = ( + None + ) + invitation: 
Annotated[str | None, Field(description="Invitation code")] = None + + _password_confirm_match = field_validator("confirm")(check_confirm_password_match) + model_config = ConfigDict( + json_schema_extra={ + "examples": [ + { + "email": "foo@mymail.com", + "password": "my secret", # NOSONAR + "confirm": "my secret", # optional + "invitation": "33c451d4-17b7-4e65-9880-694559b8ffc2", # optional only active + } + ] + } + ) + + +class RegisterPhoneBody(InputSchema): + email: LowerCaseEmailStr + phone: Annotated[ + PhoneNumberStr, + Field(description="Phone number E.164, needed on the deployments with 2FA"), + ] + + +class _PageParams(BaseModel): + expiration_2fa: PositiveInt | None = None + + +class RegisterPhoneNextPage(NextPage[_PageParams]): + logger: Annotated[str, Field(deprecated=True)] = "user" + level: Literal["INFO", "WARNING", "ERROR"] = "INFO" + message: str diff --git a/services/web/server/src/simcore_service_webserver/login/_controller/rest/twofa.py b/services/web/server/src/simcore_service_webserver/login/_controller/rest/twofa.py index bb9cc3ff01ce..62bce235f1d0 100644 --- a/services/web/server/src/simcore_service_webserver/login/_controller/rest/twofa.py +++ b/services/web/server/src/simcore_service_webserver/login/_controller/rest/twofa.py @@ -1,30 +1,26 @@ import logging -from typing import Literal from aiohttp import web from aiohttp.web import RouteTableDef -from models_library.emails import LowerCaseEmailStr -from pydantic import Field +from common_library.user_messages import user_message from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import parse_request_body_as -from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from ....products import products_web from ....products.models import Product from ....session.access_policies import session_access_required -from ... import _twofa_service -from ..._constants import ( +from ....web_utils import envelope_response +from ... 
import _auth_service, _twofa_service +from ...constants import ( CODE_2FA_EMAIL_CODE_REQUIRED, CODE_2FA_SMS_CODE_REQUIRED, MSG_2FA_CODE_SENT, MSG_EMAIL_SENT, MSG_UNKNOWN_EMAIL, ) -from ..._login_repository_legacy import AsyncpgStorage, get_plugin_storage -from ..._login_service import envelope_response -from ..._models import InputSchema -from ...errors import handle_login_exceptions from ...settings import LoginSettingsForProduct, get_plugin_settings +from ._rest_exceptions import handle_rest_requests_exceptions +from .twofa_schemas import Resend2faBody _logger = logging.getLogger(__name__) @@ -32,36 +28,27 @@ routes = RouteTableDef() -class Resend2faBody(InputSchema): - email: LowerCaseEmailStr = Field(..., description="User email (identifier)") - via: Literal["SMS", "Email"] = "SMS" - - @routes.post("/v0/auth/two_factor:resend", name="auth_resend_2fa_code") @session_access_required( name="auth_resend_2fa_code", one_time_access=False, ) -@handle_login_exceptions +@handle_rest_requests_exceptions async def resend_2fa_code(request: web.Request): """Resends 2FA code via SMS/Email""" product: Product = products_web.get_current_product(request) settings: LoginSettingsForProduct = get_plugin_settings( request.app, product_name=product.name ) - db: AsyncpgStorage = get_plugin_storage(request.app) resend_2fa_ = await parse_request_body_as(Resend2faBody, request) - user = await db.get_user({"email": resend_2fa_.email}) + user = await _auth_service.get_user_or_none(request.app, email=resend_2fa_.email) if not user: - raise web.HTTPUnauthorized( - reason=MSG_UNKNOWN_EMAIL, content_type=MIMETYPE_APPLICATION_JSON - ) + raise web.HTTPUnauthorized(text=MSG_UNKNOWN_EMAIL) if not settings.LOGIN_2FA_REQUIRED: raise web.HTTPServiceUnavailable( - reason="2FA login is not available", - content_type=MIMETYPE_APPLICATION_JSON, + text=user_message("2FA login is not available") ) # Already a code? 
@@ -85,8 +72,14 @@ async def resend_2fa_code(request: web.Request): # sends via SMS if resend_2fa_.via == "SMS": + user_phone_number = user.get("phone") + if not user_phone_number: + raise web.HTTPBadRequest( + text=user_message("User does not have a phone number registered") + ) + await _twofa_service.send_sms_code( - phone_number=user["phone"], + phone_number=user_phone_number, code=code, twilio_auth=settings.LOGIN_TWILIO, twilio_messaging_sid=product.twilio_messaging_sid, @@ -100,7 +93,7 @@ async def resend_2fa_code(request: web.Request): "name": CODE_2FA_SMS_CODE_REQUIRED, "parameters": { "message": MSG_2FA_CODE_SENT.format( - phone_number=_twofa_service.mask_phone_number(user["phone"]) + phone_number=_twofa_service.mask_phone_number(user_phone_number) ), "expiration_2fa": settings.LOGIN_2FA_CODE_EXPIRATION_SEC, }, diff --git a/services/web/server/src/simcore_service_webserver/login/_controller/rest/twofa_schemas.py b/services/web/server/src/simcore_service_webserver/login/_controller/rest/twofa_schemas.py new file mode 100644 index 000000000000..ec31efc8d445 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/login/_controller/rest/twofa_schemas.py @@ -0,0 +1,11 @@ +from typing import Literal + +from models_library.emails import LowerCaseEmailStr +from pydantic import Field + +from ..._models import InputSchema + + +class Resend2faBody(InputSchema): + email: LowerCaseEmailStr = Field(..., description="User email (identifier)") + via: Literal["SMS", "Email"] = "SMS" diff --git a/services/web/server/src/simcore_service_webserver/login/_emails_service.py b/services/web/server/src/simcore_service_webserver/login/_emails_service.py index 9aef8317104c..4a0fb78f2f86 100644 --- a/services/web/server/src/simcore_service_webserver/login/_emails_service.py +++ b/services/web/server/src/simcore_service_webserver/login/_emails_service.py @@ -1,11 +1,12 @@ import logging from pathlib import Path -from aiohttp import web - from .._resources import webserver_resources -from ..email.utils import AttachmentTuple, send_email_from_template -from ..products import products_web +from ..email.email_service import ( + AttachmentTuple, + get_template_path, + send_email_from_template, +) log = logging.getLogger(__name__) @@ -15,17 +16,15 @@ def themed(dirname: str, template: str) -> Path: return path -async def get_template_path(request: web.Request, filename: str) -> Path: - return await products_web.get_product_template_path(request, filename) - - # prevents auto-removal by pycln # mypy: disable-error-code=truthy-function assert AttachmentTuple # nosec assert send_email_from_template # nosec +assert get_template_path # nosec __all__: tuple[str, ...] 
= ( "AttachmentTuple", "send_email_from_template", + "get_template_path", ) diff --git a/services/web/server/src/simcore_service_webserver/login/_invitations_service.py b/services/web/server/src/simcore_service_webserver/login/_invitations_service.py index df26f4f6aae0..7d753fb26713 100644 --- a/services/web/server/src/simcore_service_webserver/login/_invitations_service.py +++ b/services/web/server/src/simcore_service_webserver/login/_invitations_service.py @@ -11,6 +11,7 @@ from aiohttp import web from common_library.error_codes import create_error_code +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from models_library.basic_types import IdInt from models_library.emails import LowerCaseEmailStr from models_library.products import ProductName @@ -23,7 +24,6 @@ ValidationError, field_validator, ) -from servicelib.logging_errors import create_troubleshotting_log_kwargs from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from simcore_postgres_database.models.confirmations import ConfirmationAction from simcore_postgres_database.models.users import UserStatus @@ -40,17 +40,18 @@ InvitationsServiceUnavailableError, ) from ..products.models import Product -from . import _confirmation_service -from ._constants import ( - MSG_EMAIL_ALREADY_REGISTERED, - MSG_INVITATIONS_CONTACT_SUFFIX, - MSG_USER_DISABLED, -) +from ..users import users_service +from . import _auth_service, _confirmation_service from ._login_repository_legacy import ( AsyncpgStorage, BaseConfirmationTokenDict, ConfirmationTokenDict, ) +from .constants import ( + MSG_EMAIL_ALREADY_REGISTERED, + MSG_INVITATIONS_CONTACT_SUFFIX, + MSG_USER_DISABLED, +) from .settings import LoginOptions _logger = logging.getLogger(__name__) @@ -102,7 +103,7 @@ async def _raise_if_registered_in_product(app: web.Application, user_email, prod app, user_email=user_email, group_id=product.group_id ): raise web.HTTPConflict( - reason=MSG_EMAIL_ALREADY_REGISTERED, + text=MSG_EMAIL_ALREADY_REGISTERED, content_type=MIMETYPE_APPLICATION_JSON, ) @@ -115,10 +116,9 @@ async def check_other_registrations( cfg: LoginOptions, ) -> None: # An account is already registered with this email - if user := await db.get_user({"email": email}): + if user := await _auth_service.get_user_or_none(app, email=email): user_status = UserStatus(user["status"]) match user_status: - case UserStatus.ACTIVE: await _raise_if_registered_in_product( app, user_email=user["email"], product=current_product @@ -143,10 +143,12 @@ async def check_other_registrations( ) if drop_previous_registration: if not _confirmation: - await db.delete_user(user=dict(user)) + await users_service.delete_user_without_projects( + app, user_id=user["id"], clean_cache=False + ) else: await db.delete_confirmation_and_user( - user=dict(user), confirmation=_confirmation + user_id=user["id"], confirmation=_confirmation ) _logger.warning( @@ -163,7 +165,7 @@ async def check_other_registrations( UserStatus.DELETED, ) raise web.HTTPConflict( - reason=MSG_USER_DISABLED.format( + text=MSG_USER_DISABLED.format( support_email=current_product.support_email ), content_type=MIMETYPE_APPLICATION_JSON, @@ -222,7 +224,7 @@ def _invitations_request_context(invitation_code: str) -> Iterator[URL]: user_error_msg = f"Invalid invitation. 
{MSG_INVITATIONS_CONTACT_SUFFIX}" _logger.exception( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( user_error_msg, error=err, error_code=error_code, @@ -230,7 +232,7 @@ def _invitations_request_context(invitation_code: str) -> Iterator[URL]: ) ) raise web.HTTPForbidden( - reason=user_error_msg, + text=user_error_msg, content_type=MIMETYPE_APPLICATION_JSON, ) from err @@ -239,7 +241,7 @@ def _invitations_request_context(invitation_code: str) -> Iterator[URL]: user_error_msg = "Unable to process your invitation since the invitations service is currently unavailable" _logger.exception( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( user_error_msg, error=err, error_code=error_code, @@ -247,7 +249,7 @@ def _invitations_request_context(invitation_code: str) -> Iterator[URL]: ) ) raise web.HTTPServiceUnavailable( - reason=user_error_msg, + text=user_error_msg, content_type=MIMETYPE_APPLICATION_JSON, ) from err @@ -323,7 +325,7 @@ async def check_and_consume_invitation( _logger.info("Invitation with %s was consumed", f"{confirmation_token=}") raise web.HTTPForbidden( - reason=( + text=( "Invalid invitation code." "Your invitation was already used or might have expired." + MSG_INVITATIONS_CONTACT_SUFFIX diff --git a/services/web/server/src/simcore_service_webserver/login/_login_repository_legacy.py b/services/web/server/src/simcore_service_webserver/login/_login_repository_legacy.py index d119c462d8b3..33be73a0fb1a 100644 --- a/services/web/server/src/simcore_service_webserver/login/_login_repository_legacy.py +++ b/services/web/server/src/simcore_service_webserver/login/_login_repository_legacy.py @@ -48,44 +48,6 @@ def __init__( self.user_tbl = user_table_name self.confirm_tbl = confirmation_table_name - # - # CRUD user - # - - async def get_user(self, with_data: dict[str, Any]) -> asyncpg.Record | None: - async with self.pool.acquire() as conn: - return await _login_repository_legacy_sql.find_one( - conn, self.user_tbl, with_data - ) - - async def create_user(self, data: dict[str, Any]) -> dict[str, Any]: - async with self.pool.acquire() as conn: - user_id = await _login_repository_legacy_sql.insert( - conn, self.user_tbl, data - ) - new_user = await _login_repository_legacy_sql.find_one( - conn, self.user_tbl, {"id": user_id} - ) - assert new_user # nosec - data.update( - id=new_user["id"], - created_at=new_user["created_at"], - primary_gid=new_user["primary_gid"], - ) - return data - - async def update_user(self, user: dict[str, Any], updates: dict[str, Any]) -> None: - async with self.pool.acquire() as conn: - await _login_repository_legacy_sql.update( - conn, self.user_tbl, {"id": user["id"]}, updates - ) - - async def delete_user(self, user: dict[str, Any]) -> None: - async with self.pool.acquire() as conn: - await _login_repository_legacy_sql.delete( - conn, self.user_tbl, {"id": user["id"]} - ) - # # CRUD confirmation # @@ -142,14 +104,14 @@ async def delete_confirmation(self, confirmation: ConfirmationTokenDict): # async def delete_confirmation_and_user( - self, user: dict[str, Any], confirmation: ConfirmationTokenDict + self, user_id: int, confirmation: ConfirmationTokenDict ): async with self.pool.acquire() as conn, conn.transaction(): await _login_repository_legacy_sql.delete( conn, self.confirm_tbl, {"code": confirmation["code"]} ) await _login_repository_legacy_sql.delete( - conn, self.user_tbl, {"id": user["id"]} + conn, self.user_tbl, {"id": user_id} ) async def delete_confirmation_and_update_user( diff --git 
a/services/web/server/src/simcore_service_webserver/login/_login_service.py b/services/web/server/src/simcore_service_webserver/login/_login_service.py index 259795c8cc37..345153e55557 100644 --- a/services/web/server/src/simcore_service_webserver/login/_login_service.py +++ b/services/web/server/src/simcore_service_webserver/login/_login_service.py @@ -1,19 +1,14 @@ -from dataclasses import asdict from typing import Any from aiohttp import web -from common_library.json_serialization import json_dumps from models_library.products import ProductName -from models_library.rest_error import LogMessageType from models_library.users import UserID from pydantic import PositiveInt from servicelib.aiohttp import observer -from servicelib.aiohttp.status import HTTP_200_OK -from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from simcore_postgres_database.models.users import UserRole from ..db.models import ConfirmationAction, UserStatus -from ._constants import ( +from .constants import ( MSG_ACTIVATION_REQUIRED, MSG_USER_BANNED, MSG_USER_DELETED, @@ -44,38 +39,31 @@ def _to_names(enum_cls, names) -> list[str]: ) -def validate_user_status(*, user: dict, support_email: str): +def validate_user_access(*, user_status: str, user_role: str, support_email: str): """ Raises: web.HTTPUnauthorized """ - assert "role" in user # nosec - - user_status: str = user["status"] if user_status == DELETED: raise web.HTTPUnauthorized( - reason=MSG_USER_DELETED.format(support_email=support_email), - content_type=MIMETYPE_APPLICATION_JSON, + text=MSG_USER_DELETED.format(support_email=support_email), ) # 401 - if user_status == BANNED or user["role"] == ANONYMOUS: + if user_status == BANNED or user_role == ANONYMOUS: raise web.HTTPUnauthorized( - reason=MSG_USER_BANNED.format(support_email=support_email), - content_type=MIMETYPE_APPLICATION_JSON, + text=MSG_USER_BANNED.format(support_email=support_email), ) # 401 if user_status == EXPIRED: raise web.HTTPUnauthorized( - reason=MSG_USER_EXPIRED.format(support_email=support_email), - content_type=MIMETYPE_APPLICATION_JSON, + text=MSG_USER_EXPIRED.format(support_email=support_email), ) # 401 if user_status == CONFIRMATION_PENDING: raise web.HTTPUnauthorized( - reason=MSG_ACTIVATION_REQUIRED, - content_type=MIMETYPE_APPLICATION_JSON, + text=MSG_ACTIVATION_REQUIRED, ) # 401 assert user_status == ACTIVE # nosec @@ -114,27 +102,3 @@ async def notify_user_logout( client_session_id, app, ) - - -def flash_response( - message: str, level: str = "INFO", *, status: int = HTTP_200_OK -) -> web.Response: - return envelope_response( - data=asdict(LogMessageType(message, level)), - status=status, - ) - - -def envelope_response(data: Any, *, status: int = HTTP_200_OK) -> web.Response: - return web.json_response( - { - "data": data, - "error": None, - }, - dumps=json_dumps, - status=status, - ) - - -def get_user_name_from_email(email: str) -> str: - return email.split("@")[0] diff --git a/services/web/server/src/simcore_service_webserver/login/_models.py b/services/web/server/src/simcore_service_webserver/login/_models.py index c63430b59e68..b7243d76039a 100644 --- a/services/web/server/src/simcore_service_webserver/login/_models.py +++ b/services/web/server/src/simcore_service_webserver/login/_models.py @@ -2,7 +2,7 @@ from pydantic import BaseModel, ConfigDict, SecretStr, ValidationInfo -from ._constants import MSG_PASSWORD_MISMATCH +from .constants import MSG_PASSWORD_MISMATCH class InputSchema(BaseModel): diff --git 
a/services/web/server/src/simcore_service_webserver/login/_registration_service.py b/services/web/server/src/simcore_service_webserver/login/_registration_service.py new file mode 100644 index 000000000000..21b897327efd --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/login/_registration_service.py @@ -0,0 +1,16 @@ +from aiohttp import web +from simcore_postgres_database.utils_users import UsersRepo + +from ..db.plugin import get_asyncpg_engine + + +def get_user_name_from_email(email: str) -> str: + return email.split("@")[0] + + +async def register_user_phone( + app: web.Application, *, user_id: int, user_phone: str +) -> None: + asyncpg_engine = get_asyncpg_engine(app) + repo = UsersRepo(asyncpg_engine) + await repo.update_user_phone(user_id=user_id, phone=user_phone) diff --git a/services/web/server/src/simcore_service_webserver/login/_security_service.py b/services/web/server/src/simcore_service_webserver/login/_security_service.py index 0f8685d01abb..932c02287193 100644 --- a/services/web/server/src/simcore_service_webserver/login/_security_service.py +++ b/services/web/server/src/simcore_service_webserver/login/_security_service.py @@ -1,20 +1,21 @@ """Utils that extends on security_api plugin""" import logging -from typing import Any from aiohttp import web -from servicelib.logging_utils import get_log_record_extra, log_context +from common_library.logging.logging_base import get_log_record_extra +from servicelib.logging_utils import log_context -from ..security import api as security_service -from ._constants import MSG_LOGGED_IN -from ._login_service import flash_response +from ..security import security_web +from ..web_utils import flash_response +from ._auth_service import UserInfoDict +from .constants import MSG_LOGGED_IN _logger = logging.getLogger(__name__) async def login_granted_response( - request: web.Request, *, user: dict[str, Any] + request: web.Request, *, user: UserInfoDict ) -> web.Response: """ Grants authorization for user creating a responses with an auth cookie @@ -36,7 +37,7 @@ async def login_granted_response( extra=get_log_record_extra(user_id=user_id), ): response = flash_response(MSG_LOGGED_IN, "INFO") - return await security_service.remember_identity( + return await security_web.remember_identity( request=request, response=response, user_email=email, diff --git a/services/web/server/src/simcore_service_webserver/login/_twofa_service.py b/services/web/server/src/simcore_service_webserver/login/_twofa_service.py index c799e29e4a31..a7ef6d5fd067 100644 --- a/services/web/server/src/simcore_service_webserver/login/_twofa_service.py +++ b/services/web/server/src/simcore_service_webserver/login/_twofa_service.py @@ -134,7 +134,7 @@ def _sender(): except TwilioException as exc: raise SendingVerificationSmsError( - reason=f"Could not send SMS to {mask_phone_number(phone_number)}", + details=f"Could not send SMS to {mask_phone_number(phone_number)}", user_id=user_id, twilio_error=exc, ) from exc @@ -176,7 +176,7 @@ async def send_email_code( ) except Exception as exc: raise SendingVerificationEmailError( - reason=f"Could not send email to {user_email}", + details=f"Could not send email to {user_email}", user_id=user_id, user_email=user_email, email_error=exc, diff --git a/services/web/server/src/simcore_service_webserver/login/constants.py b/services/web/server/src/simcore_service_webserver/login/constants.py new file mode 100644 index 000000000000..2ee9e5f874f8 --- /dev/null +++ 
b/services/web/server/src/simcore_service_webserver/login/constants.py @@ -0,0 +1,139 @@ +from typing import Final + +from common_library.user_messages import user_message + +MSG_2FA_CODE_SENT: Final[str] = user_message( + "A verification code has been sent via SMS to {phone_number}.", _version=1 +) +MSG_2FA_UNAVAILABLE: Final[str] = user_message( + "Two-factor authentication is temporarily unavailable. Please try again later.", + _version=1, +) +MSG_ACTIVATED: Final[str] = user_message( + "Your account has been successfully activated.", _version=1 +) +MSG_ACTIVATION_REQUIRED: Final[str] = user_message( + "Please activate your account using the activation link we sent to your email before logging in.", + _version=1, +) +MSG_AUTH_FAILED: Final[str] = user_message( + "We couldn't sign you in with those credentials. Please check your email and password and try again.", + _version=1, +) +MSG_CANT_SEND_MAIL: Final[str] = user_message( + "We're unable to send emails at this time. Please try again later.", _version=1 +) +MSG_CHANGE_EMAIL_REQUESTED: Final[str] = user_message( + "Please check your new email address and click the verification link we sent you.", + _version=1, +) +MSG_EMAIL_CHANGED: Final[str] = user_message( + "Your email address has been successfully updated.", _version=1 +) +MSG_EMAIL_ALREADY_REGISTERED: Final[str] = user_message( + "This email address is already associated with an account. Please sign in or use a different email address.", + _version=1, +) +MSG_EMAIL_SENT: Final[str] = user_message( + "We've sent an email to {email} with further instructions.", _version=1 +) +MSG_LOGGED_IN: Final[str] = user_message("You have successfully signed in.", _version=1) +MSG_LOGGED_OUT: Final[str] = user_message( + "You have successfully signed out.", _version=1 +) +MSG_OFTEN_RESET_PASSWORD: Final[str] = user_message( + "You've recently requested a password reset. Please check your email for the reset link or wait before requesting another one.", + _version=1, +) +MSG_PASSWORD_CHANGE_NOT_ALLOWED: Final[str] = user_message( + "Unable to reset password. Permissions may have expired or been removed. " + "Please try again, or contact support if the problem continues: {support_email}", + _version=1, +) +MSG_PASSWORD_CHANGED: Final[str] = user_message( + "Your password has been updated.", _version=1 +) +MSG_PASSWORD_MISMATCH: Final[str] = user_message( + "Password and confirmation do not match. Please try again.", _version=1 +) +MSG_PHONE_MISSING: Final[str] = user_message( + "No phone number is associated with this account.", _version=1 +) +MSG_UNAUTHORIZED_CODE_RESEND_2FA: Final[str] = user_message( + "You can no longer resend the verification code. Please restart the verification process.", + _version=2, +) +MSG_UNAUTHORIZED_LOGIN_2FA: Final[str] = user_message( + "You can no longer submit a verification code. Please restart the login process.", + _version=2, +) +MSG_UNAUTHORIZED_REGISTER_PHONE: Final[str] = user_message( + "Phone registration is no longer allowed. Please restart the registration process.", + _version=1, +) +MSG_UNAUTHORIZED_PHONE_CONFIRMATION: Final[str] = user_message( + "You can no longer submit a verification code. Please restart the confirmation process.", + _version=2, +) +MSG_UNKNOWN_EMAIL: Final[str] = user_message( + "This email address is not registered.", _version=1 +) +MSG_USER_DELETED: Final[str] = user_message( + "This account is scheduled for deletion. 
To reactivate it or for more information, please contact support: {support_email}", + _version=1, +) +MSG_USER_BANNED: Final[str] = user_message( + "Access to this account is no longer available. Please contact support for more information: {support_email}", + _version=1, +) +MSG_USER_EXPIRED: Final[str] = user_message( + "This account has expired and access is no longer available. Please contact support for assistance: {support_email}", + _version=1, +) +MSG_USER_DISABLED: Final[str] = user_message( + "This account has been disabled and cannot be registered again. Please contact support for details: {support_email}", + _version=1, +) +MSG_WRONG_2FA_CODE__INVALID: Final[str] = user_message( + "The verification code entered is not valid. Please enter a valid verification code or generate a new one.", + _version=2, +) +MSG_WRONG_2FA_CODE__EXPIRED: Final[str] = user_message( + "The verification code is either incorrect or has expired. Please request a new verification code and try again.", + _version=3, +) +MSG_WRONG_CAPTCHA__INVALID: Final[str] = user_message( + "The CAPTCHA entered is incorrect. Please try again.", _version=1 +) +MSG_WRONG_PASSWORD: Final[str] = user_message( + "The password is incorrect. Please try again.", _version=1 +) +MSG_WEAK_PASSWORD: Final[str] = user_message( + "Password must be at least {LOGIN_PASSWORD_MIN_LENGTH} characters long.", _version=1 +) +MSG_INVITATIONS_CONTACT_SUFFIX: Final[str] = user_message( + "Please contact our support team to request a new invitation.", _version=1 +) + +# Login Accepted Response Codes: +# - These string codes are used to identify next step in the login (e.g. login_2fa or register_phone?) +# - The frontend uses them also to determine what page/form has to display to the user for next step +CODE_PHONE_NUMBER_REQUIRED: Final[str] = "PHONE_NUMBER_REQUIRED" +CODE_2FA_SMS_CODE_REQUIRED: Final[str] = "SMS_CODE_REQUIRED" +CODE_2FA_EMAIL_CODE_REQUIRED: Final[str] = "EMAIL_CODE_REQUIRED" + + +# App keys for login plugin +# Naming convention: APP_LOGIN_...KEY +APP_LOGIN_SETTINGS_PER_PRODUCT_KEY: Final[str] = ( + f"{__name__}.LOGIN_SETTINGS_PER_PRODUCT" +) + + +# maximum amount the user can resend the code via email or phone +MAX_2FA_CODE_RESEND: Final[int] = 5 + +# maximum number of trials to validate the passcode +MAX_2FA_CODE_TRIALS: Final[int] = 5 + +CAPTCHA_SESSION_KEY: Final[str] = "captcha" diff --git a/services/web/server/src/simcore_service_webserver/login/decorators.py b/services/web/server/src/simcore_service_webserver/login/decorators.py index e5be70e1efb8..011c25bbc083 100644 --- a/services/web/server/src/simcore_service_webserver/login/decorators.py +++ b/services/web/server/src/simcore_service_webserver/login/decorators.py @@ -1,77 +1,7 @@ -import functools -import inspect -from typing import cast +from ..login_auth.decorators import get_user_id, login_required -from aiohttp import web -from models_library.users import UserID -from servicelib.aiohttp.typing_extension import HandlerAnyReturn -from servicelib.request_keys import RQT_USERID_KEY - -from ..products import products_web -from ..security.api import ( - PERMISSION_PRODUCT_LOGIN_KEY, - AuthContextDict, - check_user_authorized, - check_user_permission, +__all__: tuple[str, ...] 
= ( + "get_user_id", + "login_required", ) - - -def login_required(handler: HandlerAnyReturn) -> HandlerAnyReturn: - """Decorator that restrict access only for authorized users with permissions to access a given product - - - User is considered authorized if check_authorized(request) raises no exception - - If authorized, it injects user_id in request[RQT_USERID_KEY] - - Use this decorator instead of aiohttp_security.api.login_required! - - WARNING: Add always @router. decorator FIRST, e.g. - - @router.get("/foo") - @login_required - async def get_foo(request: web.Request): - ... - - and NOT as - - @login_required - @router.get("/foo") - async def get_foo(request: web.Request): - ... - - since the latter will register in `router` get_foo **without** `login_required` - """ - assert set(inspect.signature(handler).parameters.values()) == { # nosec - inspect.Parameter( - name="request", - kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, - annotation=web.Request, - ) - }, f"Expected {handler.__name__} with request as signature, got {handler.__annotations__}" - - @functools.wraps(handler) - async def _wrapper(request: web.Request): - """ - Raises: - HTTPUnauthorized: if unauthorized user - HTTPForbidden: if user not allowed in product - """ - # WARNING: note that check_authorized is patched in some tests. - # Careful when changing the function signature - user_id = await check_user_authorized(request) - - await check_user_permission( - request, - PERMISSION_PRODUCT_LOGIN_KEY, - context=AuthContextDict( - product_name=products_web.get_product_name(request), - authorized_uid=user_id, - ), - ) - - request[RQT_USERID_KEY] = user_id - return await handler(request) - - return _wrapper - - -def get_user_id(request: web.Request) -> UserID: - return cast(UserID, request[RQT_USERID_KEY]) +# nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/login/errors.py b/services/web/server/src/simcore_service_webserver/login/errors.py index 835c971d312f..4d34827a5e29 100644 --- a/services/web/server/src/simcore_service_webserver/login/errors.py +++ b/services/web/server/src/simcore_service_webserver/login/errors.py @@ -1,48 +1,16 @@ -import functools -import logging - -from aiohttp import web -from servicelib.aiohttp.typing_extension import Handler -from servicelib.logging_errors import create_troubleshotting_log_kwargs -from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON - from ..errors import WebServerBaseError -from ._constants import MSG_2FA_UNAVAILABLE - -_logger = logging.getLogger(__name__) class LoginError(WebServerBaseError, ValueError): ... class SendingVerificationSmsError(LoginError): - msg_template = "Sending verification sms failed. {reason}" + msg_template = "Sending verification sms failed: {details}" class SendingVerificationEmailError(LoginError): - msg_template = "Sending verification email failed. 
{reason}" - - -def handle_login_exceptions(handler: Handler): - @functools.wraps(handler) - async def wrapper(request: web.Request) -> web.StreamResponse: - try: - return await handler(request) - - except (SendingVerificationSmsError, SendingVerificationEmailError) as exc: - error_code = exc.error_code() - front_end_msg = MSG_2FA_UNAVAILABLE - # in these cases I want to log the cause - _logger.exception( - **create_troubleshotting_log_kwargs( - front_end_msg, - error=exc, - error_code=error_code, - ) - ) - raise web.HTTPServiceUnavailable( - reason=front_end_msg, - content_type=MIMETYPE_APPLICATION_JSON, - ) from exc - - return wrapper + msg_template = "Sending verification email failed: {details}" + + +class WrongPasswordError(LoginError): + msg_template = "Invalid password provided" diff --git a/services/web/server/src/simcore_service_webserver/login/plugin.py b/services/web/server/src/simcore_service_webserver/login/plugin.py index 60c8db47b6fc..46845dce7d8b 100644 --- a/services/web/server/src/simcore_service_webserver/login/plugin.py +++ b/services/web/server/src/simcore_service_webserver/login/plugin.py @@ -1,13 +1,19 @@ import asyncio import logging +from typing import Final import asyncpg from aiohttp import web from pydantic import ValidationError -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup from settings_library.email import SMTPSettings from settings_library.postgres import PostgresSettings +from .._meta import APP_NAME +from ..application_setup import ( + ModuleCategory, + app_setup_func, + ensure_single_setup, +) from ..constants import ( APP_PUBLIC_CONFIG_PER_PRODUCT, APP_SETTINGS_KEY, @@ -18,21 +24,22 @@ from ..email.plugin import setup_email from ..email.settings import get_plugin_settings as get_email_plugin_settings from ..invitations.plugin import setup_invitations +from ..login_accounts.plugin import setup_login_account +from ..login_auth.plugin import setup_login_auth from ..products import products_service from ..products.models import ProductName from ..products.plugin import setup_products from ..redis import setup_redis from ..rest.plugin import setup_rest -from ._constants import APP_LOGIN_SETTINGS_PER_PRODUCT_KEY from ._controller.rest import ( auth, change, confirmation, - preregistration, registration, twofa, ) from ._login_repository_legacy import APP_LOGIN_STORAGE_KEY, AsyncpgStorage +from .constants import APP_LOGIN_SETTINGS_PER_PRODUCT_KEY from .settings import ( APP_LOGIN_OPTIONS_KEY, LoginOptions, @@ -42,6 +49,7 @@ log = logging.getLogger(__name__) +APP_LOGIN_CLIENT_KEY: Final = web.AppKey("APP_LOGIN_CLIENT_KEY", object) MAX_TIME_TO_CLOSE_POOL_SECS = 5 @@ -51,7 +59,7 @@ async def _setup_login_storage_ctx(app: web.Application): settings: PostgresSettings = get_db_plugin_settings(app) async with asyncpg.create_pool( - dsn=settings.dsn_with_query, + dsn=settings.dsn_with_query(f"{APP_NAME}-login", suffix="asyncpg"), min_size=settings.POSTGRES_MINSIZE, max_size=settings.POSTGRES_MAXSIZE, loop=asyncio.get_event_loop(), @@ -61,11 +69,13 @@ async def _setup_login_storage_ctx(app: web.Application): yield # ---------------- +@ensure_single_setup(f"{__name__}.storage", logger=log) def setup_login_storage(app: web.Application): if _setup_login_storage_ctx not in app.cleanup_ctx: app.cleanup_ctx.append(_setup_login_storage_ctx) +@ensure_single_setup(f"{__name__}.login_options", logger=log) def _setup_login_options(app: web.Application): settings: SMTPSettings = get_email_plugin_settings(app) @@ -120,7 +130,7 @@ async def 
_resolve_login_settings_per_product(app: web.Application): app.setdefault(APP_PUBLIC_CONFIG_PER_PRODUCT, public_data_per_product) -@app_module_setup( +@app_setup_func( "simcore_service_webserver.login", ModuleCategory.ADDON, settings_name="WEBSERVER_LOGIN", @@ -139,9 +149,11 @@ def setup_login(app: web.Application): # routes app.router.add_routes(auth.routes) + setup_login_auth(app) + setup_login_account(app) + app.router.add_routes(confirmation.routes) app.router.add_routes(registration.routes) - app.router.add_routes(preregistration.routes) app.router.add_routes(change.routes) app.router.add_routes(twofa.routes) diff --git a/services/web/server/src/simcore_service_webserver/login/settings.py b/services/web/server/src/simcore_service_webserver/login/settings.py index 909b3a64eb6f..697822affab6 100644 --- a/services/web/server/src/simcore_service_webserver/login/settings.py +++ b/services/web/server/src/simcore_service_webserver/login/settings.py @@ -10,7 +10,7 @@ from settings_library.twilio import TwilioSettings from simcore_postgres_database.models.products import ProductLoginSettingsDict -from ._constants import APP_LOGIN_SETTINGS_PER_PRODUCT_KEY +from .constants import APP_LOGIN_SETTINGS_PER_PRODUCT_KEY _DAYS: Final[float] = 1.0 # in days _MINUTES: Final[float] = 1.0 / 24.0 / 60.0 # in days diff --git a/services/web/server/src/simcore_service_webserver/login_accounts/__init__.py b/services/web/server/src/simcore_service_webserver/login_accounts/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/web/server/src/simcore_service_webserver/login/_controller/rest/preregistration.py b/services/web/server/src/simcore_service_webserver/login_accounts/_controller_rest.py similarity index 57% rename from services/web/server/src/simcore_service_webserver/login/_controller/rest/preregistration.py rename to services/web/server/src/simcore_service_webserver/login_accounts/_controller_rest.py index b656d7cc4d36..1c10912dc8d5 100644 --- a/services/web/server/src/simcore_service_webserver/login/_controller/rest/preregistration.py +++ b/services/web/server/src/simcore_service_webserver/login_accounts/_controller_rest.py @@ -2,46 +2,47 @@ from typing import Any from aiohttp import web +from common_library.logging.logging_base import get_log_record_extra +from common_library.user_messages import user_message from models_library.api_schemas_webserver.auth import ( AccountRequestInfo, UnregisterCheck, ) -from models_library.users import UserID -from pydantic import BaseModel, Field from servicelib.aiohttp import status -from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY -from servicelib.aiohttp.requests_validation import parse_request_body_as -from servicelib.logging_utils import get_log_record_extra, log_context -from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON -from servicelib.request_keys import RQT_USERID_KEY +from servicelib.aiohttp.requests_validation import ( + handle_validation_as_http_error, + parse_request_body_as, +) +from servicelib.logging_utils import log_context from servicelib.utils import fire_and_forget_task -from ...._meta import API_VTAG -from ....constants import RQ_PRODUCT_KEY -from ....products import products_web -from ....products.models import Product -from ....security import api as security_service -from ....security.decorators import permission_required -from ....session.api import get_session -from ....users.api import get_user_credentials, set_user_as_deleted -from ....utils import MINUTE -from 
....utils_rate_limiting import global_rate_limit_route -from ... import _preregistration_service -from ..._constants import ( +from .._meta import API_VTAG +from ..constants import APP_FIRE_AND_FORGET_TASKS_KEY +from ..login import login_service +from ..login._controller.rest._rest_exceptions import handle_rest_requests_exceptions +from ..login.constants import ( CAPTCHA_SESSION_KEY, MSG_LOGGED_OUT, MSG_WRONG_CAPTCHA__INVALID, ) -from ..._login_service import flash_response, notify_user_logout -from ...decorators import login_required -from ...settings import LoginSettingsForProduct, get_plugin_settings +from ..login.settings import get_plugin_settings +from ..login_auth.decorators import login_required +from ..models import AuthenticatedRequestContext +from ..products import products_web +from ..products.models import Product +from ..security import security_service, security_web +from ..security.decorators import permission_required +from ..session import api as session_service +from ..users import users_service +from ..users.schemas import UserAccountRestPreRegister +from ..utils import MINUTE +from ..utils_rate_limiting import global_rate_limit_route +from ..web_utils import flash_response +from . import _service _logger = logging.getLogger(__name__) -routes = web.RouteTableDef() - - def _get_ipinfo(request: web.Request) -> dict[str, Any]: # NOTE: Traefik is also configured to transmit the original IP. x_real_ip = request.headers.get("X-Real-IP", None) @@ -57,29 +58,59 @@ def _get_ipinfo(request: web.Request) -> dict[str, Any]: } +routes = web.RouteTableDef() + + +@routes.get(f"/{API_VTAG}/auth/captcha", name="create_captcha") +@global_rate_limit_route(number_of_requests=30, interval_seconds=MINUTE) +@handle_rest_requests_exceptions +async def create_captcha(request: web.Request): + session = await session_service.get_session(request) + + captcha_text, image_data = await _service.create_captcha() + + # Store captcha text in session + session[CAPTCHA_SESSION_KEY] = captcha_text + + return web.Response(body=image_data, content_type="image/png") + + @routes.post( f"/{API_VTAG}/auth/request-account", name="request_product_account", ) @global_rate_limit_route(number_of_requests=30, interval_seconds=MINUTE) +@handle_rest_requests_exceptions async def request_product_account(request: web.Request): product = products_web.get_current_product(request) - session = await get_session(request) + session = await session_service.get_session(request) body = await parse_request_body_as(AccountRequestInfo, request) assert body.form # nosec assert body.captcha # nosec if body.captcha != session.get(CAPTCHA_SESSION_KEY): - raise web.HTTPUnprocessableEntity( - reason=MSG_WRONG_CAPTCHA__INVALID, content_type=MIMETYPE_APPLICATION_JSON - ) + raise web.HTTPUnprocessableEntity(text=MSG_WRONG_CAPTCHA__INVALID) session.pop(CAPTCHA_SESSION_KEY, None) - # send email to fogbugz or user itself + with handle_validation_as_http_error( + error_msg_template=user_message( + "The form contains invalid information: '{failed}'", _version=1 + ), + resource_name=request.rel_url.path, + ): + profile = UserAccountRestPreRegister.model_validate(body.form) + + await _service.create_pre_registration( + request.app, + profile=profile, + product_name=product.name, + ) + + # if created send email to fogbugz or user itself fire_and_forget_task( - _preregistration_service.send_account_request_email_to_support( - request, + _service.send_account_request_email_to_support( + request=request, product=product, request_form=body.form, 
ipinfo=_get_ipinfo(request), @@ -90,30 +121,28 @@ async def request_product_account(request: web.Request): return web.json_response(status=status.HTTP_204_NO_CONTENT) -class _AuthenticatedContext(BaseModel): - user_id: UserID = Field(..., alias=RQT_USERID_KEY) # type: ignore[literal-required] - product_name: str = Field(..., alias=RQ_PRODUCT_KEY) # type: ignore[literal-required] - - @routes.post(f"/{API_VTAG}/auth/unregister", name="unregister_account") @login_required @permission_required("user.profile.delete") +@handle_rest_requests_exceptions async def unregister_account(request: web.Request): - req_ctx = _AuthenticatedContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) body = await parse_request_body_as(UnregisterCheck, request) product: Product = products_web.get_current_product(request) - settings: LoginSettingsForProduct = get_plugin_settings( - request.app, product_name=product.name - ) + settings = get_plugin_settings(request.app, product_name=product.name) # checks before deleting - credentials = await get_user_credentials(request.app, user_id=req_ctx.user_id) - if body.email != credentials.email.lower() or not security_service.check_password( + credentials = await users_service.get_user_credentials( + request.app, user_id=req_ctx.user_id + ) + if body.email != credentials.email or not security_service.check_password( body.password.get_secret_value(), credentials.password_hash ): raise web.HTTPConflict( - reason="Wrong email or password. Please try again to delete this account" + text=user_message( + "Wrong email or password. Please try again to delete this account" + ) ) with log_context( @@ -124,18 +153,18 @@ async def unregister_account(request: web.Request): extra=get_log_record_extra(user_id=req_ctx.user_id), ): # update user table - await set_user_as_deleted(request.app, user_id=req_ctx.user_id) + await users_service.set_user_as_deleted(request.app, user_id=req_ctx.user_id) # logout - await notify_user_logout( + await login_service.notify_user_logout( request.app, user_id=req_ctx.user_id, client_session_id=None ) response = flash_response(MSG_LOGGED_OUT, "INFO") - await security_service.forget_identity(request, response) + await security_web.forget_identity(request, response) # send email in the background fire_and_forget_task( - _preregistration_service.send_close_account_email( + _service.send_close_account_email( request, user_email=credentials.email, user_first_name=credentials.display_name, @@ -146,16 +175,3 @@ async def unregister_account(request: web.Request): ) return response - - -@routes.get(f"/{API_VTAG}/auth/captcha", name="create_captcha") -@global_rate_limit_route(number_of_requests=30, interval_seconds=MINUTE) -async def create_captcha(request: web.Request): - session = await get_session(request) - - captcha_text, image_data = await _preregistration_service.create_captcha() - - # Store captcha text in session - session[CAPTCHA_SESSION_KEY] = captcha_text - - return web.Response(body=image_data, content_type="image/png") diff --git a/services/web/server/src/simcore_service_webserver/login/_preregistration_service.py b/services/web/server/src/simcore_service_webserver/login_accounts/_service.py similarity index 77% rename from services/web/server/src/simcore_service_webserver/login/_preregistration_service.py rename to services/web/server/src/simcore_service_webserver/login_accounts/_service.py index e5e8a9f29eaf..c882f9501e9d 100644 --- 
a/services/web/server/src/simcore_service_webserver/login/_preregistration_service.py +++ b/services/web/server/src/simcore_service_webserver/login_accounts/_service.py @@ -1,3 +1,4 @@ +import asyncio import functools import logging from io import BytesIO @@ -7,14 +8,17 @@ from captcha.image import ImageCaptcha from common_library.json_serialization import json_dumps from models_library.emails import LowerCaseEmailStr +from models_library.products import ProductName from models_library.utils.fastapi_encoders import jsonable_encoder from PIL.Image import Image from pydantic import EmailStr, PositiveInt, TypeAdapter, ValidationError from servicelib.utils_secrets import generate_passcode -from ..email.utils import send_email_from_template +from ..email.email_service import send_email_from_template from ..products import products_web from ..products.models import Product +from ..users import _accounts_service +from ..users.schemas import UserAccountRestPreRegister _logger = logging.getLogger(__name__) @@ -109,16 +113,32 @@ async def send_account_request_email_to_support( async def create_captcha() -> tuple[str, bytes]: - captcha_text = generate_passcode(number_of_digits=6) - image = ImageCaptcha(width=140, height=45) + def _run() -> tuple[str, bytes]: + captcha_text = generate_passcode(number_of_digits=6) + image = ImageCaptcha(width=140, height=45) - # Generate image - data: Image = image.create_captcha_image( - chars=captcha_text, color=(221, 221, 221), background=(0, 20, 46) - ) + # Generate image + data: Image = image.create_captcha_image( + chars=captcha_text, color=(221, 221, 221), background=(0, 20, 46) + ) + + img_byte_arr = BytesIO() + data.save(img_byte_arr, format="PNG") + image_data = img_byte_arr.getvalue() + + return (captcha_text, image_data) + + loop = asyncio.get_event_loop() + return await loop.run_in_executor(None, _run) - img_byte_arr = BytesIO() - data.save(img_byte_arr, format="PNG") - image_data = img_byte_arr.getvalue() - return (captcha_text, image_data) +async def create_pre_registration( + app: web.Application, + *, + profile: UserAccountRestPreRegister, + product_name: ProductName, +): + + await _accounts_service.pre_register_user( + app, profile=profile, creator_user_id=None, product_name=product_name + ) diff --git a/services/web/server/src/simcore_service_webserver/login_accounts/plugin.py b/services/web/server/src/simcore_service_webserver/login_accounts/plugin.py new file mode 100644 index 000000000000..9354d73dd355 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/login_accounts/plugin.py @@ -0,0 +1,13 @@ +import logging + +from aiohttp import web + +from ..application_setup import ensure_single_setup +from . 
import _controller_rest + +_logger = logging.getLogger(__name__) + + +@ensure_single_setup(__name__, logger=_logger) +def setup_login_account(app: web.Application): + app.add_routes(_controller_rest.routes) diff --git a/services/web/server/src/simcore_service_webserver/login_auth/__init__.py b/services/web/server/src/simcore_service_webserver/login_auth/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/web/server/src/simcore_service_webserver/login_auth/_controller_rest.py b/services/web/server/src/simcore_service_webserver/login_auth/_controller_rest.py new file mode 100644 index 000000000000..8c22e32fac42 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/login_auth/_controller_rest.py @@ -0,0 +1,30 @@ +import logging + +from aiohttp import web +from aiohttp.web import RouteTableDef +from servicelib.aiohttp import status + +from .._meta import API_VTAG +from .decorators import login_required + +_logger = logging.getLogger(__name__) + + +routes = RouteTableDef() + + +@routes.get(f"/{API_VTAG}/auth:check", name="check_auth") +@login_required +async def check_auth(request: web.Request) -> web.Response: + """Lightweight endpoint for checking if users are authenticated & authorized to this product + + Used primarily by Traefik auth middleware to verify session cookies + SEE https://doc.traefik.io/traefik/middlewares/http/forwardauth + """ + # NOTE: for future development + # if database access is added here, services like jupyter-math + # which load a lot of resources will have a big performance hit + # consider caching some properties required by this endpoint or rely on Redis + assert request # nosec + + return web.json_response(status=status.HTTP_204_NO_CONTENT) diff --git a/services/web/server/src/simcore_service_webserver/login_auth/decorators.py b/services/web/server/src/simcore_service_webserver/login_auth/decorators.py new file mode 100644 index 000000000000..50dfd6456d72 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/login_auth/decorators.py @@ -0,0 +1,73 @@ +import functools +import inspect +from typing import cast + +from aiohttp import web +from models_library.users import UserID +from servicelib.aiohttp.request_keys import RQT_USERID_KEY +from servicelib.aiohttp.typing_extension import HandlerAnyReturn + +from ..products import products_web +from ..security import security_web + + +def login_required(handler: HandlerAnyReturn) -> HandlerAnyReturn: + """Decorator that restrict access only for authorized users with permissions to access a given product + + - User is considered authorized if check_authorized(request) raises no exception + - If authorized, it injects user_id in request[RQT_USERID_KEY] + - Use this decorator instead of aiohttp_security.api.login_required! + + WARNING: Add always @router. decorator FIRST, e.g. + + @router.get("/foo") + @login_required + async def get_foo(request: web.Request): + ... + + and NOT as + + @login_required + @router.get("/foo") + async def get_foo(request: web.Request): + ... 
+ + since the latter will register in `router` get_foo **without** `login_required` + """ + assert set(inspect.signature(handler).parameters.values()) == { # nosec + inspect.Parameter( + name="request", + kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, + annotation=web.Request, + ) + }, f"Expected {handler.__name__} with request as signature, got {handler.__annotations__}" + + @functools.wraps(handler) + async def _wrapper(request: web.Request): + """ + Raises: + HTTPUnauthorized: if unauthorized user + HTTPForbidden: if user not allowed in product + """ + # WARNING: note that check_authorized is patched in some tests. + # Careful when changing the function signature + user_id = await security_web.check_user_authorized(request) + product_name = products_web.get_product_name(request) + + await security_web.check_user_permission( + request, + security_web.PERMISSION_PRODUCT_LOGIN_KEY, + context=security_web.AuthContextDict( + product_name=product_name, + authorized_uid=user_id, + ), + ) + + request[RQT_USERID_KEY] = user_id + return await handler(request) + + return _wrapper + + +def get_user_id(request: web.Request) -> UserID: + return cast(UserID, request[RQT_USERID_KEY]) diff --git a/services/web/server/src/simcore_service_webserver/login_auth/plugin.py b/services/web/server/src/simcore_service_webserver/login_auth/plugin.py new file mode 100644 index 000000000000..8113d4aa6934 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/login_auth/plugin.py @@ -0,0 +1,20 @@ +import logging + +from aiohttp import web + +from ..application_setup import ensure_single_setup +from ..products.plugin import setup_products_without_rpc +from ..rest.plugin import setup_rest +from ..security.plugin import setup_security +from . import _controller_rest + +_logger = logging.getLogger(__name__) + + +@ensure_single_setup(__name__, logger=_logger) +def setup_login_auth(app: web.Application): + setup_products_without_rpc(app) + setup_security(app) + setup_rest(app) + + app.add_routes(_controller_rest.routes) diff --git a/services/web/server/src/simcore_service_webserver/long_running_tasks.py b/services/web/server/src/simcore_service_webserver/long_running_tasks.py deleted file mode 100644 index b98bf7ed7036..000000000000 --- a/services/web/server/src/simcore_service_webserver/long_running_tasks.py +++ /dev/null @@ -1,35 +0,0 @@ -from functools import wraps - -from aiohttp import web -from models_library.utils.fastapi_encoders import jsonable_encoder -from servicelib.aiohttp.long_running_tasks._constants import ( - RQT_LONG_RUNNING_TASKS_CONTEXT_KEY, -) -from servicelib.aiohttp.long_running_tasks.server import setup -from servicelib.aiohttp.typing_extension import Handler - -from ._meta import API_VTAG -from .login.decorators import login_required -from .models import RequestContext - - -def webserver_request_context_decorator(handler: Handler): - @wraps(handler) - async def _test_task_context_decorator( - request: web.Request, - ) -> web.StreamResponse: - """this task context callback tries to get the user_id from the query if available""" - req_ctx = RequestContext.model_validate(request) - request[RQT_LONG_RUNNING_TASKS_CONTEXT_KEY] = jsonable_encoder(req_ctx) - return await handler(request) - - return _test_task_context_decorator - - -def setup_long_running_tasks(app: web.Application) -> None: - setup( - app, - router_prefix=f"/{API_VTAG}/tasks-legacy", - handler_check_decorator=login_required, - task_request_context_decorator=webserver_request_context_decorator, - ) diff --git 
a/services/web/server/src/simcore_service_webserver/long_running_tasks/__init__.py b/services/web/server/src/simcore_service_webserver/long_running_tasks/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/web/server/src/simcore_service_webserver/long_running_tasks/plugin.py b/services/web/server/src/simcore_service_webserver/long_running_tasks/plugin.py new file mode 100644 index 000000000000..959e3efe41ed --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/long_running_tasks/plugin.py @@ -0,0 +1,60 @@ +import logging +from functools import wraps + +from aiohttp import web +from models_library.utils.fastapi_encoders import jsonable_encoder +from servicelib.aiohttp.long_running_tasks._constants import ( + RQT_LONG_RUNNING_TASKS_CONTEXT_KEY, +) +from servicelib.aiohttp.long_running_tasks.server import setup +from servicelib.aiohttp.typing_extension import Handler + +from .. import rabbitmq_settings, redis +from .._meta import API_VTAG, APP_NAME +from ..application_setup import ModuleCategory, app_setup_func +from ..login.decorators import login_required +from ..models import AuthenticatedRequestContext +from ..projects.plugin import register_projects_long_running_tasks +from . import settings as long_running_tasks_settings + +_logger = logging.getLogger(__name__) + + +def _get_lrt_namespace(suffix: str) -> str: + return f"{APP_NAME}-{suffix}" + + +def webserver_request_context_decorator(handler: Handler): + @wraps(handler) + async def _test_task_context_decorator( + request: web.Request, + ) -> web.StreamResponse: + """this task context callback tries to get the user_id from the query if available""" + req_ctx = AuthenticatedRequestContext.model_validate(request) + request[RQT_LONG_RUNNING_TASKS_CONTEXT_KEY] = jsonable_encoder(req_ctx) + return await handler(request) + + return _test_task_context_decorator + + +@app_setup_func( + __name__, + ModuleCategory.ADDON, + settings_name="WEBSERVER_LONG_RUNNING_TASKS", + logger=_logger, +) +def setup_long_running_tasks(app: web.Application) -> None: + # register all long-running tasks from different modules + register_projects_long_running_tasks(app) + + settings = long_running_tasks_settings.get_plugin_settings(app) + + setup( + app, + redis_settings=redis.get_plugin_settings(app), + rabbit_settings=rabbitmq_settings.get_plugin_settings(app), + lrt_namespace=_get_lrt_namespace(settings.LONG_RUNNING_TASKS_NAMESPACE_SUFFIX), + router_prefix=f"/{API_VTAG}/tasks-legacy", + handler_check_decorator=login_required, + task_request_context_decorator=webserver_request_context_decorator, + ) diff --git a/services/web/server/src/simcore_service_webserver/long_running_tasks/settings.py b/services/web/server/src/simcore_service_webserver/long_running_tasks/settings.py new file mode 100644 index 000000000000..ac3feb588005 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/long_running_tasks/settings.py @@ -0,0 +1,26 @@ +from typing import Annotated + +from aiohttp import web +from pydantic import Field +from settings_library.base import BaseCustomSettings + +from ..constants import APP_SETTINGS_KEY + + +class LongRunningTasksSettings(BaseCustomSettings): + LONG_RUNNING_TASKS_NAMESPACE_SUFFIX: Annotated[ + str, + Field( + description=( + "suffix to distinguish between the various services based on this image " + "inside the long_running_tasks framework" + ), + ), + ] + + +def get_plugin_settings(app: web.Application) -> LongRunningTasksSettings: + settings = 
app[APP_SETTINGS_KEY].WEBSERVER_LONG_RUNNING_TASKS + assert settings, "setup_settings not called?" # nosec + assert isinstance(settings, LongRunningTasksSettings) # nosec + return settings diff --git a/services/web/server/src/simcore_service_webserver/models.py b/services/web/server/src/simcore_service_webserver/models.py index 0b816268baaa..cf795a6e252e 100644 --- a/services/web/server/src/simcore_service_webserver/models.py +++ b/services/web/server/src/simcore_service_webserver/models.py @@ -1,11 +1,67 @@ +from typing import Annotated, TypeAlias + +from models_library.products import ProductName from models_library.rest_base import RequestParameters from models_library.users import UserID -from pydantic import Field -from servicelib.request_keys import RQT_USERID_KEY +from pydantic import ConfigDict, Field, StringConstraints +from pydantic_extra_types.phone_numbers import PhoneNumberValidator +from servicelib.aiohttp.request_keys import RQT_USERID_KEY +from servicelib.celery.models import OwnerMetadata +from servicelib.rest_constants import X_CLIENT_SESSION_ID_HEADER +from ._meta import APP_NAME from .constants import RQ_PRODUCT_KEY +PhoneNumberStr: TypeAlias = Annotated[ + # NOTE: validator require installing `phonenumbers`` + str, + PhoneNumberValidator(number_format="E164"), +] + + +ClientSessionID: TypeAlias = Annotated[ + str, + StringConstraints( + strip_whitespace=True, + min_length=36, + max_length=36, + pattern=r"^[0-9a-fA-F\-]{36}$", # UUID format + strict=True, + ), +] + + +class AuthenticatedRequestContext(RequestParameters): + """Fields expected in the request context for authenticated endpoints""" + + user_id: Annotated[UserID, Field(alias=RQT_USERID_KEY)] + product_name: Annotated[ProductName, Field(alias=RQ_PRODUCT_KEY)] + + model_config = ConfigDict( + frozen=True # prevents modifications after middlewares creates this model + ) + + +assert X_CLIENT_SESSION_ID_HEADER + + +class ClientSessionHeaderParams(RequestParameters): + """Header parameters for client session tracking in collaborative features.""" + + client_session_id: ClientSessionID | None = Field( + default=None, + alias="X-Client-Session-Id", # X_CLIENT_SESSION_ID_HEADER, + description="Client session identifier for collaborative features (UUID string)", + ) + + model_config = ConfigDict( + validate_by_name=True, + ) + -class RequestContext(RequestParameters): - user_id: UserID = Field(..., alias=RQT_USERID_KEY) # type: ignore[literal-required] - product_name: str = Field(..., alias=RQ_PRODUCT_KEY) # type: ignore[literal-required] +class WebServerOwnerMetadata(OwnerMetadata): + user_id: UserID + product_name: ProductName + owner: Annotated[ + str, StringConstraints(pattern=rf"^{APP_NAME}$"), Field(frozen=True) + ] = APP_NAME diff --git a/services/web/server/src/simcore_service_webserver/notifications/_rabbitmq_exclusive_queue_consumers.py b/services/web/server/src/simcore_service_webserver/notifications/_rabbitmq_exclusive_queue_consumers.py index 7c3925b8c50f..1ca13228757d 100644 --- a/services/web/server/src/simcore_service_webserver/notifications/_rabbitmq_exclusive_queue_consumers.py +++ b/services/web/server/src/simcore_service_webserver/notifications/_rabbitmq_exclusive_queue_consumers.py @@ -1,10 +1,14 @@ +import asyncio import logging +from collections import defaultdict from collections.abc import AsyncIterator, Generator from typing import Final from aiohttp import web from models_library.groups import GroupID +from models_library.projects_state import RUNNING_STATE_COMPLETED_STATES from 
models_library.rabbitmq_messages import ( + ComputationalPipelineStatusMessage, EventRabbitMessage, LoggerRabbitMessage, ProgressRabbitMessageNode, @@ -16,16 +20,15 @@ from pydantic import TypeAdapter from servicelib.logging_utils import log_catch, log_context from servicelib.rabbitmq import RabbitMQClient -from servicelib.utils import logged_gather +from servicelib.utils import limited_gather, logged_gather -from ..projects import _projects_service -from ..projects.exceptions import ProjectNotFoundError +from ..projects import _nodes_service, _projects_service from ..rabbitmq import get_rabbitmq_client from ..socketio.messages import ( SOCKET_IO_EVENT, SOCKET_IO_LOG_EVENT, - SOCKET_IO_NODE_UPDATED_EVENT, SOCKET_IO_WALLET_OSPARC_CREDITS_UPDATED_EVENT, + send_message_to_project_room, send_message_to_standard_group, send_message_to_user, ) @@ -36,32 +39,19 @@ _logger = logging.getLogger(__name__) _APP_RABBITMQ_CONSUMERS_KEY: Final[str] = f"{__name__}.rabbit_consumers" +APP_WALLET_SUBSCRIPTIONS_KEY: Final[str] = "wallet_subscriptions" +APP_WALLET_SUBSCRIPTION_LOCK_KEY: Final[str] = "wallet_subscription_lock" -async def _convert_to_node_update_event( +async def _notify_comp_node_progress( app: web.Application, message: ProgressRabbitMessageNode -) -> SocketMessageDict | None: - try: - project = await _projects_service.get_project_for_user( - app, f"{message.project_id}", message.user_id - ) - if f"{message.node_id}" in project["workbench"]: - # update the project node progress with the latest value - project["workbench"][f"{message.node_id}"].update( - {"progress": round(message.report.percent_value * 100.0)} - ) - return SocketMessageDict( - event_type=SOCKET_IO_NODE_UPDATED_EVENT, - data={ - "project_id": message.project_id, - "node_id": message.node_id, - "data": project["workbench"][f"{message.node_id}"], - }, - ) - _logger.warning("node not found: '%s'", message.model_dump()) - except ProjectNotFoundError: - _logger.warning("project not found: '%s'", message.model_dump()) - return None +) -> None: + project = await _projects_service.get_project_for_user( + app, f"{message.project_id}", message.user_id, include_state=True + ) + await _projects_service.notify_project_node_update( + app, project, message.node_id, None + ) async def _progress_message_parser(app: web.Application, data: bytes) -> bool: @@ -75,25 +65,59 @@ async def _progress_message_parser(app: web.Application, data: bytes) -> bool: message = WebSocketProjectProgress.from_rabbit_message( rabbit_message ).to_socket_dict() - elif rabbit_message.progress_type is ProgressType.COMPUTATION_RUNNING: - message = await _convert_to_node_update_event(app, rabbit_message) - + await _notify_comp_node_progress(app, rabbit_message) else: message = WebSocketNodeProgress.from_rabbit_message( rabbit_message ).to_socket_dict() if message: - await send_message_to_user( + await send_message_to_project_room( app, - rabbit_message.user_id, + project_id=rabbit_message.project_id, message=message, - ignore_queue=True, ) return True +def _is_computational_node(node_key: str) -> bool: + return "/comp/" in node_key + + +async def _computational_pipeline_status_message_parser( + app: web.Application, data: bytes +) -> bool: + rabbit_message = ComputationalPipelineStatusMessage.model_validate_json(data) + project = await _projects_service.get_project_for_user( + app, + f"{rabbit_message.project_id}", + rabbit_message.user_id, + include_state=True, + ) + if rabbit_message.run_result in RUNNING_STATE_COMPLETED_STATES: + # the pipeline finished, the frontend 
needs to update all computational nodes + computational_node_ids = ( + n.node_id + for n in await _nodes_service.get_project_nodes( + app, project_uuid=project["uuid"] + ) + if _is_computational_node(n.key) + ) + await limited_gather( + *[ + _projects_service.notify_project_node_update( + app, project, n_id, errors=None + ) + for n_id in computational_node_ids + ], + limit=10, # notify 10 nodes at a time + ) + await _projects_service.notify_project_state_update(app, project) + + return True + + async def _log_message_parser(app: web.Application, data: bytes) -> bool: rabbit_message = LoggerRabbitMessage.model_validate_json(data) await send_message_to_user( @@ -103,7 +127,6 @@ async def _log_message_parser(app: web.Application, data: bytes) -> bool: event_type=SOCKET_IO_LOG_EVENT, data=rabbit_message.model_dump(exclude={"user_id", "channel_name"}), ), - ignore_queue=True, ) return True @@ -120,7 +143,6 @@ async def _events_message_parser(app: web.Application, data: bytes) -> bool: "node_id": f"{rabbit_message.node_id}", }, ), - ignore_queue=True, ) return True @@ -170,13 +192,19 @@ async def _osparc_credits_message_parser(app: web.Application, data: bytes) -> b _osparc_credits_message_parser, {"topics": []}, ), + SubcribeArgumentsTuple( + ComputationalPipelineStatusMessage.get_channel_name(), + _computational_pipeline_status_message_parser, + {"topics": []}, + ), ) async def _unsubscribe_from_rabbitmq(app) -> None: - with log_context( - _logger, logging.INFO, msg="Unsubscribing from rabbitmq channels" - ), log_catch(_logger, reraise=False): + with ( + log_context(_logger, logging.INFO, msg="Unsubscribing from rabbitmq channels"), + log_catch(_logger, reraise=False), + ): rabbit_client: RabbitMQClient = get_rabbitmq_client(app) await logged_gather( *( @@ -192,6 +220,12 @@ async def on_cleanup_ctx_rabbitmq_consumers( app[_APP_RABBITMQ_CONSUMERS_KEY] = await subscribe_to_rabbitmq( app, _EXCHANGE_TO_PARSER_CONFIG ) + + app[APP_WALLET_SUBSCRIPTIONS_KEY] = defaultdict( + int + ) # wallet_id -> subscriber count + app[APP_WALLET_SUBSCRIPTION_LOCK_KEY] = asyncio.Lock() # Ensures exclusive access + yield # cleanup diff --git a/services/web/server/src/simcore_service_webserver/notifications/plugin.py b/services/web/server/src/simcore_service_webserver/notifications/plugin.py index e251a8a356c0..f0969abbd192 100644 --- a/services/web/server/src/simcore_service_webserver/notifications/plugin.py +++ b/services/web/server/src/simcore_service_webserver/notifications/plugin.py @@ -1,12 +1,13 @@ """ - computation module is the main entry-point for computational backend +computation module is the main entry-point for computational backend """ + import logging from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func from ..diagnostics.plugin import setup_diagnostics from ..rabbitmq import setup_rabbitmq from ..socketio.plugin import setup_socketio @@ -19,7 +20,7 @@ _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_NOTIFICATIONS", diff --git a/services/web/server/src/simcore_service_webserver/notifications/project_logs.py b/services/web/server/src/simcore_service_webserver/notifications/project_logs.py index 5b971da7fbef..dde5c310bb9d 100644 --- a/services/web/server/src/simcore_service_webserver/notifications/project_logs.py +++ b/services/web/server/src/simcore_service_webserver/notifications/project_logs.py @@ -4,6 
+4,7 @@ from aiohttp import web from models_library.projects import ProjectID from models_library.rabbitmq_messages import ( + ComputationalPipelineStatusMessage, LoggerRabbitMessage, ProgressRabbitMessageNode, ProgressRabbitMessageProject, @@ -21,6 +22,7 @@ LoggerRabbitMessage, ProgressRabbitMessageNode, ProgressRabbitMessageProject, + ComputationalPipelineStatusMessage, ] diff --git a/services/web/server/src/simcore_service_webserver/notifications/wallet_osparc_credits.py b/services/web/server/src/simcore_service_webserver/notifications/wallet_osparc_credits.py index f66293bf3775..2f314bd93330 100644 --- a/services/web/server/src/simcore_service_webserver/notifications/wallet_osparc_credits.py +++ b/services/web/server/src/simcore_service_webserver/notifications/wallet_osparc_credits.py @@ -7,28 +7,37 @@ from servicelib.rabbitmq import RabbitMQClient from ..rabbitmq import get_rabbitmq_client +from ._rabbitmq_exclusive_queue_consumers import ( + APP_WALLET_SUBSCRIPTION_LOCK_KEY, + APP_WALLET_SUBSCRIPTIONS_KEY, +) _logger = logging.getLogger(__name__) -_SUBSCRIBABLE_EXCHANGES = [ - WalletCreditsMessage, -] - - async def subscribe(app: web.Application, wallet_id: WalletID) -> None: - rabbit_client: RabbitMQClient = get_rabbitmq_client(app) - for exchange in _SUBSCRIBABLE_EXCHANGES: - exchange_name = exchange.get_channel_name() - await rabbit_client.add_topics(exchange_name, topics=[f"{wallet_id}"]) + async with app[APP_WALLET_SUBSCRIPTION_LOCK_KEY]: + counter = app[APP_WALLET_SUBSCRIPTIONS_KEY][wallet_id] + app[APP_WALLET_SUBSCRIPTIONS_KEY][wallet_id] += 1 + + if counter == 0: # First subscriber + rabbit_client: RabbitMQClient = get_rabbitmq_client(app) + await rabbit_client.add_topics( + WalletCreditsMessage.get_channel_name(), topics=[f"{wallet_id}"] + ) async def unsubscribe(app: web.Application, wallet_id: WalletID) -> None: - rabbit_client: RabbitMQClient = get_rabbitmq_client(app) - for exchange in _SUBSCRIBABLE_EXCHANGES: - exchange_name = exchange.get_channel_name() - with log_catch(_logger, reraise=False): - # NOTE: in case something bad happenned with the connection to the RabbitMQ server - # such as a network disconnection. this call can fail. 
- await rabbit_client.remove_topics(exchange_name, topics=[f"{wallet_id}"]) + + async with app[APP_WALLET_SUBSCRIPTION_LOCK_KEY]: + counter = app[APP_WALLET_SUBSCRIPTIONS_KEY].get(wallet_id, 0) + if counter > 0: + app[APP_WALLET_SUBSCRIPTIONS_KEY][wallet_id] -= 1 + + if counter == 1: # Last subscriber + rabbit_client: RabbitMQClient = get_rabbitmq_client(app) + with log_catch(_logger, reraise=False): + await rabbit_client.remove_topics( + WalletCreditsMessage.get_channel_name(), topics=[f"{wallet_id}"] + ) diff --git a/services/web/server/src/simcore_service_webserver/payments/_autorecharge_db.py b/services/web/server/src/simcore_service_webserver/payments/_autorecharge_db.py index 813fa6b9eb12..b3ac90985f2e 100644 --- a/services/web/server/src/simcore_service_webserver/payments/_autorecharge_db.py +++ b/services/web/server/src/simcore_service_webserver/payments/_autorecharge_db.py @@ -9,7 +9,7 @@ from pydantic import BaseModel, ConfigDict, PositiveInt from simcore_postgres_database.utils_payments_autorecharge import AutoRechargeStmts -from ..db.plugin import get_database_engine +from ..db.plugin import get_database_engine_legacy from .errors import InvalidPaymentMethodError _logger = logging.getLogger(__name__) @@ -32,7 +32,7 @@ async def get_wallet_autorecharge( *, wallet_id: WalletID, ) -> PaymentsAutorechargeDB | None: - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: stmt = AutoRechargeStmts.get_wallet_autorecharge(wallet_id) result = await conn.execute(stmt) row = await result.first() @@ -51,7 +51,7 @@ async def replace_wallet_autorecharge( InvalidPaymentMethodError: if `new` includes some invalid 'primary_payment_method_id' """ - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: stmt = AutoRechargeStmts.is_valid_payment_method( user_id=user_id, wallet_id=new.wallet_id, diff --git a/services/web/server/src/simcore_service_webserver/payments/_methods_api.py b/services/web/server/src/simcore_service_webserver/payments/_methods_api.py index d19313d5bcc4..a5fefc9ebd2a 100644 --- a/services/web/server/src/simcore_service_webserver/payments/_methods_api.py +++ b/services/web/server/src/simcore_service_webserver/payments/_methods_api.py @@ -20,7 +20,7 @@ from simcore_postgres_database.models.payments_methods import InitPromptAckFlowState from yarl import URL -from ..users.api import get_user_display_and_id_names +from ..users import users_service from ..wallets.api import get_wallet_by_user from . 
import _rpc from ._autorecharge_db import get_wallet_autorecharge @@ -278,7 +278,7 @@ async def init_creation_of_wallet_payment_method( ) assert user_wallet.wallet_id == wallet_id # nosec - user = await get_user_display_and_id_names(app, user_id=user_id) + user = await users_service.get_user_display_and_id_names(app, user_id=user_id) return await _rpc.init_creation_of_payment_method( app, wallet_id=wallet_id, diff --git a/services/web/server/src/simcore_service_webserver/payments/_methods_db.py b/services/web/server/src/simcore_service_webserver/payments/_methods_db.py index 135eaf41a9e2..745c0e7caac2 100644 --- a/services/web/server/src/simcore_service_webserver/payments/_methods_db.py +++ b/services/web/server/src/simcore_service_webserver/payments/_methods_db.py @@ -16,7 +16,7 @@ from sqlalchemy import literal_column from sqlalchemy.sql import func -from ..db.plugin import get_database_engine +from ..db.plugin import get_database_engine_legacy from .errors import ( PaymentMethodAlreadyAckedError, PaymentMethodNotFoundError, @@ -46,7 +46,7 @@ async def insert_init_payment_method( wallet_id: WalletID, initiated_at: datetime.datetime, ) -> None: - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: try: await conn.execute( payments_methods.insert().values( @@ -68,7 +68,7 @@ async def list_successful_payment_methods( user_id: UserID, wallet_id: WalletID, ) -> list[PaymentsMethodsDB]: - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: result: ResultProxy = await conn.execute( payments_methods.select() .where( @@ -89,7 +89,7 @@ async def get_successful_payment_method( wallet_id: WalletID, payment_method_id: PaymentMethodID, ) -> PaymentsMethodsDB: - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: result: ResultProxy = await conn.execute( payments_methods.select().where( (payments_methods.c.user_id == user_id) @@ -108,7 +108,7 @@ async def get_successful_payment_method( async def get_pending_payment_methods_ids( app: web.Application, ) -> list[PaymentMethodID]: - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: result = await conn.execute( sa.select(payments_methods.c.payment_method_id) .where(payments_methods.c.completed_at.is_(None)) @@ -142,7 +142,7 @@ async def udpate_payment_method( if state_message: optional["state_message"] = state_message - async with get_database_engine(app).acquire() as conn, conn.begin(): + async with get_database_engine_legacy(app).acquire() as conn, conn.begin(): row = await ( await conn.execute( sa.select( @@ -179,7 +179,7 @@ async def delete_payment_method( wallet_id: WalletID, payment_method_id: PaymentMethodID, ): - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: await conn.execute( payments_methods.delete().where( (payments_methods.c.user_id == user_id) diff --git a/services/web/server/src/simcore_service_webserver/payments/_onetime_api.py b/services/web/server/src/simcore_service_webserver/payments/_onetime_api.py index 488189a81a58..6d427cfc023a 100644 --- a/services/web/server/src/simcore_service_webserver/payments/_onetime_api.py +++ b/services/web/server/src/simcore_service_webserver/payments/_onetime_api.py @@ -5,6 +5,7 @@ import arrow from aiohttp import web +from common_library.user_messages import 
user_message from models_library.api_schemas_webserver.wallets import ( PaymentID, PaymentMethodID, @@ -23,10 +24,10 @@ from simcore_postgres_database.utils_payments import insert_init_payment_transaction from yarl import URL -from ..db.plugin import get_database_engine +from ..db.plugin import get_database_engine_legacy from ..products import products_service from ..resource_usage.service import add_credits_to_wallet -from ..users.api import get_user_display_and_id_names, get_user_invoice_address +from ..users import users_service from ..wallets.api import get_wallet_by_user, get_wallet_with_permissions_by_user from ..wallets.errors import WalletAccessForbiddenError from . import _onetime_db, _rpc @@ -36,7 +37,11 @@ _logger = logging.getLogger(__name__) -MSG_WALLET_NO_ACCESS_ERROR = "User {user_id} does not have necessary permissions to do a payment into wallet {wallet_id}" +MSG_WALLET_NO_ACCESS_ERROR = user_message( + "You do not have the necessary permissions to make payments to wallet {wallet_id}.", + _version=1, +) + _FAKE_PAYMENT_TRANSACTION_ID_PREFIX = "fpt" @@ -76,7 +81,6 @@ async def _fake_init_payment( user_email, comment, ): - # (1) Init payment payment_id = f"{_FAKE_PAYMENT_TRANSACTION_ID_PREFIX}_{uuid4()}" # get_form_payment_url settings: PaymentsSettings = get_plugin_settings(app) @@ -86,7 +90,7 @@ async def _fake_init_payment( .with_query(id=payment_id) ) # (2) Annotate INIT transaction - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: await insert_init_payment_transaction( conn, payment_id=payment_id, @@ -258,7 +262,7 @@ async def raise_for_wallet_payments_permissions( ) if not permissions.read or not permissions.write: raise WalletAccessForbiddenError( - reason=MSG_WALLET_NO_ACCESS_ERROR.format( + details=MSG_WALLET_NO_ACCESS_ERROR.format( user_id=user_id, wallet_id=wallet_id ) ) @@ -292,8 +296,10 @@ async def init_creation_of_wallet_payment( assert user_wallet.wallet_id == wallet_id # nosec # user info - user = await get_user_display_and_id_names(app, user_id=user_id) - user_invoice_address = await get_user_invoice_address(app, user_id=user_id) + user = await users_service.get_user_display_and_id_names(app, user_id=user_id) + user_invoice_address = await users_service.get_user_invoice_address( + app, user_id=user_id, product_name=product_name + ) # stripe info product_stripe_info = await products_service.get_product_stripe_info( @@ -385,8 +391,10 @@ async def pay_with_payment_method( ) # user info - user = await get_user_display_and_id_names(app, user_id=user_id) - user_invoice_address = await get_user_invoice_address(app, user_id=user_id) + user_info = await users_service.get_user_display_and_id_names(app, user_id=user_id) + user_invoice_address = await users_service.get_user_invoice_address( + app, user_id=user_id, product_name=product_name + ) settings: PaymentsSettings = get_plugin_settings(app) if settings.PAYMENTS_FAKE_COMPLETION: @@ -399,8 +407,8 @@ async def pay_with_payment_method( wallet_id=wallet_id, wallet_name=user_wallet.name, user_id=user_id, - user_name=user.full_name, - user_email=user.email, + user_name=user_info.full_name, + user_email=user_info.email, comment=comment, ) @@ -415,8 +423,8 @@ async def pay_with_payment_method( wallet_id=wallet_id, wallet_name=user_wallet.name, user_id=user_id, - user_name=user.full_name, - user_email=user.email, + user_name=user_info.full_name, + user_email=user_info.email, user_address=user_invoice_address, 
stripe_price_id=product_stripe_info.stripe_price_id, stripe_tax_rate_id=product_stripe_info.stripe_tax_rate_id, diff --git a/services/web/server/src/simcore_service_webserver/payments/_onetime_db.py b/services/web/server/src/simcore_service_webserver/payments/_onetime_db.py index d6146cd0f813..d6014509461f 100644 --- a/services/web/server/src/simcore_service_webserver/payments/_onetime_db.py +++ b/services/web/server/src/simcore_service_webserver/payments/_onetime_db.py @@ -21,7 +21,7 @@ update_payment_transaction_state, ) -from ..db.plugin import get_database_engine +from ..db.plugin import get_database_engine_legacy from .errors import PaymentCompletedError, PaymentNotFoundError _logger = logging.getLogger(__name__) @@ -58,7 +58,7 @@ async def list_user_payment_transactions( Sorted by newest-first """ - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: total_number_of_items, rows = await get_user_payments_transactions( conn, user_id=user_id, offset=offset, limit=limit ) @@ -67,7 +67,7 @@ async def list_user_payment_transactions( async def get_pending_payment_transactions_ids(app: web.Application) -> list[PaymentID]: - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: result = await conn.execute( sa.select(payments_transactions.c.payment_id) .where(payments_transactions.c.completed_at == None) # noqa: E711 @@ -95,7 +95,7 @@ async def complete_payment_transaction( if invoice_url: optional_kwargs["invoice_url"] = invoice_url - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: row = await update_payment_transaction_state( conn, payment_id=payment_id, diff --git a/services/web/server/src/simcore_service_webserver/payments/_rpc_invoice.py b/services/web/server/src/simcore_service_webserver/payments/_rpc_invoice.py index d799d04fe6f7..dd0a84abe01e 100644 --- a/services/web/server/src/simcore_service_webserver/payments/_rpc_invoice.py +++ b/services/web/server/src/simcore_service_webserver/payments/_rpc_invoice.py @@ -1,7 +1,6 @@ from decimal import Decimal from aiohttp import web -from models_library.api_schemas_webserver import WEBSERVER_RPC_NAMESPACE from models_library.emails import LowerCaseEmailStr from models_library.payments import InvoiceDataGet, UserInvoiceAddress from models_library.products import ProductName @@ -10,8 +9,8 @@ from ..products import products_service from ..products.models import CreditResult -from ..rabbitmq import get_rabbitmq_rpc_server -from ..users.api import get_user_display_and_id_names, get_user_invoice_address +from ..rabbitmq import create_register_rpc_routes_on_startup +from ..users import users_service router = RPCRouter() @@ -30,10 +29,12 @@ async def get_invoice_data( product_stripe_info = await products_service.get_product_stripe_info( app, product_name=product_name ) - user_invoice_address: UserInvoiceAddress = await get_user_invoice_address( - app, user_id=user_id + user_invoice_address: UserInvoiceAddress = ( + await users_service.get_user_invoice_address( + app, product_name=product_name, user_id=user_id + ) ) - user_info = await get_user_display_and_id_names(app, user_id=user_id) + user_info = await users_service.get_user_display_and_id_names(app, user_id=user_id) return InvoiceDataGet( credit_amount=credit_result.credit_amount, @@ -45,6 +46,4 @@ async def get_invoice_data( ) -async def register_rpc_routes_on_startup(app: web.Application): - 
rpc_server = get_rabbitmq_rpc_server(app) - await rpc_server.register_router(router, WEBSERVER_RPC_NAMESPACE, app) +register_rpc_routes_on_startup = create_register_rpc_routes_on_startup(router) diff --git a/services/web/server/src/simcore_service_webserver/payments/_socketio.py b/services/web/server/src/simcore_service_webserver/payments/_socketio.py index 01bf0ec3268f..9d475c6e777c 100644 --- a/services/web/server/src/simcore_service_webserver/payments/_socketio.py +++ b/services/web/server/src/simcore_service_webserver/payments/_socketio.py @@ -29,7 +29,6 @@ async def notify_payment_completed( event_type=SOCKET_IO_PAYMENT_COMPLETED_EVENT, data=jsonable_encoder(payment, by_alias=True), ), - ignore_queue=True, ) @@ -46,5 +45,4 @@ async def notify_payment_method_acked( event_type=SOCKET_IO_PAYMENT_METHOD_ACKED_EVENT, data=jsonable_encoder(payment_method_transaction, by_alias=True), ), - ignore_queue=True, ) diff --git a/services/web/server/src/simcore_service_webserver/payments/_tasks.py b/services/web/server/src/simcore_service_webserver/payments/_tasks.py index b87465f5f3e4..5e6cd74c05b6 100644 --- a/services/web/server/src/simcore_service_webserver/payments/_tasks.py +++ b/services/web/server/src/simcore_service_webserver/payments/_tasks.py @@ -5,6 +5,7 @@ from typing import Any from aiohttp import web +from common_library.async_tools import cancel_wait_task from models_library.api_schemas_webserver.wallets import PaymentID, PaymentMethodID from pydantic import HttpUrl, TypeAdapter from servicelib.aiohttp.typing_extension import CleanupContextFunc @@ -143,10 +144,6 @@ async def _cleanup_ctx_fun( yield # tear-down - task.cancel() - try: - await task - except asyncio.CancelledError: - assert task.cancelled() # nosec + await cancel_wait_task(task) return _cleanup_ctx_fun diff --git a/services/web/server/src/simcore_service_webserver/payments/plugin.py b/services/web/server/src/simcore_service_webserver/payments/plugin.py index 3e8bbecc56ec..ab5384ee5ce0 100644 --- a/services/web/server/src/simcore_service_webserver/payments/plugin.py +++ b/services/web/server/src/simcore_service_webserver/payments/plugin.py @@ -1,12 +1,12 @@ """ - Plugin to interact with the 'payments' service +Plugin to interact with the 'payments' service """ import logging from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func from ..constants import APP_SETTINGS_KEY from ..db.plugin import setup_db from ..products.plugin import setup_products @@ -18,7 +18,7 @@ _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_PAYMENTS", @@ -26,6 +26,7 @@ ) def setup_payments(app: web.Application): settings = app[APP_SETTINGS_KEY].WEBSERVER_PAYMENTS + assert settings is not None # nosec setup_db(app) setup_products(app) diff --git a/services/web/server/src/simcore_service_webserver/products/_controller/rest.py b/services/web/server/src/simcore_service_webserver/products/_controller/rest.py index 77c72afe3b0c..f82b1f4b1ace 100644 --- a/services/web/server/src/simcore_service_webserver/products/_controller/rest.py +++ b/services/web/server/src/simcore_service_webserver/products/_controller/rest.py @@ -10,7 +10,10 @@ from ..._meta import API_VTAG as VTAG from ...login.decorators import login_required -from ...security.decorators import permission_required +from ...security.decorators import ( + group_or_role_permission_required, + 
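The one-line assignment above replaces the hand-written startup coroutine that each RPC controller used to define. Assuming the new factory simply packages the removed boilerplate, a sketch of it (the real one is exported from ..rabbitmq) might read:

from collections.abc import Awaitable, Callable

from aiohttp import web
from models_library.api_schemas_webserver import WEBSERVER_RPC_NAMESPACE
from servicelib.rabbitmq import RPCRouter

from ..rabbitmq import get_rabbitmq_rpc_server  # same helper the removed code imported


def create_register_rpc_routes_on_startup(
    router: RPCRouter,
) -> Callable[[web.Application], Awaitable[None]]:
    # returns an aiohttp on-startup coroutine bound to the given router
    async def _register(app: web.Application) -> None:
        rpc_server = get_rabbitmq_rpc_server(app)
        await rpc_server.register_router(router, WEBSERVER_RPC_NAMESPACE, app)

    return _register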
permission_required, +) from ...utils_aiohttp import envelope_json_response from .. import _service, products_web from .._repository import ProductRepository @@ -46,7 +49,7 @@ async def _get_current_product_price(request: web.Request): @routes.get(f"/{VTAG}/products/{{product_name}}", name="get_product") @login_required -@permission_required("product.details.*") +@group_or_role_permission_required("product.details.*") @handle_rest_requests_exceptions async def _get_product(request: web.Request): req_ctx = ProductsRequestContext.model_validate(request) diff --git a/services/web/server/src/simcore_service_webserver/products/_controller/rest_exceptions.py b/services/web/server/src/simcore_service_webserver/products/_controller/rest_exceptions.py index a9e8cb13f00c..78635508bd95 100644 --- a/services/web/server/src/simcore_service_webserver/products/_controller/rest_exceptions.py +++ b/services/web/server/src/simcore_service_webserver/products/_controller/rest_exceptions.py @@ -1,3 +1,4 @@ +from common_library.user_messages import user_message from servicelib.aiohttp import status from ...constants import MSG_TRY_AGAIN_OR_SUPPORT @@ -12,11 +13,17 @@ _TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { ProductNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "{product_name} was not found", + user_message( + "This product could not be found." + MSG_TRY_AGAIN_OR_SUPPORT, _version=1 + ), ), MissingStripeConfigError: HttpErrorInfo( status.HTTP_503_SERVICE_UNAVAILABLE, - "{product_name} service is currently unavailable." + MSG_TRY_AGAIN_OR_SUPPORT, + user_message( + "This service is temporarily unavailable due to a configuration issue. " + + MSG_TRY_AGAIN_OR_SUPPORT, + _version=1, + ), ), } diff --git a/services/web/server/src/simcore_service_webserver/products/_controller/rest_schemas.py b/services/web/server/src/simcore_service_webserver/products/_controller/rest_schemas.py index 6a4ac2100b17..3ab7f66287d4 100644 --- a/services/web/server/src/simcore_service_webserver/products/_controller/rest_schemas.py +++ b/services/web/server/src/simcore_service_webserver/products/_controller/rest_schemas.py @@ -7,7 +7,7 @@ from models_library.rest_base import RequestParameters, StrictRequestParameters from models_library.users import UserID from pydantic import Field -from servicelib.request_keys import RQT_USERID_KEY +from servicelib.aiohttp.request_keys import RQT_USERID_KEY from ...constants import RQ_PRODUCT_KEY diff --git a/services/web/server/src/simcore_service_webserver/products/_controller/rpc.py b/services/web/server/src/simcore_service_webserver/products/_controller/rpc.py index 852cf2e4f8c0..a8fbfb15a1a2 100644 --- a/services/web/server/src/simcore_service_webserver/products/_controller/rpc.py +++ b/services/web/server/src/simcore_service_webserver/products/_controller/rpc.py @@ -1,13 +1,12 @@ from decimal import Decimal from aiohttp import web -from models_library.api_schemas_webserver import WEBSERVER_RPC_NAMESPACE from models_library.api_schemas_webserver.products import CreditResultRpcGet from models_library.products import ProductName from servicelib.rabbitmq import RPCRouter from ...constants import APP_SETTINGS_KEY -from ...rabbitmq import get_rabbitmq_rpc_server, setup_rabbitmq +from ...rabbitmq import create_register_rpc_routes_on_startup, setup_rabbitmq from .. 
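The periodic-task teardown in payments/_tasks.py above now delegates to cancel_wait_task from common_library.async_tools. Assuming it is essentially the cancel-then-await idiom it replaces, a minimal equivalent is:

import asyncio
import contextlib


async def cancel_wait_task(task: asyncio.Task) -> None:
    # cancel the task, then wait until it has really finished, swallowing the expected CancelledError
    task.cancel()
    with contextlib.suppress(asyncio.CancelledError):
        await task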
import _service from .._models import CreditResult @@ -27,9 +26,7 @@ async def get_credit_amount( return CreditResultRpcGet.model_validate(credit_result, from_attributes=True) -async def _register_rpc_routes_on_startup(app: web.Application): - rpc_server = get_rabbitmq_rpc_server(app) - await rpc_server.register_router(router, WEBSERVER_RPC_NAMESPACE, app) +_register_rpc_routes_on_startup = create_register_rpc_routes_on_startup(router) def setup_rpc(app: web.Application): diff --git a/services/web/server/src/simcore_service_webserver/products/_models.py b/services/web/server/src/simcore_service_webserver/products/_models.py index 06e80d7b83a2..e28c20eb2b68 100644 --- a/services/web/server/src/simcore_service_webserver/products/_models.py +++ b/services/web/server/src/simcore_service_webserver/products/_models.py @@ -12,7 +12,6 @@ from models_library.basic_types import NonNegativeDecimal from models_library.emails import LowerCaseEmailStr from models_library.products import ProductName, StripePriceID, StripeTaxRateID -from models_library.utils.change_case import snake_to_camel from pydantic import ( BaseModel, BeforeValidator, @@ -140,6 +139,17 @@ class Product(BaseModel): group_id: Annotated[ int | None, Field(description="Groups associated to this product") ] = None + support_standard_group_id: Annotated[ + int | None, Field(description="Support standard group ID, None if disabled") + ] = None + support_assigned_fogbugz_person_id: Annotated[ + int | None, + Field(description="Support assigned Fogbugz person ID, None if disabled"), + ] = None + support_assigned_fogbugz_project_id: Annotated[ + int | None, + Field(description="Support assigned Fogbugz project ID, None if disabled"), + ] = None is_payment_enabled: Annotated[ bool, @@ -245,7 +255,6 @@ def _update_json_schema_extra(schema: JsonDict) -> None: "ui": { "logo_url": "https://acme.com/logo", "strong_color": "#123456", - "project_alias": "study", }, }, "issues": [ @@ -285,6 +294,9 @@ def _update_json_schema_extra(schema: JsonDict) -> None: "LOGIN_2FA_REQUIRED": False, }, "group_id": 12345, + "support_standard_group_id": 67890, + "support_assigned_fogbugz_person_id": 112, + "support_assigned_fogbugz_project_id": 72, "is_payment_enabled": False, }, ] @@ -292,13 +304,13 @@ def _update_json_schema_extra(schema: JsonDict) -> None: ) model_config = ConfigDict( - alias_generator=snake_to_camel, - populate_by_name=True, - str_strip_whitespace=True, - frozen=True, + # NOTE: do not add aliases. 
Use ProductGet schema for rest API from_attributes=True, - extra="ignore", + frozen=True, json_schema_extra=_update_json_schema_extra, + str_strip_whitespace=True, + validate_by_name=True, + extra="ignore", ) def to_statics(self) -> dict[str, Any]: @@ -320,16 +332,16 @@ def to_statics(self) -> dict[str, Any]: "support": True, "is_payment_enabled": True, "is_dynamic_services_telemetry_enabled": True, + "support_standard_group_id": True, }, exclude_none=True, exclude_unset=True, - by_alias=True, ) def get_template_name_for(self, filename: str) -> str | None: """Checks for field marked with 'x_template_name' that fits the argument""" template_name = filename.removesuffix(".jinja2") - for name, field in self.model_fields.items(): + for name, field in self.__class__.model_fields.items(): if ( field.json_schema_extra and field.json_schema_extra.get("x_template_name") == template_name # type: ignore[union-attr] diff --git a/services/web/server/src/simcore_service_webserver/products/_repository.py b/services/web/server/src/simcore_service_webserver/products/_repository.py index 197e7781aca9..64bf33af45e7 100644 --- a/services/web/server/src/simcore_service_webserver/products/_repository.py +++ b/services/web/server/src/simcore_service_webserver/products/_repository.py @@ -22,7 +22,6 @@ pass_or_acquire_connection, transaction_context, ) -from simcore_service_webserver.constants import FRONTEND_APPS_AVAILABLE from sqlalchemy.engine import Row from sqlalchemy.ext.asyncio import AsyncConnection @@ -54,6 +53,9 @@ products.c.registration_email_template, products.c.max_open_studies_per_user, products.c.group_id, + products.c.support_standard_group_id, + products.c.support_assigned_fogbugz_person_id, + products.c.support_assigned_fogbugz_project_id, ] assert {column.name for column in _PRODUCTS_COLUMNS}.issubset( # nosec @@ -61,7 +63,7 @@ ) -def _to_domain(products_row: Row, payments: PaymentFields) -> Product: +def _db_to_domain(products_row: Row, payments: PaymentFields) -> Product: return Product( **products_row._asdict(), is_payment_enabled=payments.enabled, @@ -113,7 +115,7 @@ async def list_products( async for row in rows: name = row.name payments = await _get_product_payment_fields(conn, product_name=name) - app_products.append(_to_domain(row, payments)) + app_products.append(_db_to_domain(row, payments)) assert name in FRONTEND_APPS_AVAILABLE # nosec @@ -140,7 +142,7 @@ async def get_product( payments = await _get_product_payment_fields( conn, product_name=row.name ) - return _to_domain(row, payments) + return _db_to_domain(row, payments) return None async def get_default_product_name( diff --git a/services/web/server/src/simcore_service_webserver/products/_service.py b/services/web/server/src/simcore_service_webserver/products/_service.py index 234198e08e72..4a894770d716 100644 --- a/services/web/server/src/simcore_service_webserver/products/_service.py +++ b/services/web/server/src/simcore_service_webserver/products/_service.py @@ -95,7 +95,7 @@ async def get_credit_amount( if price_info is None or not price_info.usd_per_credit: # '0 or None' should raise raise ProductPriceNotDefinedError( - reason=f"Product {product_name} usd_per_credit is either not defined or zero" + details=f"Product {product_name} usd_per_credit is either not defined or zero" ) if dollar_amount < price_info.min_payment_amount_usd: diff --git a/services/web/server/src/simcore_service_webserver/products/_web_helpers.py b/services/web/server/src/simcore_service_webserver/products/_web_helpers.py index 
859793d9e0a8..b6f5f15bfbe6 100644 --- a/services/web/server/src/simcore_service_webserver/products/_web_helpers.py +++ b/services/web/server/src/simcore_service_webserver/products/_web_helpers.py @@ -4,17 +4,19 @@ import aiofiles from aiohttp import web from models_library.products import ProductName +from models_library.users import UserID from simcore_postgres_database.utils_products_prices import ProductPriceInfo -from simcore_service_webserver.products.errors import ( - FileTemplateNotFoundError, - ProductNotFoundError, - UnknownProductError, -) from .._resources import webserver_resources from ..constants import RQ_PRODUCT_KEY +from ..groups import api as groups_service from . import _service from ._web_events import APP_PRODUCTS_TEMPLATES_DIR_KEY +from .errors import ( + FileTemplateNotFoundError, + ProductNotFoundError, + UnknownProductError, +) from .models import Product @@ -24,8 +26,7 @@ def get_product_name(request: web.Request) -> str: try: product_name: str = request[RQ_PRODUCT_KEY] except KeyError as exc: - error = UnknownProductError() - error.add_note("TIP: Check products middleware") + error = UnknownProductError(tip="Check products middleware") raise error from exc return product_name @@ -39,6 +40,22 @@ def get_current_product(request: web.Request) -> Product: return current_product +async def is_user_in_product_support_group( + request: web.Request, *, user_id: UserID +) -> bool: + """Checks if the user belongs to the support group of the given product. + If the product does not have a support group, returns False. + """ + product = get_current_product(request) + if product.support_standard_group_id is None: + return False + return await groups_service.is_user_in_group( + app=request.app, + user_id=user_id, + group_id=product.support_standard_group_id, + ) + + def _get_current_product_or_none(request: web.Request) -> Product | None: with contextlib.suppress(ProductNotFoundError, UnknownProductError): product: Product = get_current_product(request) diff --git a/services/web/server/src/simcore_service_webserver/products/errors.py b/services/web/server/src/simcore_service_webserver/products/errors.py index 3b0da3564f51..6b5bff54a4d2 100644 --- a/services/web/server/src/simcore_service_webserver/products/errors.py +++ b/services/web/server/src/simcore_service_webserver/products/errors.py @@ -13,7 +13,7 @@ class ProductNotFoundError(ProductError): class ProductPriceNotDefinedError(ProductError): - msg_template = "Product price not defined. 
{reason}" + msg_template = "Product price not defined: {details}" class BelowMinimumPaymentError(ProductError): diff --git a/services/web/server/src/simcore_service_webserver/products/plugin.py b/services/web/server/src/simcore_service_webserver/products/plugin.py index 5aea6edcf7e2..c7f5d84b0fa2 100644 --- a/services/web/server/src/simcore_service_webserver/products/plugin.py +++ b/services/web/server/src/simcore_service_webserver/products/plugin.py @@ -11,33 +11,46 @@ import logging from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup + +from ..application_setup import ( + ModuleCategory, + app_setup_func, + ensure_single_setup, +) _logger = logging.getLogger(__name__) -@app_module_setup( - __name__, - ModuleCategory.ADDON, - depends=["simcore_service_webserver.db"], - settings_name="WEBSERVER_PRODUCTS", - logger=_logger, -) -def setup_products(app: web.Application): +@ensure_single_setup(f"{__name__}.without_rpc", logger=_logger) +def setup_products_without_rpc(app: web.Application): # # NOTE: internal import speeds up booting app # specially if this plugin is not set up to be loaded # from ..constants import APP_SETTINGS_KEY from . import _web_events, _web_middlewares - from ._controller import rest, rpc + from ._controller import rest assert app[APP_SETTINGS_KEY].WEBSERVER_PRODUCTS is True # nosec + # rest API app.middlewares.append(_web_middlewares.discover_product_middleware) - app.router.add_routes(rest.routes) - rpc.setup_rpc(app) - _web_events.setup_web_events(app) + + +@app_setup_func( + __name__, + ModuleCategory.ADDON, + depends=["simcore_service_webserver.db"], + settings_name="WEBSERVER_PRODUCTS", + logger=_logger, +) +def setup_products(app: web.Application): + from ._controller import rpc + + setup_products_without_rpc(app) + + # rpc API (optional) + rpc.setup_rpc(app) diff --git a/services/web/server/src/simcore_service_webserver/products/products_service.py b/services/web/server/src/simcore_service_webserver/products/products_service.py index 2e888de625a9..1fbc880d7e95 100644 --- a/services/web/server/src/simcore_service_webserver/products/products_service.py +++ b/services/web/server/src/simcore_service_webserver/products/products_service.py @@ -13,9 +13,9 @@ "get_product", "get_product_stripe_info", "get_product_ui", + "is_product_billable", "list_products", "list_products_names", - "is_product_billable", ) # nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/products/products_web.py b/services/web/server/src/simcore_service_webserver/products/products_web.py index 38ddb1634ece..e22c97407dc4 100644 --- a/services/web/server/src/simcore_service_webserver/products/products_web.py +++ b/services/web/server/src/simcore_service_webserver/products/products_web.py @@ -3,6 +3,7 @@ get_current_product_credit_price_info, get_product_name, get_product_template_path, + is_user_in_product_support_group, ) __all__: tuple[str, ...] 
= ( @@ -10,5 +11,6 @@ "get_current_product_credit_price_info", "get_product_name", "get_product_template_path", + "is_user_in_product_support_group", ) # nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/projects/_access_rights_service.py b/services/web/server/src/simcore_service_webserver/projects/_access_rights_service.py index 7007dc72e333..ddc790d5ece1 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_access_rights_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_access_rights_service.py @@ -3,7 +3,7 @@ from models_library.projects import ProjectID from models_library.users import UserID -from ..db.plugin import get_database_engine +from ..db.plugin import get_database_engine_legacy from ..workspaces.api import get_workspace from ._access_rights_repository import get_project_owner from ._projects_repository_legacy import APP_PROJECT_DBAPI, ProjectDBAPI @@ -20,7 +20,9 @@ async def validate_project_ownership( ProjectInvalidRightsError: if 'user_id' does not own 'project_uuid' """ if ( - await get_project_owner(get_database_engine(app), project_uuid=project_uuid) + await get_project_owner( + get_database_engine_legacy(app), project_uuid=project_uuid + ) != user_id ): raise ProjectInvalidRightsError(user_id=user_id, project_uuid=project_uuid) diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/_rest_exceptions.py b/services/web/server/src/simcore_service_webserver/projects/_controller/_rest_exceptions.py index f986af6a1f89..60f24a3c6f69 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/_rest_exceptions.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/_rest_exceptions.py @@ -2,6 +2,7 @@ import logging from collections import Counter +from common_library.user_messages import user_message from servicelib.aiohttp import status from servicelib.rabbitmq.rpc_interfaces.catalog.errors import ( CatalogForbiddenError, @@ -9,6 +10,7 @@ CatalogNotAvailableError, ) +from ...catalog._controller_rest_exceptions import catalog_exceptions_handlers_map from ...conversations.errors import ( ConversationErrorNotFoundError, ConversationMessageErrorNotFoundError, @@ -19,6 +21,7 @@ exception_handling_decorator, to_exceptions_handlers_map, ) +from ...exception_handling._base import ExceptionHandlersMap from ...folders.errors import FolderAccessForbiddenError, FolderNotFoundError from ...resource_usage.errors import DefaultPricingPlanNotFoundError from ...users.exceptions import UserDefaultWalletNotFoundError @@ -41,6 +44,7 @@ ProjectOwnerNotFoundInTheProjectAccessRightsError, ProjectStartsTooManyDynamicNodesError, ProjectTooManyProjectOpenedError, + ProjectTooManyUserSessionsError, ProjectTypeAndTemplateIncompatibilityError, ProjectWalletPendingTransactionError, WrongTagIdsInQueryError, @@ -52,11 +56,11 @@ _FOLDER_ERRORS: ExceptionToHttpErrorMap = { FolderAccessForbiddenError: HttpErrorInfo( status.HTTP_403_FORBIDDEN, - "Access to folder forbidden", + user_message("Access to this folder is forbidden.", _version=1), ), FolderNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Folder not found: {reason}", + user_message("The requested folder could not be found", _version=1), ), } @@ -64,15 +68,19 @@ _NODE_ERRORS: ExceptionToHttpErrorMap = { NodeNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Node '{node_uuid}' not found in project '{project_uuid}'", + user_message( + "Node '{node_uuid}' was not found in project 
'{project_uuid}'.", _version=1 + ), ), ParentNodeNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Parent node '{node_uuid}' not found", + user_message("Parent node '{node_uuid}' was not found.", _version=1), ), ProjectNodeRequiredInputsNotSetError: HttpErrorInfo( status.HTTP_409_CONFLICT, - "Project node is required but input is not set", + user_message( + "Required input values for this project node have not been set.", _version=1 + ), ), } @@ -80,59 +88,98 @@ _PROJECT_ERRORS: ExceptionToHttpErrorMap = { ProjectDeleteError: HttpErrorInfo( status.HTTP_409_CONFLICT, - "Failed to complete deletion of '{project_uuid}': {reason}", + user_message( + "Unable to complete deletion of project '{project_uuid}': {details}", + _version=1, + ), ), ProjectGroupNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Project group not found: {reason}", + user_message( + "The requested project group could not be found: {details}", _version=1 + ), ), ProjectInvalidRightsError: HttpErrorInfo( status.HTTP_403_FORBIDDEN, - "Do not have sufficient access rights on project {project_uuid} for this action", + user_message( + "You do not have sufficient access rights to perform this action on project {project_uuid}.", + _version=1, + ), ), InsufficientRoleForProjectTemplateTypeUpdateError: HttpErrorInfo( status.HTTP_403_FORBIDDEN, - "Do not have sufficient access rights on updating project template type", + user_message( + "You do not have sufficient permissions to update the project template type.", + _version=1, + ), ), ProjectInvalidUsageError: HttpErrorInfo( status.HTTP_422_UNPROCESSABLE_ENTITY, - "Invalid usage for project", + user_message("The project cannot be used in this way.", _version=1), ), ProjectNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Project {project_uuid} not found", + user_message("Project {project_uuid} could not be found.", _version=1), ), ProjectOwnerNotFoundInTheProjectAccessRightsError: HttpErrorInfo( status.HTTP_400_BAD_REQUEST, - "Project owner identifier was not found in the project's access-rights field", + user_message( + "The project owner could not be found in the project's access rights.", + _version=1, + ), ), ProjectTooManyProjectOpenedError: HttpErrorInfo( status.HTTP_409_CONFLICT, - "You cannot open more than {max_num_projects} study/ies at once. Please close another study and retry.", + user_message( + "You cannot open more than {max_num_projects} project/s at once. Please close another project and retry.", + _version=2, + ), ), ProjectStartsTooManyDynamicNodesError: HttpErrorInfo( status.HTTP_409_CONFLICT, - "The maximal amount of concurrently running dynamic services was reached. Please manually stop a service and retry.", + user_message( + "The maximum number of concurrently running dynamic services has been reached. Please manually stop a service and retry.", + _version=1, + ), ), ProjectWalletPendingTransactionError: HttpErrorInfo( status.HTTP_409_CONFLICT, - "Project has currently pending transactions. It is forbidden to change wallet.", + user_message( + "This project has pending transactions. Changing the wallet is currently not allowed.", + _version=1, + ), + ), + ProjectTooManyUserSessionsError: HttpErrorInfo( + status.HTTP_409_CONFLICT, + user_message( + "You cannot open more than {max_num_sessions} session(s) for the same project at once. 
Please close another session and retry.", + _version=1, + ), ), ProjectInDebtCanNotChangeWalletError: HttpErrorInfo( status.HTTP_402_PAYMENT_REQUIRED, - "Unable to change the credit account linked to the project. The project is embargoed because the last transaction of {debt_amount} resulted in the credit account going negative.", + user_message( + "Unable to change the credit account linked to the project. The project is embargoed because the last transaction of {debt_amount} resulted in the credit account going negative.", + _version=1, + ), ), ProjectInDebtCanNotOpenError: HttpErrorInfo( status.HTTP_402_PAYMENT_REQUIRED, - "Unable to open the project. The project is embargoed because the last transaction of {debt_amount} resulted in the credit account going negative.", + user_message( + "Unable to open the project. The project is embargoed because the last transaction of {debt_amount} resulted in the credit account going negative.", + _version=1, + ), ), WrongTagIdsInQueryError: HttpErrorInfo( status.HTTP_400_BAD_REQUEST, - "Wrong tag IDs in query", + user_message("Invalid tag IDs were provided in the request.", _version=1), ), ProjectTypeAndTemplateIncompatibilityError: HttpErrorInfo( status.HTTP_400_BAD_REQUEST, - "Wrong project type and template type combination: {reason}", + user_message( + "The project type and template type combination is not valid", + _version=1, + ), ), } @@ -140,11 +187,13 @@ _WORKSPACE_ERRORS: ExceptionToHttpErrorMap = { WorkspaceAccessForbiddenError: HttpErrorInfo( status.HTTP_403_FORBIDDEN, - "Access to workspace forbidden: {reason}", + user_message("Access to this workspace is forbidden: {details}", _version=1), ), WorkspaceNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Workspace not found: {reason}", + user_message( + "The requested workspace could not be found: {details}", _version=1 + ), ), } @@ -152,15 +201,21 @@ _WALLET_ERRORS: ExceptionToHttpErrorMap = { UserDefaultWalletNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Wallet not found: {reason}", + user_message("The requested wallet could not be found: {details}", _version=1), ), WalletAccessForbiddenError: HttpErrorInfo( status.HTTP_403_FORBIDDEN, - "Payment required, but the user lacks access to the project's linked wallet: Wallet access forbidden. {reason}", + user_message( + "Payment is required, but you do not have access to the project's linked wallet: {details}", + _version=1, + ), ), WalletNotEnoughCreditsError: HttpErrorInfo( status.HTTP_402_PAYMENT_REQUIRED, - "Wallet does not have enough credits. 
{reason}", + user_message( + "The wallet does not have enough credits to complete this operation: {details}", + _version=1, + ), ), } @@ -168,11 +223,11 @@ _PRICING_ERRORS: ExceptionToHttpErrorMap = { DefaultPricingPlanNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Default pricing plan not found", + user_message("The default pricing plan could not be found.", _version=1), ), DefaultPricingUnitNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Default pricing unit not found", + user_message("The default pricing unit could not be found.", _version=1), ), } @@ -180,11 +235,13 @@ _CONVERSATION_ERRORS: ExceptionToHttpErrorMap = { ConversationErrorNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Conversation not found", + user_message("The requested conversation could not be found.", _version=1), ), ConversationMessageErrorNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Conversation message not found", + user_message( + "The requested conversation message could not be found.", _version=1 + ), ), } @@ -192,18 +249,24 @@ _OTHER_ERRORS: ExceptionToHttpErrorMap = { CatalogNotAvailableError: HttpErrorInfo( status.HTTP_503_SERVICE_UNAVAILABLE, - "This service is currently not available", + user_message("The catalog service is currently unavailable.", _version=1), ), ClustersKeeperNotAvailableError: HttpErrorInfo( status.HTTP_503_SERVICE_UNAVAILABLE, - "Clusters-keeper service is not available", + user_message( + "The clusters-keeper service is currently unavailable.", _version=1 + ), ), CatalogForbiddenError: HttpErrorInfo( status.HTTP_403_FORBIDDEN, - "Catalog forbidden: Insufficient access rights for {name}", + user_message( + "Access denied: You do not have sufficient permissions for {name}.", + _version=1, + ), ), CatalogItemNotFoundError: HttpErrorInfo( - status.HTTP_404_NOT_FOUND, "{name} was not found" + status.HTTP_404_NOT_FOUND, + user_message("The requested item '{name}' was not found.", _version=1), ), } @@ -239,6 +302,9 @@ def _assert_duplicate(): } -handle_plugin_requests_exceptions = exception_handling_decorator( - to_exceptions_handlers_map(_TO_HTTP_ERROR_MAP) -) +_handlers: ExceptionHandlersMap = { + **catalog_exceptions_handlers_map, + **to_exceptions_handlers_map(_TO_HTTP_ERROR_MAP), +} + +handle_plugin_requests_exceptions = exception_handling_decorator(_handlers) diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/_rest_schemas.py b/services/web/server/src/simcore_service_webserver/projects/_controller/_rest_schemas.py index 9618a73bb4c5..dffb94787142 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/_rest_schemas.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/_rest_schemas.py @@ -5,9 +5,9 @@ Field, ) -from ...models import RequestContext +from ...models import AuthenticatedRequestContext -assert RequestContext.__name__ # nosec +assert AuthenticatedRequestContext.__name__ # nosec class ProjectPathParams(BaseModel): @@ -21,4 +21,4 @@ class RemoveQueryParams(BaseModel): ) -__all__: tuple[str, ...] = ("RequestContext",) +__all__: tuple[str, ...] 
= ("AuthenticatedRequestContext",) diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/_rest_utils.py b/services/web/server/src/simcore_service_webserver/projects/_controller/_rest_utils.py index beab5959668f..077761083a23 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/_rest_utils.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/_rest_utils.py @@ -4,6 +4,7 @@ from models_library.rest_pagination_utils import paginate_data from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from servicelib.rest_constants import RESPONSE_MODEL_POLICY +from yarl import URL from .. import _permalink_service from .._crud_api_read import _paralell_update @@ -11,13 +12,17 @@ async def aggregate_data_to_projects_from_request( - request: web.Request, + app: web.Application, + url: URL, + headers: dict[str, str], projects: list[ProjectDict], ) -> list[ProjectDict]: update_permalink_per_project = [ # permalink - _permalink_service.aggregate_permalink_in_project(request, project=prj) + _permalink_service.aggregate_permalink_in_project( + app, url, headers, project=prj + ) for prj in projects ] diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/access_rights_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/access_rights_rest.py index ddf4e0dffbe9..61f38cb12e18 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/access_rights_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/access_rights_rest.py @@ -26,7 +26,7 @@ from .. import _groups_service from .._groups_service import ProjectGroupGet from ._rest_exceptions import handle_plugin_requests_exceptions -from ._rest_schemas import ProjectPathParams, RequestContext +from ._rest_schemas import AuthenticatedRequestContext, ProjectPathParams _logger = logging.getLogger(__name__) @@ -40,7 +40,7 @@ @permission_required("project.access_rights.update") @handle_plugin_requests_exceptions async def share_project(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) body_params = await parse_request_body_as(ProjectShare, request) @@ -98,7 +98,7 @@ async def share_project(request: web.Request): @permission_required("project.access_rights.update") @handle_plugin_requests_exceptions async def create_project_group(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectsGroupsPathParams, request) body_params = await parse_request_body_as(ProjectsGroupsBodyParams, request) @@ -121,7 +121,7 @@ async def create_project_group(request: web.Request): @permission_required("project.read") @handle_plugin_requests_exceptions async def list_project_groups(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) project_groups: list[ProjectGroupGet] = ( @@ -144,7 +144,7 @@ async def list_project_groups(request: web.Request): @permission_required("project.access_rights.update") @handle_plugin_requests_exceptions async def replace_project_group(request: web.Request): - req_ctx = 
RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectsGroupsPathParams, request) body_params = await parse_request_body_as(ProjectsGroupsBodyParams, request) @@ -169,7 +169,7 @@ async def replace_project_group(request: web.Request): @permission_required("project.access_rights.update") @handle_plugin_requests_exceptions async def delete_project_group(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectsGroupsPathParams, request) await _groups_service.delete_project_group( diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/comments_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/comments_rest.py index 183cf1fa3b66..b8e1b3b746a5 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/comments_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/comments_rest.py @@ -26,7 +26,7 @@ from ...utils_aiohttp import envelope_json_response from .. import _comments_service, _projects_service from ._rest_exceptions import handle_plugin_requests_exceptions -from ._rest_schemas import RequestContext +from ._rest_schemas import AuthenticatedRequestContext _logger = logging.getLogger(__name__) @@ -60,7 +60,7 @@ class _ProjectCommentsBodyParams(BaseModel): @permission_required("project.read") @handle_plugin_requests_exceptions async def create_project_comment(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProjectCommentsPathParams, request) body_params = await parse_request_body_as(_ProjectCommentsBodyParams, request) @@ -100,7 +100,7 @@ class _ListProjectCommentsQueryParams(BaseModel): @permission_required("project.read") @handle_plugin_requests_exceptions async def list_project_comments(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProjectCommentsPathParams, request) query_params: _ListProjectCommentsQueryParams = parse_request_query_parameters_as( _ListProjectCommentsQueryParams, request @@ -149,7 +149,7 @@ async def list_project_comments(request: web.Request): @permission_required("project.read") @handle_plugin_requests_exceptions async def update_project_comment(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as( _ProjectCommentsWithCommentPathParams, request ) @@ -180,7 +180,7 @@ async def update_project_comment(request: web.Request): @permission_required("project.read") @handle_plugin_requests_exceptions async def delete_project_comment(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as( _ProjectCommentsWithCommentPathParams, request ) @@ -208,7 +208,7 @@ async def delete_project_comment(request: web.Request): @permission_required("project.read") @handle_plugin_requests_exceptions async def get_project_comment(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = 
AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as( _ProjectCommentsWithCommentPathParams, request ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/conversations_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/conversations_rest.py index fa7a01df6858..89dea13e26e4 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/conversations_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/conversations_rest.py @@ -2,7 +2,7 @@ from aiohttp import web from models_library.api_schemas_webserver._base import InputSchema -from models_library.api_schemas_webserver.projects_conversations import ( +from models_library.api_schemas_webserver.conversations import ( ConversationMessageRestGet, ConversationRestGet, ) @@ -17,7 +17,7 @@ PageQueryParameters, ) from models_library.rest_pagination_utils import paginate_data -from pydantic import BaseModel, ConfigDict +from pydantic import BaseModel, ConfigDict, field_validator from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import ( parse_request_body_as, @@ -33,7 +33,7 @@ from ...utils_aiohttp import envelope_json_response from .. import _conversations_service from ._rest_exceptions import handle_plugin_requests_exceptions -from ._rest_schemas import ProjectPathParams, RequestContext +from ._rest_schemas import AuthenticatedRequestContext, ProjectPathParams _logger = logging.getLogger(__name__) @@ -56,6 +56,16 @@ class _ProjectConversationsCreateBodyParams(InputSchema): name: str type: ConversationType + @field_validator("type") + @classmethod + def validate_type(cls, value): + if value is not None and value not in [ + ConversationType.PROJECT_ANNOTATION, + ConversationType.PROJECT_STATIC, + ]: + raise ValueError("Only project conversations are allowed") + return value + class _ProjectConversationsPutBodyParams(InputSchema): name: str @@ -69,7 +79,7 @@ class _ProjectConversationsPutBodyParams(InputSchema): @permission_required("project.read") @handle_plugin_requests_exceptions async def create_project_conversation(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) body_params = await parse_request_body_as( _ProjectConversationsCreateBodyParams, request @@ -96,7 +106,7 @@ async def create_project_conversation(request: web.Request): @permission_required("project.read") @handle_plugin_requests_exceptions async def list_project_conversations(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) query_params = parse_request_query_parameters_as( _ListProjectConversationsQueryParams, request @@ -136,7 +146,7 @@ async def list_project_conversations(request: web.Request): @permission_required("project.read") @handle_plugin_requests_exceptions async def update_project_conversation(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as( _ProjectConversationsPathParams, request ) @@ -165,7 +175,7 @@ async def update_project_conversation(request: web.Request): @permission_required("project.read") 
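The new field_validator above rejects non-project conversation types at the schema boundary. The same pattern in isolation (enum members other than the two project ones are hypothetical, added only to make the example meaningful):

from enum import Enum

from pydantic import BaseModel, ValidationError, field_validator


class ConversationType(str, Enum):
    PROJECT_ANNOTATION = "PROJECT_ANNOTATION"
    PROJECT_STATIC = "PROJECT_STATIC"
    SUPPORT = "SUPPORT"  # hypothetical extra member, for illustration only


class _CreateBody(BaseModel):
    name: str
    type: ConversationType

    @field_validator("type")
    @classmethod
    def _only_project_conversations(cls, value: ConversationType) -> ConversationType:
        if value not in (
            ConversationType.PROJECT_ANNOTATION,
            ConversationType.PROJECT_STATIC,
        ):
            msg = "Only project conversations are allowed"
            raise ValueError(msg)
        return value


_CreateBody(name="notes", type=ConversationType.PROJECT_STATIC)  # accepted
try:
    _CreateBody(name="help", type=ConversationType.SUPPORT)  # rejected by the validator
except ValidationError:
    pass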
@handle_plugin_requests_exceptions async def delete_project_conversation(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as( _ProjectConversationsPathParams, request ) @@ -188,7 +198,7 @@ async def delete_project_conversation(request: web.Request): @permission_required("project.read") @handle_plugin_requests_exceptions async def get_project_conversation(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as( _ProjectConversationsPathParams, request ) @@ -238,7 +248,7 @@ class _ProjectConversationMessagesPutBodyParams(BaseModel): @permission_required("project.read") @handle_plugin_requests_exceptions async def create_project_conversation_message(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as( _ProjectConversationsPathParams, request ) @@ -268,7 +278,7 @@ async def create_project_conversation_message(request: web.Request): @permission_required("project.read") @handle_plugin_requests_exceptions async def list_project_conversation_messages(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as( _ProjectConversationsPathParams, request ) @@ -314,7 +324,7 @@ async def list_project_conversation_messages(request: web.Request): @permission_required("project.read") @handle_plugin_requests_exceptions async def update_project_conversation_message(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as( _ProjectConversationsMessagesPathParams, request ) @@ -344,7 +354,7 @@ async def update_project_conversation_message(request: web.Request): @permission_required("project.read") @handle_plugin_requests_exceptions async def delete_project_conversation_message(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as( _ProjectConversationsMessagesPathParams, request ) @@ -368,7 +378,7 @@ async def delete_project_conversation_message(request: web.Request): @permission_required("project.read") @handle_plugin_requests_exceptions async def get_project_conversation_message(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as( _ProjectConversationsMessagesPathParams, request ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/folders_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/folders_rest.py index 7d7a7f6f954f..16af1531dc95 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/folders_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/folders_rest.py @@ -13,7 +13,7 @@ from ...security.decorators import permission_required from .. 
import _folders_service from ._rest_exceptions import handle_plugin_requests_exceptions -from ._rest_schemas import RequestContext +from ._rest_schemas import AuthenticatedRequestContext _logger = logging.getLogger(__name__) @@ -40,7 +40,7 @@ class _ProjectsFoldersPathParams(BaseModel): @permission_required("project.folders.*") @handle_plugin_requests_exceptions async def replace_project_folder(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProjectsFoldersPathParams, request) await _folders_service.move_project_into_folder( diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/metadata_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/metadata_rest.py index b0c9bc1236e7..75c6e56c451e 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/metadata_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/metadata_rest.py @@ -29,7 +29,7 @@ from ...utils_aiohttp import envelope_json_response from .. import _metadata_service from ._rest_exceptions import handle_plugin_requests_exceptions -from ._rest_schemas import ProjectPathParams, RequestContext +from ._rest_schemas import AuthenticatedRequestContext, ProjectPathParams routes = web.RouteTableDef() @@ -44,7 +44,7 @@ @permission_required("project.read") @handle_plugin_requests_exceptions async def get_project_metadata(request: web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) custom_metadata = await _metadata_service.get_project_custom_metadata_for_user( @@ -64,7 +64,7 @@ async def get_project_metadata(request: web.Request) -> web.Response: @permission_required("project.update") @handle_plugin_requests_exceptions async def update_project_metadata(request: web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) update = await parse_request_body_as(ProjectMetadataUpdate, request) diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_pricing_unit_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_pricing_unit_rest.py index 6476389c4d73..39c4ae1f1c80 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_pricing_unit_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_pricing_unit_rest.py @@ -17,7 +17,7 @@ from .. 
import _projects_service from .._projects_repository_legacy import ProjectDBAPI from ._rest_exceptions import handle_plugin_requests_exceptions -from ._rest_schemas import RequestContext +from ._rest_schemas import AuthenticatedRequestContext from .nodes_rest import NodePathParams _logger = logging.getLogger(__name__) @@ -42,7 +42,7 @@ class PricingUnitNotFoundError(PricingUnitError): @handle_plugin_requests_exceptions async def get_project_node_pricing_unit(request: web.Request): db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(request.app) - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) # ensure the project exists @@ -89,7 +89,7 @@ class _ProjectNodePricingUnitPathParams(BaseModel): @handle_plugin_requests_exceptions async def connect_pricing_unit_to_project_node(request: web.Request): db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(request.app) - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as( _ProjectNodePricingUnitPathParams, request ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_rest.py index c7f98d6e3750..925ca516f2c0 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/nodes_rest.py @@ -2,7 +2,10 @@ import logging from aiohttp import web +from common_library.error_codes import create_error_code from common_library.json_serialization import json_dumps +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs +from common_library.user_messages import user_message from models_library.api_schemas_catalog.service_access_rights import ( ServiceAccessRightsGet, ) @@ -23,21 +26,21 @@ NodeServiceGet, ProjectNodeServicesGet, ) +from models_library.basic_types import IDStr from models_library.groups import EVERYONE_GROUP_ID, Group, GroupID, GroupType from models_library.projects import Project, ProjectID from models_library.projects_nodes_io import NodeID, NodeIDStr +from models_library.rest_error import ErrorGet from models_library.services import ServiceKeyVersion from models_library.services_resources import ServiceResourcesDict from models_library.services_types import ServiceKey, ServiceVersion from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import BaseModel, Field from servicelib.aiohttp import status -from servicelib.aiohttp.long_running_tasks.server import ( - TaskProgress, - start_long_running_task, -) +from servicelib.aiohttp.long_running_tasks.server import start_long_running_task from servicelib.aiohttp.requests_validation import ( parse_request_body_as, + parse_request_headers_as, parse_request_path_parameters_as, parse_request_query_parameters_as, ) @@ -45,6 +48,8 @@ UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, X_SIMCORE_USER_AGENT, ) +from servicelib.long_running_tasks.models import TaskProgress +from servicelib.long_running_tasks.task import TaskRegistry from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from servicelib.rabbitmq import RPCServerError from servicelib.rabbitmq.rpc_interfaces.dynamic_scheduler.errors import ( @@ -57,11 +62,13 @@ from ..._meta import API_VTAG as VTAG from ...catalog 
import catalog_service from ...dynamic_scheduler import api as dynamic_scheduler_service -from ...groups.api import get_group_from_gid, list_all_user_groups_ids +from ...exception_handling import create_error_response +from ...groups import api as groups_service from ...groups.exceptions import GroupNotFoundError from ...login.decorators import login_required +from ...models import ClientSessionHeaderParams from ...security.decorators import permission_required -from ...users.api import get_user_id_from_gid, get_user_role +from ...users import users_service from ...utils_aiohttp import envelope_json_response, get_api_base_url from .. import _access_rights_service as access_rights_service from .. import _nodes_service, _projects_service, nodes_utils @@ -73,7 +80,7 @@ ProjectNodeResourcesInvalidError, ) from ._rest_exceptions import handle_plugin_requests_exceptions -from ._rest_schemas import ProjectPathParams, RequestContext +from ._rest_schemas import AuthenticatedRequestContext, ProjectPathParams _logger = logging.getLogger(__name__) @@ -94,9 +101,10 @@ class NodePathParams(ProjectPathParams): @permission_required("project.node.create") @handle_plugin_requests_exceptions async def create_node(request: web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) body = await parse_request_body_as(NodeCreate, request) + header_params = parse_request_headers_as(ClientSessionHeaderParams, request) if await _projects_service.is_service_deprecated( request.app, @@ -106,7 +114,7 @@ async def create_node(request: web.Request) -> web.Response: req_ctx.product_name, ): raise web.HTTPNotAcceptable( - reason=f"Service {body.service_key}:{body.service_version} is deprecated" + text=f"Service {body.service_key}:{body.service_version} is deprecated" ) # ensure the project exists @@ -125,6 +133,7 @@ async def create_node(request: web.Request) -> web.Response: body.service_key, body.service_version, body.service_id, + client_session_id=header_params.client_session_id, ) } assert NodeCreated.model_validate(data) is not None # nosec @@ -138,7 +147,7 @@ async def create_node(request: web.Request) -> web.Response: @handle_plugin_requests_exceptions # NOTE: Careful, this endpoint is actually "get_node_state," and it doesn't return a Node resource. async def get_node(request: web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) # ensure the project exists @@ -157,7 +166,7 @@ async def get_node(request: web.Request) -> web.Response: ): project_node = project["workbench"][f"{path_params.node_id}"] raise web.HTTPNotAcceptable( - reason=f"Service {project_node['key']}:{project_node['version']} is deprecated!" + text=f"Service {project_node['key']}:{project_node['version']} is deprecated!" 
) service_data: NodeGetIdle | NodeGetUnknown | DynamicServiceGet | NodeGet = ( @@ -176,9 +185,10 @@ async def get_node(request: web.Request) -> web.Response: @permission_required("project.node.update") @handle_plugin_requests_exceptions async def patch_project_node(request: web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) node_patch = await parse_request_body_as(NodePatch, request) + header_params = parse_request_headers_as(ClientSessionHeaderParams, request) await _projects_service.patch_project_node( request.app, @@ -188,6 +198,7 @@ async def patch_project_node(request: web.Request) -> web.Response: project_id=path_params.project_id, node_id=path_params.node_id, partial_node=node_patch.to_domain_model(), + client_session_id=header_params.client_session_id, ) return web.json_response(status=status.HTTP_204_NO_CONTENT) @@ -198,8 +209,9 @@ async def patch_project_node(request: web.Request) -> web.Response: @permission_required("project.node.delete") @handle_plugin_requests_exceptions async def delete_node(request: web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) + header_params = parse_request_headers_as(ClientSessionHeaderParams, request) # ensure the project exists await _projects_service.get_project_for_user( @@ -211,9 +223,10 @@ async def delete_node(request: web.Request) -> web.Response: request, path_params.project_id, req_ctx.user_id, - NodeIDStr(path_params.node_id), + f"{path_params.node_id}", req_ctx.product_name, product_api_base_url=get_api_base_url(request), + client_session_id=header_params.client_session_id, ) return web.json_response(status=status.HTTP_204_NO_CONTENT) @@ -247,9 +260,10 @@ async def retrieve_node(request: web.Request) -> web.Response: @permission_required("project.node.update") @handle_plugin_requests_exceptions async def update_node_outputs(request: web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) node_outputs = await parse_request_body_as(NodeOutputs, request) + header_params = parse_request_headers_as(ClientSessionHeaderParams, request) ui_changed_keys = set() ui_changed_keys.add(f"{path_params.node_id}") @@ -262,6 +276,7 @@ async def update_node_outputs(request: web.Request) -> web.Response: run_hash=None, node_errors=None, ui_changed_keys=ui_changed_keys, + client_session_id=header_params.client_session_id, ) return web.json_response(status=status.HTTP_204_NO_CONTENT) @@ -275,7 +290,7 @@ async def update_node_outputs(request: web.Request) -> web.Response: @handle_plugin_requests_exceptions async def start_node(request: web.Request) -> web.Response: """Has only effect on nodes associated to dynamic services""" - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) await _projects_service.start_project_node( @@ -291,27 +306,66 @@ async def start_node(request: web.Request) -> web.Response: async def _stop_dynamic_service_task( - _task_progress: TaskProgress, + progress: TaskProgress, *, app: web.Application, dynamic_service_stop: 
DynamicServiceStop, -): +) -> web.Response: + _ = progress # NOTE: _handle_project_nodes_exceptions only decorate handlers try: await dynamic_scheduler_service.stop_dynamic_service( app, dynamic_service_stop=dynamic_service_stop ) + project = await _projects_service.get_project_for_user( + app, + f"{dynamic_service_stop.project_id}", + dynamic_service_stop.user_id, + include_state=True, + ) + await _projects_service.notify_project_node_update( + app, project, dynamic_service_stop.node_id, errors=None + ) return web.json_response(status=status.HTTP_204_NO_CONTENT) except (RPCServerError, ServiceWaitingForManualInterventionError) as exc: - # in case there is an error reply as not found - raise web.HTTPNotFound(text=f"{exc}") from exc + error_code = getattr(exc, "error_code", None) or create_error_code(exc) + user_error_msg = user_message( + f"Could not stop dynamic service {dynamic_service_stop.project_id}.{dynamic_service_stop.node_id}" + ) + _logger.debug( + **create_troubleshooting_log_kwargs( + user_error_msg, + error=exc, + error_code=error_code, + error_context={ + "project_id": dynamic_service_stop.project_id, + "node_id": dynamic_service_stop.node_id, + "user_id": dynamic_service_stop.user_id, + "save_state": dynamic_service_stop.save_state, + "simcore_user_agent": dynamic_service_stop.simcore_user_agent, + }, + ) + ) + # ANE: in case there is an error reply as not found + return create_error_response( + error=ErrorGet( + message=user_error_msg, + support_id=IDStr(error_code), + status=status.HTTP_404_NOT_FOUND, + ), + status_code=status.HTTP_404_NOT_FOUND, + ) except ServiceWasNotFoundError: # in case the service is not found reply as all OK return web.json_response(status=status.HTTP_204_NO_CONTENT) +def register_stop_dynamic_service_task(app: web.Application) -> None: + TaskRegistry.register(_stop_dynamic_service_task, app=app) + + @routes.post( f"/{VTAG}/projects/{{project_id}}/nodes/{{node_id}}:stop", name="stop_node" ) @@ -320,7 +374,7 @@ async def _stop_dynamic_service_task( @handle_plugin_requests_exceptions async def stop_node(request: web.Request) -> web.Response: """Has only effect on nodes associated to dynamic services""" - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) save_state = await has_user_project_access_rights( @@ -330,16 +384,15 @@ async def stop_node(request: web.Request) -> web.Response: permission="write", ) - user_role = await get_user_role(request.app, user_id=req_ctx.user_id) + user_role = await users_service.get_user_role(request.app, user_id=req_ctx.user_id) if user_role is None or user_role <= UserRole.GUEST: save_state = False return await start_long_running_task( request, - _stop_dynamic_service_task, # type: ignore[arg-type] # @GitHK, @pcrespov this one I don't know how to fix + _stop_dynamic_service_task.__name__, task_context=jsonable_encoder(req_ctx), # task arguments from here on --- - app=request.app, dynamic_service_stop=DynamicServiceStop( user_id=req_ctx.user_id, project_id=path_params.project_id, @@ -385,7 +438,7 @@ async def restart_node(request: web.Request) -> web.Response: @permission_required("project.node.read") @handle_plugin_requests_exceptions async def get_node_resources(request: web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) # 
ensure the project exists @@ -420,7 +473,7 @@ async def get_node_resources(request: web.Request) -> web.Response: @permission_required("project.node.update") @handle_plugin_requests_exceptions async def replace_node_resources(request: web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) body = await parse_request_body_as(ServiceResourcesDict, request) @@ -449,13 +502,11 @@ async def replace_node_resources(request: web.Request) -> web.Response: return envelope_json_response(new_node_resources) except ProjectNodeResourcesInvalidError as exc: raise web.HTTPUnprocessableEntity( # 422 - reason=f"{exc}", text=f"{exc}", content_type=MIMETYPE_APPLICATION_JSON, ) from exc except ProjectNodeResourcesInsufficientRightsError as exc: raise web.HTTPForbidden( - reason=f"{exc}", text=f"{exc}", content_type=MIMETYPE_APPLICATION_JSON, ) from exc @@ -479,7 +530,7 @@ class _ProjectGroupAccess(BaseModel): @permission_required("project.read") @handle_plugin_requests_exceptions async def get_project_services(request: web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) await access_rights_service.check_user_project_permission( @@ -522,7 +573,7 @@ async def get_project_services(request: web.Request) -> web.Response: @permission_required("project.read") @handle_plugin_requests_exceptions async def get_project_services_access_for_gid(request: web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) query_params: _ServicesAccessQuery = parse_request_query_parameters_as( _ServicesAccessQuery, request @@ -556,7 +607,7 @@ async def get_project_services_access_for_gid(request: web.Request) -> web.Respo groups_to_compare = {EVERYONE_GROUP_ID} # Get the group from the provided group ID - _sharing_with_group: Group | None = await get_group_from_gid( + _sharing_with_group: Group | None = await groups_service.get_group_by_gid( app=request.app, group_id=query_params.for_gid ) @@ -566,10 +617,10 @@ async def get_project_services_access_for_gid(request: web.Request) -> web.Respo # Update groups to compare based on the type of sharing group if _sharing_with_group.group_type == GroupType.PRIMARY: - _user_id = await get_user_id_from_gid( + _user_id = await users_service.get_user_id_from_gid( app=request.app, primary_gid=query_params.for_gid ) - user_groups_ids = await list_all_user_groups_ids( + user_groups_ids = await groups_service.list_all_user_groups_ids( app=request.app, user_id=_user_id ) groups_to_compare.update(set(user_groups_ids)) @@ -640,7 +691,7 @@ class _ProjectNodePreview(BaseModel): @permission_required("project.read") @handle_plugin_requests_exceptions async def list_project_nodes_previews(request: web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) assert req_ctx # nosec @@ -680,7 +731,7 @@ async def list_project_nodes_previews(request: web.Request) -> web.Response: @permission_required("project.read") @handle_plugin_requests_exceptions async def 
get_project_node_preview(request: web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(NodePathParams, request) assert req_ctx # nosec diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/ports_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/ports_rest.py index 9e49a471c165..f9336f6e7c31 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/ports_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/ports_rest.py @@ -17,11 +17,13 @@ from pydantic import BaseModel, Field, TypeAdapter from servicelib.aiohttp.requests_validation import ( parse_request_body_as, + parse_request_headers_as, parse_request_path_parameters_as, ) from ..._meta import API_VTAG as VTAG from ...login.decorators import login_required +from ...models import ClientSessionHeaderParams from ...security.decorators import permission_required from ...utils_aiohttp import envelope_json_response from .. import _ports_service, _projects_service @@ -29,7 +31,7 @@ from .._projects_repository_legacy import ProjectDBAPI from ..models import ProjectDict from ._rest_exceptions import handle_plugin_requests_exceptions -from ._rest_schemas import ProjectPathParams, RequestContext +from ._rest_schemas import AuthenticatedRequestContext, ProjectPathParams log = logging.getLogger(__name__) @@ -59,7 +61,7 @@ async def _get_validated_workbench_model( @permission_required("project.read") @handle_plugin_requests_exceptions async def get_project_inputs(request: web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) assert request.app # nosec @@ -85,9 +87,10 @@ async def get_project_inputs(request: web.Request) -> web.Response: @handle_plugin_requests_exceptions async def update_project_inputs(request: web.Request) -> web.Response: db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(request.app) - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) inputs_updates = await parse_request_body_as(list[ProjectInputUpdate], request) + header_params = parse_request_headers_as(ClientSessionHeaderParams, request) assert request.app # nosec @@ -123,6 +126,7 @@ async def update_project_inputs(request: web.Request) -> web.Response: project_uuid=path_params.project_id, product_name=req_ctx.product_name, partial_workbench_data=jsonable_encoder(partial_workbench_data), + client_session_id=header_params.client_session_id, ) workbench = TypeAdapter(dict[NodeID, Node]).validate_python( @@ -150,7 +154,7 @@ async def update_project_inputs(request: web.Request) -> web.Response: @permission_required("project.read") @handle_plugin_requests_exceptions async def get_project_outputs(request: web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) assert request.app # nosec @@ -197,7 +201,7 @@ class ProjectMetadataPortGet(BaseModel): @permission_required("project.read") @handle_plugin_requests_exceptions async def list_project_metadata_ports(request: 
web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) assert request.app # nosec diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest.py index a403a96ecf48..4168ec608def 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest.py @@ -10,7 +10,6 @@ ProjectPatch, ) from models_library.generics import Envelope -from models_library.projects_state import ProjectLocked from models_library.rest_ordering import OrderBy from models_library.utils.fastapi_encoders import jsonable_encoder from servicelib.aiohttp import status @@ -28,23 +27,22 @@ from servicelib.redis import get_project_locked_state from ..._meta import API_VTAG as VTAG -from ...catalog import catalog_service from ...login.decorators import login_required +from ...models import ClientSessionHeaderParams from ...redis import get_redis_lock_manager_client_sdk from ...resource_manager.user_sessions import PROJECT_ID_KEY, managed_resource -from ...security.api import check_user_permission +from ...security import security_web from ...security.decorators import permission_required -from ...users.api import get_user_fullname +from ...users import users_service from ...utils_aiohttp import envelope_json_response, get_api_base_url from .. import _crud_api_create, _crud_api_read, _projects_service from .._permalink_service import update_or_pop_permalink_in_project from ..models import ProjectDict -from ..utils import get_project_unavailable_services, project_uses_available_services from . import _rest_utils from ._rest_exceptions import handle_plugin_requests_exceptions from ._rest_schemas import ( + AuthenticatedRequestContext, ProjectPathParams, - RequestContext, ) from .projects_rest_schemas import ( ProjectActiveQueryParams, @@ -55,12 +53,6 @@ ProjectsSearchQueryParams, ) -# When the user requests a project with a repo, the working copy might differ from -# the repo project. A middleware in the meta module (if active) will resolve -# the working copy and redirect to the appropriate project entrypoint. 
Nonetheless, the -# response needs to refer to the uuid of the request and this is passed through this request key -RQ_REQUESTED_REPO_PROJECT_UUID_KEY = f"{__name__}.RQT_REQUESTED_REPO_PROJECT_UUID_KEY" - _logger = logging.getLogger(__name__) @@ -76,13 +68,13 @@ async def create_project(request: web.Request): # # - Create https://google.aip.dev/133 # - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) query_params: ProjectCreateQueryParams = parse_request_query_parameters_as( ProjectCreateQueryParams, request ) header_params = parse_request_headers_as(ProjectCreateHeaders, request) if query_params.as_template: # create template from - await check_user_permission(request, "project.template.create") + await security_web.check_user_permission(request, "project.template.create") # NOTE: Having so many different types of bodys is an indication that # this entrypoint are in reality multiple entrypoints in one, namely @@ -105,11 +97,12 @@ async def create_project(request: web.Request): return await start_long_running_task( request, - _crud_api_create.create_project, # type: ignore[arg-type] # @GitHK, @pcrespov this one I don't know how to fix + _crud_api_create.create_project.__name__, fire_and_forget=True, task_context=jsonable_encoder(req_ctx), # arguments - request=request, + request_url=request.url, + request_headers=dict(request.headers), new_project_was_hidden_before_data_was_copied=query_params.hidden, from_study=query_params.from_study, as_template=query_params.as_template, @@ -138,7 +131,7 @@ async def list_projects(request: web.Request): web.HTTPUnprocessableEntity: (422) if validation of request parameters fail """ - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) query_params: ProjectsListQueryParams = parse_request_query_parameters_as( ProjectsListQueryParams, request ) @@ -166,7 +159,7 @@ async def list_projects(request: web.Request): ) projects = await _rest_utils.aggregate_data_to_projects_from_request( - request, projects + request.app, request.url, dict(request.headers), projects ) return _rest_utils.create_page_response( @@ -183,7 +176,7 @@ async def list_projects(request: web.Request): @permission_required("project.read") @handle_plugin_requests_exceptions async def list_projects_full_search(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) query_params: ProjectsSearchQueryParams = parse_request_query_parameters_as( ProjectsSearchQueryParams, request ) @@ -195,6 +188,8 @@ async def list_projects_full_search(request: web.Request): user_id=req_ctx.user_id, product_name=req_ctx.product_name, trashed=query_params.filters.trashed, + filter_by_project_type=query_params.project_type, + filter_by_template_type=query_params.template_type, search_by_multi_columns=query_params.text, search_by_project_name=query_params.filters.search_by_project_name, offset=query_params.offset, @@ -203,7 +198,7 @@ async def list_projects_full_search(request: web.Request): ) projects = await _rest_utils.aggregate_data_to_projects_from_request( - request, projects + request.app, request.url, dict(request.headers), projects ) return _rest_utils.create_page_response( @@ -230,7 +225,7 @@ async def get_active_project(request: web.Request) -> web.Response: web.HTTPUnprocessableEntity: (422) if validation of request parameters fail web.HTTPNotFound: If active project is not found """ - req_ctx = 
RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) query_params: ProjectActiveQueryParams = parse_request_query_parameters_as( ProjectActiveQueryParams, request ) @@ -253,7 +248,9 @@ async def get_active_project(request: web.Request) -> web.Response: ) # updates project's permalink field - await update_or_pop_permalink_in_project(request, project) + await update_or_pop_permalink_in_project( + request.app, request.url, dict(request.headers), project + ) data = ProjectGet.from_domain_model(project).data(exclude_unset=True) @@ -274,15 +271,9 @@ async def get_project(request: web.Request): web.HTTPNotFound: This project was not found """ - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) - user_available_services: list[dict] = ( - await catalog_service.get_services_for_user_in_product( - request.app, req_ctx.user_id, req_ctx.product_name, only_key_versions=True - ) - ) - project = await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_id}", @@ -290,26 +281,11 @@ async def get_project(request: web.Request): include_state=True, include_trashed_by_primary_gid=True, ) - if not await project_uses_available_services(project, user_available_services): - unavilable_services = get_project_unavailable_services( - project, user_available_services - ) - formatted_services = ", ".join( - f"{service}:{version}" for service, version in unavilable_services - ) - # TODO: lack of permissions should be notified with https://httpstatuses.com/403 web.HTTPForbidden - raise web.HTTPNotFound( - reason=( - f"Project '{path_params.project_id}' uses unavailable services. 
Please ask " - f"for permission for the following services {formatted_services}" - ) - ) - - if new_uuid := request.get(RQ_REQUESTED_REPO_PROJECT_UUID_KEY): - project["uuid"] = new_uuid # Adds permalink - await update_or_pop_permalink_in_project(request, project) + await update_or_pop_permalink_in_project( + request.app, request.url, dict(request.headers), project + ) data = ProjectGet.from_domain_model(project).data(exclude_unset=True) return envelope_json_response(data) @@ -339,16 +315,18 @@ async def patch_project(request: web.Request): # # Update https://google.aip.dev/134 # - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) project_patch = await parse_request_body_as(ProjectPatch, request) + header_params = parse_request_headers_as(ClientSessionHeaderParams, request) - await _projects_service.patch_project( + await _projects_service.patch_project_for_user( request.app, user_id=req_ctx.user_id, project_uuid=path_params.project_id, project_patch=project_patch, product_name=req_ctx.product_name, + client_session_id=header_params.client_session_id, ) return web.json_response(status=status.HTTP_204_NO_CONTENT) @@ -372,7 +350,7 @@ async def delete_project(request: web.Request): web.HTTPConflict: Something went wrong while deleting web.HTTPNoContent: Success """ - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) await _projects_service.get_project_for_user( @@ -391,26 +369,25 @@ async def delete_project(request: web.Request): # that project is still in use if req_ctx.user_id in project_users: raise web.HTTPForbidden( - reason="Project is still open in another tab/browser." + text="Project is still open in another tab/browser." "It cannot be deleted until it is closed." ) if project_users: other_user_names = { - f"{await get_user_fullname(request.app, user_id=uid)}" + f"{await users_service.get_user_fullname(request.app, user_id=uid)}" for uid in project_users } raise web.HTTPForbidden( - reason=f"Project is open by {other_user_names}. " + text=f"Project is open by {other_user_names}. " "It cannot be deleted until the project is closed."
) - project_locked_state: ProjectLocked | None if project_locked_state := await get_project_locked_state( get_redis_lock_manager_client_sdk(request.app), project_uuid=path_params.project_id, ): raise web.HTTPConflict( - reason=f"Project {path_params.project_id} is locked: {project_locked_state=}" + text=f"Project {path_params.project_id} is locked: {project_locked_state=}" ) await _projects_service.submit_delete_project_task( @@ -438,16 +415,17 @@ async def delete_project(request: web.Request): @permission_required("services.pipeline.*") # due to update_pipeline_db @handle_plugin_requests_exceptions async def clone_project(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) return await start_long_running_task( request, - _crud_api_create.create_project, # type: ignore[arg-type] # @GitHK, @pcrespov this one I don't know how to fix + _crud_api_create.create_project.__name__, fire_and_forget=True, task_context=jsonable_encoder(req_ctx), # arguments - request=request, + request_url=request.url, + request_headers=dict(request.headers), new_project_was_hidden_before_data_was_copied=False, from_study=path_params.project_id, as_template=False, diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest_schemas.py b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest_schemas.py index 4b7bbff32374..4695e8b602d8 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest_schemas.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rest_schemas.py @@ -207,6 +207,8 @@ class ProjectSearchExtraQueryParams( PageQueryParameters, FiltersQueryParameters[ProjectFilters], ): + project_type: Annotated[ProjectTypeAPI, Field(alias="type")] = ProjectTypeAPI.all + template_type: ProjectTemplateType | None = None text: Annotated[ str | None, Field( @@ -227,6 +229,20 @@ class ProjectSearchExtraQueryParams( empty_str_to_none_pre_validator ) + _template_type_null_or_none_str_to_none_validator = field_validator( + "template_type", mode="before" + )(null_or_none_str_to_none_validator) + + @model_validator(mode="after") + def _check_template_type_compatibility(self): + if ( + self.project_type in [ProjectTypeAPI.all, ProjectTypeAPI.user] + and self.template_type is not None + ): + msg = f"When {self.project_type=} is `all` or `user` the {self.template_type=} should be None" + raise ValueError(msg) + return self + class ProjectsSearchQueryParams( ProjectSearchExtraQueryParams, ProjectsListOrderParams # type: ignore[misc, valid-type] diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rpc.py b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rpc.py index e511d4dd4980..8cece3b3efef 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rpc.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_rpc.py @@ -1,5 +1,4 @@ from aiohttp import web -from models_library.api_schemas_webserver import WEBSERVER_RPC_NAMESPACE from models_library.products import ProductName from models_library.projects import ProjectID from models_library.rest_pagination import PageLimitInt, PageOffsetInt @@ -16,7 +15,7 @@ ProjectNotFoundRpcError, ) -from ...rabbitmq import get_rabbitmq_rpc_server +from 
...rabbitmq import create_register_rpc_routes_on_startup from .. import _jobs_service from ..exceptions import ProjectInvalidRightsError, ProjectNotFoundError @@ -38,6 +37,7 @@ async def mark_project_as_job( user_id: UserID, project_uuid: ProjectID, job_parent_resource_name: str, + storage_assets_deleted: bool, ) -> None: try: @@ -48,6 +48,7 @@ async def mark_project_as_job( user_id=user_id, project_uuid=project_uuid, job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=storage_assets_deleted, ) except ProjectInvalidRightsError as err: raise ProjectForbiddenRpcError.from_domain_error(err) from err @@ -97,6 +98,7 @@ async def list_projects_marked_as_jobs( created_at=project.creation_date, modified_at=project.last_change_date, job_parent_resource_name=project.job_parent_resource_name, + storage_assets_deleted=project.storage_assets_deleted, ) for project in projects ] @@ -111,6 +113,4 @@ async def list_projects_marked_as_jobs( return page -async def register_rpc_routes_on_startup(app: web.Application): - rpc_server = get_rabbitmq_rpc_server(app) - await rpc_server.register_router(router, WEBSERVER_RPC_NAMESPACE, app) +register_rpc_routes_on_startup = create_register_rpc_routes_on_startup(router) diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_slot.py b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_slot.py index 8537fbc66168..e12d9564c286 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_slot.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_slot.py @@ -5,6 +5,7 @@ from aiohttp import web from models_library.products import ProductName from models_library.projects import ProjectID +from models_library.users import UserID from servicelib.aiohttp.observer import ( registed_observers_report, register_observer, @@ -20,8 +21,40 @@ _logger = logging.getLogger(__name__) +async def _on_user_connected( + user_id: UserID, + app: web.Application, + product_name: ProductName, + client_session_id: str, +) -> None: + assert product_name # nosec + # check if there is a project resource + with managed_resource(user_id, client_session_id, app) as user_session: + projects = await user_session.find(PROJECT_ID_KEY) + assert len(projects) <= 1, "At the moment, at most one project per session" # nosec + + if projects: + with log_context( + _logger, + logging.DEBUG, + msg=f"user connects and subscribes to following {projects=}", + ): + await logged_gather( + *[project_logs.subscribe(app, ProjectID(prj)) for prj in projects] + ) + + await logged_gather( + *[ + retrieve_and_notify_project_locked_state( + user_id, prj, app, notify_only_prj_user=True + ) + for prj in projects + ] + ) + + async def _on_user_disconnected( - user_id: int, + user_id: UserID, client_session_id: str, app: web.Application, product_name: ProductName, @@ -30,7 +63,7 @@ async def _on_user_disconnected( # check if there is a project resource with managed_resource(user_id, client_session_id, app) as user_session: - projects: list[str] = await user_session.find(PROJECT_ID_KEY) + projects = await user_session.find(PROJECT_ID_KEY) assert len(projects) <= 1, "At the moment, at most one project per session" # nosec @@ -56,6 +89,7 @@ async def _on_user_disconnected( def setup_project_observer_events(app: web.Application) -> None: setup_observer_registry(app) + register_observer(app, _on_user_connected, event="SIGNAL_USER_CONNECTED") register_observer(app, 
_on_user_disconnected, event="SIGNAL_USER_DISCONNECTED") _logger.info( diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py index 4e1009addce6..83336ddec8a0 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/projects_states_rest.py @@ -3,8 +3,9 @@ import logging from aiohttp import web +from common_library.user_messages import user_message from models_library.api_schemas_webserver.projects import ProjectGet -from models_library.projects_state import ProjectState +from models_library.api_schemas_webserver.socketio import SocketIORoomStr from pydantic import BaseModel from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import ( @@ -16,22 +17,26 @@ UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, X_SIMCORE_USER_AGENT, ) +from servicelib.rest_constants import RESPONSE_MODEL_POLICY from simcore_postgres_database.models.users import UserRole from simcore_postgres_database.webserver_models import ProjectType from ..._meta import API_VTAG as VTAG +from ...application_settings import get_application_settings from ...director_v2.exceptions import DirectorV2ServiceError from ...login.decorators import login_required from ...notifications import project_logs from ...products import products_web from ...products.models import Product +from ...resource_manager.user_sessions import managed_resource from ...security.decorators import permission_required -from ...users import api +from ...socketio.server import get_socket_server +from ...users import users_service from ...utils_aiohttp import envelope_json_response, get_api_base_url from .. 
import _projects_service, projects_wallets_service from ..exceptions import ProjectStartsTooManyDynamicNodesError from ._rest_exceptions import handle_plugin_requests_exceptions -from ._rest_schemas import ProjectPathParams, RequestContext +from ._rest_schemas import AuthenticatedRequestContext, ProjectPathParams _logger = logging.getLogger(__name__) @@ -53,7 +58,7 @@ class _OpenProjectQuery(BaseModel): @permission_required("project.open") @handle_plugin_requests_exceptions async def open_project(request: web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) query_params: _OpenProjectQuery = parse_request_query_parameters_as( _OpenProjectQuery, request @@ -69,7 +74,7 @@ async def open_project(request: web.Request) -> web.Response: project_type: ProjectType = await _projects_service.get_project_type( request.app, path_params.project_id ) - user_role: UserRole = await api.get_user_role( + user_role: UserRole = await users_service.get_user_role( request.app, user_id=req_ctx.user_id ) if project_type is ProjectType.TEMPLATE and user_role < UserRole.USER: @@ -93,16 +98,41 @@ async def open_project(request: web.Request) -> web.Response: ) product: Product = products_web.get_current_product(request) + app_settings = get_application_settings(request.app) if not await _projects_service.try_open_project_for_user( req_ctx.user_id, project_uuid=path_params.project_id, client_session_id=client_session_id, app=request.app, - max_number_of_studies_per_user=product.max_open_studies_per_user, + max_number_of_opened_projects_per_user=product.max_open_studies_per_user, + allow_multiple_sessions=app_settings.WEBSERVER_REALTIME_COLLABORATION + is not None, + max_number_of_user_sessions_per_project=( + app_settings.WEBSERVER_REALTIME_COLLABORATION.RTC_MAX_NUMBER_OF_USERS + if app_settings.WEBSERVER_REALTIME_COLLABORATION + else None + ), ): raise HTTPLockedError(text="Project is locked, try later") + # Connect the socket_id to a project room + with managed_resource( + req_ctx.user_id, client_session_id, request.app + ) as user_session: + _socket_id = await user_session.get_socket_id() + if _socket_id is None: + raise web.HTTPUnprocessableEntity( + text=user_message( + "Data corruption detected: unable to identify your session (socket_id missing). " + "Please refresh the page and try again. If the problem persists, contact support." 
+ ) + ) + sio = get_socket_server(request.app) + await sio.enter_room( + _socket_id, SocketIORoomStr.from_project_id(path_params.project_id) + ) + # the project can be opened, let's update its product links await _projects_service.update_project_linked_product( request.app, path_params.project_id, req_ctx.product_name @@ -132,10 +162,7 @@ async def open_project(request: web.Request) -> web.Response: # notify users that project is now opened project = await _projects_service.add_project_states_for_user( - user_id=req_ctx.user_id, - project=project, - is_template=False, - app=request.app, + user_id=req_ctx.user_id, project=project, app=request.app ) await _projects_service.notify_project_state_update(request.app, project) @@ -144,9 +171,9 @@ async def open_project(request: web.Request) -> web.Response: except DirectorV2ServiceError as exc: # there was an issue while accessing the director-v2/director-v0 # ensure the project is closed again - await _projects_service.try_close_project_for_user( + await _projects_service.close_project_for_user( user_id=req_ctx.user_id, - project_uuid=f"{path_params.project_id}", + project_uuid=path_params.project_id, client_session_id=client_session_id, app=request.app, simcore_user_agent=request.headers.get( @@ -154,7 +181,7 @@ async def open_project(request: web.Request) -> web.Response: ), ) raise web.HTTPServiceUnavailable( - reason="Unexpected error while starting services." + text="Unexpected error while starting services." ) from exc @@ -168,7 +195,7 @@ async def open_project(request: web.Request) -> web.Response: @permission_required("project.close") @handle_plugin_requests_exceptions async def close_project(request: web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) try: @@ -184,9 +211,9 @@ async def close_project(request: web.Request) -> web.Response: user_id=req_ctx.user_id, include_state=False, ) - await _projects_service.try_close_project_for_user( + await _projects_service.close_project_for_user( req_ctx.user_id, - f"{path_params.project_id}", + path_params.project_id, client_session_id, request.app, simcore_user_agent=request.headers.get( @@ -207,15 +234,20 @@ async def close_project(request: web.Request) -> web.Response: @permission_required("project.read") @handle_plugin_requests_exceptions async def get_project_state(request: web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) # check that project exists and queries state - validated_project = await _projects_service.get_project_for_user( + project = await _projects_service.get_project_for_user( request.app, project_uuid=f"{path_params.project_id}", user_id=req_ctx.user_id, include_state=True, ) - project_state = ProjectState(**validated_project["state"]) - return envelope_json_response(project_state.model_dump()) + project_state = ProjectGet.from_domain_model(project).state + assert project_state # nosec + return envelope_json_response( + project_state.model_dump( + **RESPONSE_MODEL_POLICY, + ) + ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/tags_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/tags_rest.py index ac38405f9ad0..eec389988655 100644 --- 
a/services/web/server/src/simcore_service_webserver/projects/_controller/tags_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/tags_rest.py @@ -2,7 +2,7 @@ from aiohttp import web from models_library.projects import ProjectID -from servicelib.request_keys import RQT_USERID_KEY +from servicelib.aiohttp.request_keys import RQT_USERID_KEY from ..._meta import API_VTAG from ...login.decorators import login_required diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/trash_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/trash_rest.py index f1cae188a7de..f7e488e92d35 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/trash_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/trash_rest.py @@ -1,6 +1,7 @@ import logging from aiohttp import web +from common_library.user_messages import user_message from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import ( parse_request_path_parameters_as, @@ -28,11 +29,15 @@ _TRASH_ERRORS: ExceptionToHttpErrorMap = { ProjectRunningConflictError: HttpErrorInfo( status.HTTP_409_CONFLICT, - "Current study is in use and cannot be trashed [project_id={project_uuid}]. Please stop all services first and try again", + user_message( + "Current study is in use and cannot be trashed [project_id={project_uuid}]. Please stop all services first and try again" + ), ), ProjectStoppingError: HttpErrorInfo( status.HTTP_503_SERVICE_UNAVAILABLE, - "Something went wrong while stopping services before trashing. Aborting trash.", + user_message( + "Something went wrong while stopping services before trashing. Aborting trash." + ), ), } diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/wallets_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/wallets_rest.py index a2b734a20cb0..cd5e93be9bb6 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/wallets_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/wallets_rest.py @@ -19,7 +19,7 @@ from ...utils_aiohttp import envelope_json_response from .. 
import _projects_service, _wallets_service from ._rest_exceptions import handle_plugin_requests_exceptions -from ._rest_schemas import ProjectPathParams, RequestContext +from ._rest_schemas import AuthenticatedRequestContext, ProjectPathParams _logger = logging.getLogger(__name__) @@ -32,7 +32,7 @@ @permission_required("project.wallet.*") @handle_plugin_requests_exceptions async def get_project_wallet(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(ProjectPathParams, request) # ensure the project exists @@ -63,7 +63,7 @@ class _ProjectWalletPathParams(BaseModel): @permission_required("project.wallet.*") @handle_plugin_requests_exceptions async def connect_wallet_to_project(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProjectWalletPathParams, request) # ensure the project exists @@ -98,7 +98,7 @@ class _PayProjectDebtBody(BaseModel): @permission_required("project.wallet.*") @handle_plugin_requests_exceptions async def pay_project_debt(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_ProjectWalletPathParams, request) body_params = await parse_request_body_as(_PayProjectDebtBody, request) @@ -116,7 +116,7 @@ async def pay_project_debt(request: web.Request): ) if not current_wallet: raise web.HTTPNotFound( - reason="Project doesn't have any wallet associated to the project" + text="Project doesn't have any wallet associated to the project" ) if current_wallet.wallet_id == path_params.wallet_id: diff --git a/services/web/server/src/simcore_service_webserver/projects/_controller/workspaces_rest.py b/services/web/server/src/simcore_service_webserver/projects/_controller/workspaces_rest.py index 14b085c66b11..8ca8858c9837 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_controller/workspaces_rest.py +++ b/services/web/server/src/simcore_service_webserver/projects/_controller/workspaces_rest.py @@ -7,14 +7,18 @@ from models_library.workspaces import WorkspaceID from pydantic import BaseModel, BeforeValidator, ConfigDict, Field from servicelib.aiohttp import status -from servicelib.aiohttp.requests_validation import parse_request_path_parameters_as +from servicelib.aiohttp.requests_validation import ( + parse_request_headers_as, + parse_request_path_parameters_as, +) from ..._meta import api_version_prefix as VTAG from ...login.decorators import login_required +from ...models import ClientSessionHeaderParams from ...security.decorators import permission_required from .. 
import _workspaces_service from ._rest_exceptions import handle_plugin_requests_exceptions -from ._rest_schemas import RequestContext +from ._rest_schemas import AuthenticatedRequestContext _logger = logging.getLogger(__name__) @@ -39,10 +43,11 @@ class _ProjectWorkspacesPathParams(BaseModel): @permission_required("project.workspaces.*") @handle_plugin_requests_exceptions async def move_project_to_workspace(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as( _ProjectWorkspacesPathParams, request ) + header_params = parse_request_headers_as(ClientSessionHeaderParams, request) await _workspaces_service.move_project_into_workspace( app=request.app, @@ -50,5 +55,6 @@ async def move_project_to_workspace(request: web.Request): project_id=path_params.project_id, workspace_id=path_params.workspace_id, product_name=req_ctx.product_name, + client_session_id=header_params.client_session_id, ) return web.json_response(status=status.HTTP_204_NO_CONTENT) diff --git a/services/web/server/src/simcore_service_webserver/projects/_conversations_service.py b/services/web/server/src/simcore_service_webserver/projects/_conversations_service.py index 8bc393a6c945..c52e8ef01528 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_conversations_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_conversations_service.py @@ -8,6 +8,7 @@ ConversationMessageID, ConversationMessagePatchDB, ConversationMessageType, + ConversationName, ConversationPatchDB, ConversationType, ) @@ -42,7 +43,7 @@ async def create_project_conversation( product_name=product_name, user_id=user_id, project_id=project_uuid, - permission="read", + permission="write", ) return await conversations_service.create_conversation( app, @@ -51,6 +52,7 @@ async def create_project_conversation( project_uuid=project_uuid, name=name, type_=conversation_type, + extra_context={}, ) @@ -71,7 +73,7 @@ async def list_project_conversations( project_id=project_uuid, permission="read", ) - return await conversations_service.list_conversations_for_project( + return await conversations_service.list_project_conversations( app, project_uuid=project_uuid, offset=offset, @@ -87,17 +89,18 @@ async def update_project_conversation( project_uuid: ProjectID, conversation_id: ConversationID, # attributes - name: str, + name: ConversationName, ) -> ConversationGetDB: await check_user_project_permission( app, product_name=product_name, user_id=user_id, project_id=project_uuid, - permission="read", + permission="write", ) return await conversations_service.update_conversation( app, + project_id=project_uuid, conversation_id=conversation_id, updates=ConversationPatchDB(name=name), ) @@ -116,10 +119,14 @@ async def delete_project_conversation( product_name=product_name, user_id=user_id, project_id=project_uuid, - permission="read", + permission="write", ) await conversations_service.delete_conversation( - app, conversation_id=conversation_id + app, + product_name=product_name, + project_id=project_uuid, + user_id=user_id, + conversation_id=conversation_id, ) @@ -164,11 +171,13 @@ async def create_project_conversation_message( product_name=product_name, user_id=user_id, project_id=project_uuid, - permission="read", + permission="write", ) return await conversations_service.create_message( app, + product_name=product_name, user_id=user_id, + project_id=project_uuid, conversation_id=conversation_id, 
content=content, type_=message_type, @@ -217,10 +226,12 @@ async def update_project_conversation_message( product_name=product_name, user_id=user_id, project_id=project_uuid, - permission="read", + permission="write", ) return await conversations_service.update_message( app, + product_name=product_name, + project_id=project_uuid, conversation_id=conversation_id, message_id=message_id, updates=ConversationMessagePatchDB(content=content), @@ -241,10 +252,15 @@ async def delete_project_conversation_message( product_name=product_name, user_id=user_id, project_id=project_uuid, - permission="read", + permission="write", ) await conversations_service.delete_message( - app, conversation_id=conversation_id, message_id=message_id + app, + product_name=product_name, + user_id=user_id, + project_id=project_uuid, + conversation_id=conversation_id, + message_id=message_id, ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py index 102707b2821e..310ecb91022e 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_api_create.py @@ -17,7 +17,8 @@ from models_library.utils.fastapi_encoders import jsonable_encoder from models_library.workspaces import UserWorkspaceWithAccessRights from pydantic import TypeAdapter -from servicelib.aiohttp.long_running_tasks.server import TaskProgress +from servicelib.long_running_tasks.models import TaskProgress +from servicelib.long_running_tasks.task import TaskRegistry from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON from servicelib.redis import with_project_locked from servicelib.rest_constants import RESPONSE_MODEL_POLICY @@ -26,6 +27,7 @@ ProjectNodeCreate, ) from simcore_postgres_database.webserver_models import ProjectType as ProjectTypeDB +from yarl import URL from ..application_settings import get_application_settings from ..catalog import catalog_service @@ -37,10 +39,9 @@ copy_data_folders_from_project, get_project_total_size_simcore_s3, ) -from ..users.api import get_user_fullname from ..workspaces.api import check_user_workspace_access, get_user_workspace from ..workspaces.errors import WorkspaceAccessForbiddenError -from . import _folders_repository, _projects_service +from . import _folders_repository, _projects_repository, _projects_service from ._metadata_service import set_project_ancestors from ._permalink_service import update_or_pop_permalink_in_project from ._projects_repository_legacy import ProjectDBAPI @@ -62,7 +63,7 @@ ] -log = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) CopyFileCoro: TypeAlias = Coroutine[Any, Any, None] CopyProjectNodesCoro: TypeAlias = Coroutine[Any, Any, dict[NodeID, ProjectNodeCreate]] @@ -92,7 +93,7 @@ async def _prepare_project_copy( ) if project_data_size >= max_bytes: raise web.HTTPUnprocessableEntity( - reason=f"Source project data size is {project_data_size.human_readable()}." + text=f"Source project data size is {project_data_size.human_readable()}." f"This is larger than the maximum {max_bytes.human_readable()} allowed for copying." "TIP: Please reduce the study size or contact application support." 
) @@ -161,10 +162,10 @@ async def _copy_files_from_source_project( product_name: str, task_progress: TaskProgress, ): - _projects_repository = ProjectDBAPI.get_from_app_context(app) + _projects_repository_legacy = ProjectDBAPI.get_from_app_context(app) needs_lock_source_project: bool = ( - await _projects_repository.get_project_type( + await _projects_repository_legacy.get_project_type( TypeAdapter(ProjectID).validate_python(source_project["uuid"]) ) != ProjectTypeDB.TEMPLATE @@ -180,7 +181,7 @@ async def _copy() -> None: user_id=user_id, product_name=product_name, ): - task_progress.update( + await task_progress.update( message=( async_job_composed_result.status.progress.message.description if async_job_composed_result.status.progress.message @@ -202,9 +203,7 @@ async def _copy() -> None: get_redis_lock_manager_client_sdk(app), project_uuid=source_project["uuid"], status=ProjectStatus.CLONING, - owner=Owner( - user_id=user_id, **await get_user_fullname(app, user_id=user_id) - ), + owner=Owner(user_id=user_id), notification_cb=_projects_service.create_user_notification_cb( user_id, ProjectID(f"{source_project['uuid']}"), app ), @@ -249,9 +248,11 @@ async def _compose_project_data( async def create_project( # pylint: disable=too-many-arguments,too-many-branches,too-many-statements # noqa: C901, PLR0913 - task_progress: TaskProgress, + progress: TaskProgress, *, - request: web.Request, + app: web.Application, + request_url: URL, + request_headers: dict[str, str], new_project_was_hidden_before_data_was_copied: bool, from_study: ProjectID | None, as_template: bool, @@ -263,7 +264,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche simcore_user_agent: str, parent_project_uuid: ProjectID | None, parent_node_id: NodeID | None, -) -> None: +) -> web.HTTPCreated: """Implements TaskProtocol for 'create_projects' handler Arguments: @@ -278,28 +279,33 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche predefined_project -- project in request body Raises: - web.HTTPCreated: succeeded web.HTTPBadRequest: web.HTTPNotFound: web.HTTPUnauthorized: """ - assert request.app # nosec + _logger.info( + "create_project for '%s' with %s %s %s", + f"{user_id=}", + f"{predefined_project=}", + f"{product_name=}", + f"{from_study=}", + ) - _projects_repository = ProjectDBAPI.get_from_app_context(request.app) + _projects_repository_legacy = ProjectDBAPI.get_from_app_context(app) new_project: ProjectDict = {} copy_file_coro = None project_nodes = None try: - task_progress.update(message="creating new study...") + await progress.update(message="creating new study...") workspace_id = None folder_id = None if predefined_project: if workspace_id := predefined_project.get("workspaceId", None): await check_user_workspace_access( - request.app, + app, user_id=user_id, workspace_id=workspace_id, product_name=product_name, @@ -308,7 +314,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche if folder_id := predefined_project.get("folderId", None): # Check user has access to folder await folders_folders_repository.get_for_user_or_workspace( - request.app, + app, folder_id=folder_id, product_name=product_name, user_id=user_id if workspace_id is None else None, @@ -324,13 +330,13 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche project_node_coro, copy_file_coro, ) = await _prepare_project_copy( - request.app, + app, user_id=user_id, product_name=product_name, src_project_uuid=from_study, 
as_template=as_template, deep_copy=copy_data, - task_progress=task_progress, + task_progress=progress, ) if project_node_coro: project_nodes = await project_node_coro @@ -338,7 +344,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche # 1.2 does project belong to some folder? workspace_id = new_project["workspaceId"] prj_to_folder_db = await _folders_repository.get_project_to_folder( - request.app, + app, project_id=from_study, private_workspace_user_id_or_none=( user_id if workspace_id is None else None @@ -357,14 +363,14 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche if predefined_project: # 2. overrides with optional body and re-validate new_project, project_nodes = await _compose_project_data( - request.app, + app, user_id=user_id, new_project=new_project, predefined_project=predefined_project, ) # 3.1 save new project in DB - new_project = await _projects_repository.insert_project( + new_project = await _projects_repository_legacy.insert_project( project=jsonable_encoder(new_project), user_id=user_id, product_name=product_name, @@ -374,18 +380,18 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche ) # add parent linking if needed await set_project_ancestors( - request.app, + app, user_id=user_id, project_uuid=new_project["uuid"], parent_project_uuid=parent_project_uuid, parent_node_id=parent_node_id, ) - task_progress.update() + await progress.update() # 3.2 move project to proper folder if folder_id: await _folders_repository.insert_project_to_folder( - request.app, + app, project_id=new_project["uuid"], folder_id=folder_id, private_workspace_user_id_or_none=( @@ -400,43 +406,44 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche # 5. 
unhide the project if needed since it is now complete if not new_project_was_hidden_before_data_was_copied: - await _projects_repository.set_hidden_flag( - new_project["uuid"], hidden=False + await _projects_repository.patch_project( + app, + project_uuid=new_project["uuid"], + new_partial_project_data={"hidden": False}, ) # update the network information in director-v2 await dynamic_scheduler_service.update_projects_networks( - request.app, project_id=ProjectID(new_project["uuid"]) + app, project_id=ProjectID(new_project["uuid"]) ) - task_progress.update() + await progress.update() # This is a new project and every new graph needs to be reflected in the pipeline tables await director_v2_service.create_or_update_pipeline( - request.app, + app, user_id, new_project["uuid"], product_name, product_api_base_url, ) # get the latest state of the project (lastChangeDate for instance) - new_project, _ = await _projects_repository.get_project_dict_and_type( + new_project, _ = await _projects_repository_legacy.get_project_dict_and_type( project_uuid=new_project["uuid"] ) # Appends state new_project = await _projects_service.add_project_states_for_user( - user_id=user_id, - project=new_project, - is_template=as_template, - app=request.app, + user_id=user_id, project=new_project, app=app ) - task_progress.update() + await progress.update() # Adds permalink - await update_or_pop_permalink_in_project(request, new_project) + await update_or_pop_permalink_in_project( + app, request_url, request_headers, new_project + ) # Adds folderId user_specific_project_data_db = ( - await _projects_repository.get_user_specific_project_data_db( + await _projects_repository_legacy.get_user_specific_project_data_db( project_uuid=new_project["uuid"], private_workspace_user_id_or_none=( user_id if workspace_id is None else None @@ -448,7 +455,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche # Overwrite project access rights if workspace_id: workspace: UserWorkspaceWithAccessRights = await get_user_workspace( - request.app, + app, user_id=user_id, workspace_id=workspace_id, product_name=product_name, @@ -459,11 +466,22 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche for gid, access in workspace.access_rights.items() } + _project_product_name = await _projects_repository_legacy.get_project_product( + project_uuid=new_project["uuid"] + ) + assert ( + _project_product_name == product_name # nosec + ), "Project product name mismatch" + if _project_product_name != product_name: + raise web.HTTPBadRequest( + text=f"Project product name mismatch {product_name=} {_project_product_name=}" + ) + data = ProjectGet.from_domain_model(new_project).model_dump( **RESPONSE_MODEL_POLICY ) - raise web.HTTPCreated( + return web.HTTPCreated( text=json_dumps({"data": data}), content_type=MIMETYPE_APPLICATION_JSON, ) @@ -480,7 +498,7 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche except (ParentProjectNotFoundError, ParentNodeNotFoundError) as exc: if project_uuid := new_project.get("uuid"): await _projects_service.submit_delete_project_task( - app=request.app, + app=app, project_uuid=project_uuid, user_id=user_id, simcore_user_agent=simcore_user_agent, @@ -488,15 +506,28 @@ async def create_project( # pylint: disable=too-many-arguments,too-many-branche raise web.HTTPNotFound(text=f"{exc}") from exc except asyncio.CancelledError: - log.warning( + _logger.warning( "cancelled create_project for '%s'. 
Cleaning up", f"{user_id=}", ) if project_uuid := new_project.get("uuid"): await _projects_service.submit_delete_project_task( - app=request.app, + app=app, project_uuid=project_uuid, user_id=user_id, simcore_user_agent=simcore_user_agent, ) raise + + +def register_create_project_task(app: web.Application) -> None: + TaskRegistry.register( + create_project, + allowed_errors=( + web.HTTPUnprocessableEntity, + web.HTTPBadRequest, + web.HTTPNotFound, + web.HTTPForbidden, + ), + app=app, + ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_delete.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_delete.py index 866609110f30..423c94712dbb 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_delete.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_api_delete.py @@ -12,11 +12,13 @@ from aiohttp import web from models_library.projects import ProjectID from models_library.users import UserID +from servicelib.utils import fire_and_forget_task +from ..constants import APP_FIRE_AND_FORGET_TASKS_KEY from ..director_v2 import director_v2_service from ..storage.api import delete_data_folders_of_project -from ..users.api import FullNameDict from ..users.exceptions import UserNotFoundError +from . import _projects_repository from ._access_rights_service import check_user_project_permission from ._projects_repository_legacy import ProjectDBAPI from .exceptions import ( @@ -37,13 +39,12 @@ class RemoveProjectServicesCallable(Protocol): # NOTE: this function was tmp added here to avoid refactoring all projects_api in a single PR async def __call__( self, - user_id: int, - project_uuid: str, + user_id: UserID, + project_uuid: ProjectID, app: web.Application, simcore_user_agent: str, *, notify_users: bool = True, - user_name: FullNameDict | None = None, ) -> None: ... @@ -70,7 +71,11 @@ async def mark_project_as_deleted( # NOTE: if any of the steps below fail, it might results in a # services/projects/data that might be incosistent. The GC should # be able to detect that and resolve it. 
- await db.set_hidden_flag(project_uuid, hidden=True) + await _projects_repository.patch_project( + app, + project_uuid=project_uuid, + new_partial_project_data={"hidden": True}, + ) async def delete_project( @@ -102,7 +107,7 @@ async def delete_project( # - raises ProjectNotFoundError, UserNotFoundError, ProjectLockError await remove_project_dynamic_services( user_id=user_id, - project_uuid=f"{project_uuid}", + project_uuid=project_uuid, app=app, simcore_user_agent=simcore_user_agent, notify_users=False, @@ -120,12 +125,12 @@ async def delete_project( except ProjectLockError as err: raise ProjectDeleteError( - project_uuid=project_uuid, reason=f"Project currently in use {err}" + project_uuid=project_uuid, details=f"Project currently in use {err}" ) from err except (ProjectInvalidRightsError, ProjectNotFoundError, UserNotFoundError) as err: raise ProjectDeleteError( - project_uuid=project_uuid, reason=f"Invalid project state {err}" + project_uuid=project_uuid, details=f"Invalid project state {err}" ) from err @@ -184,8 +189,7 @@ def _log_state_when_done(fut: asyncio.Future): ) # ------ - - task = asyncio.create_task( + task = fire_and_forget_task( delete_project( app, project_uuid, @@ -193,12 +197,11 @@ def _log_state_when_done(fut: asyncio.Future): simcore_user_agent, remove_project_dynamic_services, ), - name=_DELETE_PROJECT_TASK_NAME.format(project_uuid, user_id), + task_suffix_name=_DELETE_PROJECT_TASK_NAME.format(project_uuid, user_id), + fire_and_forget_tasks_collection=app[APP_FIRE_AND_FORGET_TASKS_KEY], ) - assert task.get_name() == _DELETE_PROJECT_TASK_NAME.format( # nosec - project_uuid, user_id - ) + assert task in get_scheduled_tasks(project_uuid, user_id) # nosec task.add_done_callback(_log_state_when_done) return task @@ -209,5 +212,7 @@ def get_scheduled_tasks(project_uuid: ProjectID, user_id: UserID) -> list[asynci return [ task for task in asyncio.all_tasks() - if task.get_name() == _DELETE_PROJECT_TASK_NAME.format(project_uuid, user_id) + if task.get_name().endswith( + _DELETE_PROJECT_TASK_NAME.format(project_uuid, user_id) + ) ] diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py index 15cf85ed5911..de3c36898e23 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py @@ -10,25 +10,21 @@ from aiohttp import web from models_library.folders import FolderID, FolderQuery, FolderScope -from models_library.projects import ProjectID, ProjectTemplateType +from models_library.projects import ProjectTemplateType from models_library.rest_ordering import OrderBy from models_library.users import UserID from models_library.workspaces import WorkspaceID, WorkspaceQuery, WorkspaceScope from pydantic import NonNegativeInt from servicelib.utils import logged_gather -from simcore_postgres_database.models.projects import ProjectType -from simcore_postgres_database.webserver_models import ( - ProjectTemplateType as ProjectTemplateTypeDB, -) -from simcore_postgres_database.webserver_models import ProjectType as ProjectTypeDB -from ..catalog import catalog_service from ..folders import _folders_repository +from ..users import users_service from ..workspaces.api import check_user_workspace_access from . 
import _projects_service from ._access_rights_repository import batch_get_project_access_rights from ._projects_repository import batch_get_trashed_by_primary_gid from ._projects_repository_legacy import ProjectDBAPI +from ._projects_repository_legacy_utils import convert_to_schema_names from .models import ProjectDict, ProjectTypeAPI @@ -54,7 +50,6 @@ async def _aggregate_data_to_projects_from_other_sources( app: web.Application, *, db_projects: list[ProjectDict], - db_project_types: list[ProjectTypeDB], user_id: UserID, ) -> list[ProjectDict]: """ @@ -62,7 +57,7 @@ async def _aggregate_data_to_projects_from_other_sources( """ # updating `project.trashed_by_primary_gid` trashed_by_primary_gid_values = await batch_get_trashed_by_primary_gid( - app, projects_uuids=[ProjectID(p["uuid"]) for p in db_projects] + app, projects_uuids=[p["uuid"] for p in db_projects] ) _batch_update("trashed_by_primary_gid", trashed_by_primary_gid_values, db_projects) @@ -71,19 +66,16 @@ async def _aggregate_data_to_projects_from_other_sources( project_to_access_rights = await batch_get_project_access_rights( app=app, projects_uuids_with_workspace_id=[ - (ProjectID(p["uuid"]), p["workspaceId"]) for p in db_projects + (p["uuid"], p["workspaceId"]) for p in db_projects ], ) # udpating `project.state` update_state_per_project = [ _projects_service.add_project_states_for_user( - user_id=user_id, - project=prj, - is_template=prj_type == ProjectTypeDB.TEMPLATE, - app=app, + user_id=user_id, project=prj, app=app ) - for prj, prj_type in zip(db_projects, db_project_types, strict=False) + for prj in db_projects ] updated_projects: list[ProjectDict] = await _paralell_update( @@ -91,11 +83,29 @@ async def _aggregate_data_to_projects_from_other_sources( ) for project in updated_projects: - project["accessRights"] = project_to_access_rights[project["uuid"]] + project["accessRights"] = project_to_access_rights[f"{project['uuid']}"] return updated_projects +async def _legacy_convert_db_projects_to_api_projects( + app: web.Application, + db, + db_projects: list[dict[str, Any]], +) -> list[dict]: + """ + Converts db schema projects to API schema (legacy postprocessing). 
+ """ + api_projects: list[dict] = [] + for db_prj in db_projects: + db_prj_dict = db_prj + db_prj_dict.pop("product_name", None) + db_prj_dict["tags"] = await db.get_tags_by_project(project_id=f"{db_prj['id']}") + user_email = await users_service.get_user_email_legacy(app, db_prj["prj_owner"]) + api_projects.append(convert_to_schema_names(db_prj_dict, user_email)) + return api_projects + + async def list_projects( # pylint: disable=too-many-arguments app: web.Application, user_id: UserID, @@ -120,12 +130,6 @@ async def list_projects( # pylint: disable=too-many-arguments ) -> tuple[list[ProjectDict], int]: db = ProjectDBAPI.get_from_app_context(app) - user_available_services: list[dict] = ( - await catalog_service.get_services_for_user_in_product( - app, user_id, product_name, only_key_versions=True - ) - ) - workspace_is_private = True if workspace_id: await check_user_workspace_access( @@ -147,7 +151,7 @@ async def list_projects( # pylint: disable=too-many-arguments workspace_id=workspace_id, ) - db_projects, db_project_types, total_number_projects = await db.list_projects_dicts( + db_projects, total_number_projects = await db.list_projects_dicts( product_name=product_name, user_id=user_id, workspace_query=( @@ -164,10 +168,7 @@ async def list_projects( # pylint: disable=too-many-arguments ), # attrs filter_by_project_type=ProjectTypeAPI.to_project_type_db(project_type), - filter_by_template_type=( - ProjectTemplateTypeDB(template_type) if template_type else None - ), - filter_by_services=user_available_services, + filter_by_template_type=template_type, filter_trashed=trashed, filter_hidden=show_hidden, # composed attrs @@ -180,20 +181,26 @@ async def list_projects( # pylint: disable=too-many-arguments order_by=order_by, ) - projects = await _aggregate_data_to_projects_from_other_sources( - app, db_projects=db_projects, db_project_types=db_project_types, user_id=user_id + api_projects = await _legacy_convert_db_projects_to_api_projects( + app, db, db_projects + ) + + final_projects = await _aggregate_data_to_projects_from_other_sources( + app, db_projects=api_projects, user_id=user_id ) - return projects, total_number_projects + return final_projects, total_number_projects -async def list_projects_full_depth( +async def list_projects_full_depth( # pylint: disable=too-many-arguments app: web.Application, *, user_id: UserID, product_name: str, # attrs filter trashed: bool | None, + filter_by_project_type: ProjectTypeAPI, + filter_by_template_type: ProjectTemplateType | None, # pagination offset: NonNegativeInt, limit: int, @@ -204,20 +211,16 @@ async def list_projects_full_depth( ) -> tuple[list[ProjectDict], int]: db = ProjectDBAPI.get_from_app_context(app) - user_available_services: list[dict] = ( - await catalog_service.get_services_for_user_in_product( - app, user_id, product_name, only_key_versions=True - ) - ) - - db_projects, db_project_types, total_number_projects = await db.list_projects_dicts( + db_projects, total_number_projects = await db.list_projects_dicts( product_name=product_name, user_id=user_id, workspace_query=WorkspaceQuery(workspace_scope=WorkspaceScope.ALL), folder_query=FolderQuery(folder_scope=FolderScope.ALL), filter_trashed=trashed, - filter_by_services=user_available_services, - filter_by_project_type=ProjectType.STANDARD, + filter_by_project_type=ProjectTypeAPI.to_project_type_db( + filter_by_project_type + ), + filter_by_template_type=filter_by_template_type, search_by_multi_columns=search_by_multi_columns, search_by_project_name=search_by_project_name, 
offset=offset, @@ -225,8 +228,12 @@ async def list_projects_full_depth( order_by=order_by, ) - projects = await _aggregate_data_to_projects_from_other_sources( - app, db_projects=db_projects, db_project_types=db_project_types, user_id=user_id + api_projects = await _legacy_convert_db_projects_to_api_projects( + app, db, db_projects + ) + + final_projects = await _aggregate_data_to_projects_from_other_sources( + app, db_projects=api_projects, user_id=user_id ) - return projects, total_number_projects + return final_projects, total_number_projects diff --git a/services/web/server/src/simcore_service_webserver/projects/_folders_repository.py b/services/web/server/src/simcore_service_webserver/projects/_folders_repository.py index 6c1755eb2450..8270bdcb7ba9 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_folders_repository.py +++ b/services/web/server/src/simcore_service_webserver/projects/_folders_repository.py @@ -13,7 +13,7 @@ from sqlalchemy.ext.asyncio import AsyncConnection from sqlalchemy.sql import select -from ..db.plugin import get_asyncpg_engine, get_database_engine +from ..db.plugin import get_asyncpg_engine, get_database_engine_legacy _logger = logging.getLogger(__name__) @@ -38,7 +38,7 @@ async def insert_project_to_folder( folder_id: FolderID, private_workspace_user_id_or_none: UserID | None, ) -> ProjectToFolderDB: - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: result = await conn.execute( projects_to_folders.insert() .values( @@ -71,7 +71,7 @@ async def get_project_to_folder( & (projects_to_folders.c.user_id == private_workspace_user_id_or_none) ) - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: result = await conn.execute(stmt) row = await result.first() if row is None: @@ -85,7 +85,7 @@ async def delete_project_to_folder( folder_id: FolderID, private_workspace_user_id_or_none: UserID | None, ) -> None: - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: await conn.execute( projects_to_folders.delete().where( (projects_to_folders.c.project_uuid == f"{project_id}") diff --git a/services/web/server/src/simcore_service_webserver/projects/_folders_service.py b/services/web/server/src/simcore_service_webserver/projects/_folders_service.py index 88659d68ac5f..f68b2b15c4c4 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_folders_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_folders_service.py @@ -39,7 +39,7 @@ async def move_project_into_folder( raise ProjectInvalidRightsError( user_id=user_id, project_uuid=project_id, - reason=f"User does not have write access to project {project_id}", + details=f"User does not have write access to project {project_id}", ) workspace_is_private = False diff --git a/services/web/server/src/simcore_service_webserver/projects/_groups_repository.py b/services/web/server/src/simcore_service_webserver/projects/_groups_repository.py index 00f7d467054a..bcdbd668a32b 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_groups_repository.py +++ b/services/web/server/src/simcore_service_webserver/projects/_groups_repository.py @@ -101,7 +101,7 @@ async def get_project_group( row = await result.first() if row is None: raise ProjectGroupNotFoundError( - reason=f"Project {project_id} group {group_id} not found" + details=f"Project {project_id} group 
{group_id} not found" ) return ProjectGroupGetDB.model_validate(row) @@ -136,7 +136,7 @@ async def replace_project_group( row = await result.first() if row is None: raise ProjectGroupNotFoundError( - reason=f"Project {project_id} group {group_id} not found" + details=f"Project {project_id} group {group_id} not found" ) return ProjectGroupGetDB.model_validate(row) diff --git a/services/web/server/src/simcore_service_webserver/projects/_groups_service.py b/services/web/server/src/simcore_service_webserver/projects/_groups_service.py index 4ad89126a491..ee663ba7a520 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_groups_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_groups_service.py @@ -12,7 +12,7 @@ from models_library.users import UserID from pydantic import BaseModel, EmailStr, TypeAdapter -from ..users import api as users_service +from ..users import users_service from . import _groups_repository from ._access_rights_service import check_user_project_permission from ._groups_models import ProjectGroupGetDB @@ -121,7 +121,7 @@ async def replace_project_group( raise ProjectInvalidRightsError( user_id=user_id, project_uuid=project_id, - reason=f"User does not have access to modify owner project group in project {project_id}", + details=f"User does not have access to modify owner project group in project {project_id}", ) project_group_db: ProjectGroupGetDB = ( @@ -168,7 +168,7 @@ async def delete_project_group( raise ProjectInvalidRightsError( user_id=user_id, project_uuid=project_id, - reason=f"User does not have access to modify owner project group in project {project_id}", + details=f"User does not have access to modify owner project group in project {project_id}", ) await _groups_repository.delete_project_group( @@ -252,3 +252,20 @@ async def create_project_group_without_checking_permissions( write=write, delete=delete, ) + + +async def list_project_groups_by_project_without_checking_permissions( + app: web.Application, + *, + project_id: ProjectID, +) -> list[ProjectGroupGet]: + project_groups_db: list[ProjectGroupGetDB] = ( + await _groups_repository.list_project_groups(app=app, project_id=project_id) + ) + + project_groups_api: list[ProjectGroupGet] = [ + ProjectGroupGet.model_validate(group.model_dump()) + for group in project_groups_db + ] + + return project_groups_api diff --git a/services/web/server/src/simcore_service_webserver/projects/_jobs_repository.py b/services/web/server/src/simcore_service_webserver/projects/_jobs_repository.py index 3b060be9a23b..539b1085036c 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_jobs_repository.py +++ b/services/web/server/src/simcore_service_webserver/projects/_jobs_repository.py @@ -67,6 +67,7 @@ async def set_project_as_job( *, project_uuid: ProjectID, job_parent_resource_name: str, + storage_assets_deleted: bool, ) -> None: async with transaction_context(self.engine, connection) as conn: stmt = ( @@ -74,10 +75,14 @@ async def set_project_as_job( .values( project_uuid=f"{project_uuid}", job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=storage_assets_deleted, ) .on_conflict_do_update( index_elements=["project_uuid", "job_parent_resource_name"], - set_={"job_parent_resource_name": job_parent_resource_name}, + set_={ + "job_parent_resource_name": job_parent_resource_name, + "storage_assets_deleted": storage_assets_deleted, + }, ) ) @@ -175,6 +180,7 @@ async def list_projects_marked_as_jobs( *_PROJECT_DB_COLS, projects.c.workbench, 
base_query.c.job_parent_resource_name, + base_query.c.storage_assets_deleted, ) .select_from( base_query.join( @@ -190,7 +196,7 @@ async def list_projects_marked_as_jobs( .offset(pagination_offset) ) - # Step 5: Execute queries + # Step 7: Execute queries async with pass_or_acquire_connection(self.engine, connection) as conn: total_count = await conn.scalar(total_query) assert isinstance(total_count, int) # nosec @@ -201,3 +207,41 @@ async def list_projects_marked_as_jobs( ) return total_count, projects_list + + async def get_project_marked_as_job( + self, + connection: AsyncConnection | None = None, + *, + project_uuid: ProjectID, + job_parent_resource_name: str, + ) -> ProjectJobDBGet | None: + """ + Returns the project associated with the given project_uuid and job_parent_resource_name + """ + query = ( + sa.select( + *_PROJECT_DB_COLS, + projects.c.workbench, + projects_to_jobs.c.job_parent_resource_name, + projects_to_jobs.c.storage_assets_deleted, + ) + .select_from( + projects_to_jobs.join( + projects, + projects_to_jobs.c.project_uuid == projects.c.uuid, + ) + ) + .where( + projects_to_jobs.c.project_uuid == f"{project_uuid}", + projects_to_jobs.c.job_parent_resource_name == job_parent_resource_name, + projects.c.workspace_id.is_(None), + ) + .limit(1) + ) + + async with pass_or_acquire_connection(self.engine, connection) as conn: + result = await conn.execute(query) + row = result.first() + if row is None: + return None + return TypeAdapter(ProjectJobDBGet).validate_python(row) diff --git a/services/web/server/src/simcore_service_webserver/projects/_jobs_service.py b/services/web/server/src/simcore_service_webserver/projects/_jobs_service.py index 6fa7a88fa703..f8f17a28ef90 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_jobs_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_jobs_service.py @@ -6,10 +6,11 @@ from models_library.projects import ProjectID from models_library.users import UserID from pydantic import AfterValidator, validate_call -from simcore_service_webserver.projects.models import ProjectJobDBGet from ._access_rights_service import check_user_project_permission from ._jobs_repository import ProjectJobsRepository +from .exceptions import ProjectNotFoundError +from .models import ProjectJobDBGet _logger = logging.getLogger(__name__) @@ -31,6 +32,7 @@ async def set_project_as_job( job_parent_resource_name: Annotated[ str, AfterValidator(_validate_job_parent_resource_name) ], + storage_assets_deleted: bool, ) -> None: await check_user_project_permission( @@ -44,7 +46,9 @@ async def set_project_as_job( repo = ProjectJobsRepository.create_from_app(app) await repo.set_project_as_job( - project_uuid=project_uuid, job_parent_resource_name=job_parent_resource_name + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=storage_assets_deleted, ) @@ -78,3 +82,38 @@ async def list_my_projects_marked_as_jobs( filter_by_job_parent_resource_name_prefix=filter_by_job_parent_resource_name_prefix, filter_any_custom_metadata=filter_any_custom_metadata, ) + + +@validate_call(config={"arbitrary_types_allowed": True}) +async def get_project_marked_as_job( + app: web.Application, + *, + product_name: ProductName, + user_id: UserID, + project_uuid: ProjectID, + job_parent_resource_name: Annotated[ + str, AfterValidator(_validate_job_parent_resource_name) + ], +) -> ProjectJobDBGet: + """ + Retrieves the project associated with the given project_uuid and job_parent_resource_name. 
+ Raises: + web.HTTPNotFound: if no project is found. + """ + await check_user_project_permission( + app, + project_id=project_uuid, + user_id=user_id, + product_name=product_name, + permission="read", + ) + repo = ProjectJobsRepository.create_from_app(app) + project_id = await repo.get_project_marked_as_job( + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + ) + if not project_id: + raise ProjectNotFoundError( + project_uuid=project_uuid, + ) + return project_id diff --git a/services/web/server/src/simcore_service_webserver/projects/_metadata_repository.py b/services/web/server/src/simcore_service_webserver/projects/_metadata_repository.py index d943e72e6f9d..2def2375f0fe 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_metadata_repository.py +++ b/services/web/server/src/simcore_service_webserver/projects/_metadata_repository.py @@ -16,6 +16,7 @@ DBProjectInvalidParentNodeError, DBProjectInvalidParentProjectError, DBProjectNotFoundError, + ProjectMetadata, ) from simcore_postgres_database.utils_projects_nodes import ( ProjectNodesNodeNotFoundError, @@ -95,6 +96,19 @@ async def get_project_custom_metadata( return TypeAdapter(MetadataDict).validate_python(metadata.custom or {}) +@_handle_projects_metadata_exceptions +async def get_project_metadata_or_none( + engine: Engine, project_uuid: ProjectID +) -> ProjectMetadata | None: + async with engine.acquire() as connection: + try: + return await utils_projects_metadata.get( + connection, project_uuid=project_uuid + ) + except DBProjectNotFoundError: + return None + + @_handle_projects_metadata_exceptions async def set_project_custom_metadata( engine: Engine, diff --git a/services/web/server/src/simcore_service_webserver/projects/_metadata_service.py b/services/web/server/src/simcore_service_webserver/projects/_metadata_service.py index 3d77fa6a74f0..0620f9c58e05 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_metadata_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_metadata_service.py @@ -8,7 +8,7 @@ from models_library.users import UserID from pydantic import TypeAdapter -from ..db.plugin import get_database_engine +from ..db.plugin import get_database_engine_legacy from . 
import _metadata_repository from ._access_rights_service import validate_project_ownership from .exceptions import ProjectNotFoundError @@ -23,7 +23,7 @@ async def get_project_custom_metadata_for_user( await validate_project_ownership(app, user_id=user_id, project_uuid=project_uuid) return await _metadata_repository.get_project_custom_metadata( - engine=get_database_engine(app), project_uuid=project_uuid + engine=get_database_engine_legacy(app), project_uuid=project_uuid ) @@ -32,7 +32,7 @@ async def get_project_custom_metadata_or_empty_dict( ) -> MetadataDict: try: output = await _metadata_repository.get_project_custom_metadata( - engine=get_database_engine(app), project_uuid=project_uuid + engine=get_database_engine_legacy(app), project_uuid=project_uuid ) except ProjectNotFoundError: # This is a valid case when the project is not found @@ -50,7 +50,7 @@ async def set_project_custom_metadata( await validate_project_ownership(app, user_id=user_id, project_uuid=project_uuid) return await _metadata_repository.set_project_custom_metadata( - engine=get_database_engine(app), + engine=get_database_engine_legacy(app), project_uuid=project_uuid, custom_metadata=value, ) @@ -65,7 +65,7 @@ async def _project_has_ancestors( await validate_project_ownership(app, user_id=user_id, project_uuid=project_uuid) return await _metadata_repository.project_has_ancestors( - engine=get_database_engine(app), project_uuid=project_uuid + engine=get_database_engine_legacy(app), project_uuid=project_uuid ) @@ -90,11 +90,11 @@ async def set_project_ancestors_from_custom_metadata( # let's try to get the parent project UUID parent_project_uuid = await _metadata_repository.get_project_id_from_node_id( - get_database_engine(app), node_id=parent_node_id + get_database_engine_legacy(app), node_id=parent_node_id ) await _metadata_repository.set_project_ancestors( - get_database_engine(app), + get_database_engine_legacy(app), project_uuid=project_uuid, parent_project_uuid=parent_project_uuid, parent_node_id=parent_node_id, @@ -111,7 +111,7 @@ async def set_project_ancestors( await validate_project_ownership(app, user_id=user_id, project_uuid=project_uuid) await _metadata_repository.set_project_ancestors( - get_database_engine(app), + get_database_engine_legacy(app), project_uuid=project_uuid, parent_project_uuid=parent_project_uuid, parent_node_id=parent_node_id, diff --git a/services/web/server/src/simcore_service_webserver/projects/_nodes_models_adapters.py b/services/web/server/src/simcore_service_webserver/projects/_nodes_models_adapters.py new file mode 100644 index 000000000000..f5baf7b776cf --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_nodes_models_adapters.py @@ -0,0 +1,70 @@ +""" +Collection of free function adapters between Node-like pydantic models + +- The tricky part here is to deal with alias in Node model which are not present in the DB models + +""" + +from datetime import datetime +from typing import Any +from uuid import UUID + +from models_library.projects_nodes import Node +from simcore_postgres_database.utils_projects_nodes import ( + ProjectNode, + ProjectNodeCreate, +) + + +def node_from_project_node_create(project_node_create: ProjectNodeCreate) -> Node: + """ + Adapter: Converts a ProjectNodeCreate instance to a Node model. 
+ """ + exclude_fields = {"node_id", "required_resources"} + + assert set(ProjectNodeCreate.model_fields).issuperset(exclude_fields) # nosec + + node_data: dict[str, Any] = project_node_create.model_dump( + exclude=exclude_fields, + exclude_none=True, + exclude_unset=True, + ) + return Node.model_validate(node_data, by_name=True) + + +def node_from_project_node(project_node: ProjectNode) -> Node: + """ + Adapter: Converts a ProjectNode instance to a Node model. + """ + exclude_fields = {"node_id", "required_resources", "created", "modified"} + assert set(ProjectNode.model_fields).issuperset(exclude_fields) # nosec + + node_data: dict[str, Any] = project_node.model_dump( + exclude=exclude_fields, + exclude_none=True, + exclude_unset=True, + ) + return Node.model_validate(node_data, by_name=True) + + +def project_node_create_from_node(node: Node, node_id: UUID) -> ProjectNodeCreate: + """ + Adapter: Converts a Node model and node_id to a ProjectNodeCreate instance. + """ + node_data: dict[str, Any] = node.model_dump(by_alias=False, mode="json") + return ProjectNodeCreate(node_id=node_id, **node_data) + + +def project_node_from_node( + node: Node, node_id: UUID, created: datetime, modified: datetime +) -> ProjectNode: + """ + Adapter: Converts a Node model, node_id, created, and modified to a ProjectNode instance. + """ + node_data: dict[str, Any] = node.model_dump(by_alias=False, mode="json") + return ProjectNode( + node_id=node_id, + created=created, + modified=modified, + **node_data, + ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_nodes_repository.py b/services/web/server/src/simcore_service_webserver/projects/_nodes_repository.py index e50603602657..f969f8de9833 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_nodes_repository.py +++ b/services/web/server/src/simcore_service_webserver/projects/_nodes_repository.py @@ -1,9 +1,10 @@ from aiohttp import web from models_library.projects import ProjectID from models_library.services_types import ServiceKey, ServiceVersion -from simcore_postgres_database.utils_projects_nodes import ProjectNodesRepo +from simcore_postgres_database.utils_projects_nodes import ProjectNode, ProjectNodesRepo +from simcore_postgres_database.utils_repos import pass_or_acquire_connection -from ..db.plugin import get_database_engine +from ..db.plugin import get_asyncpg_engine async def get_project_nodes_services( @@ -11,8 +12,17 @@ async def get_project_nodes_services( ) -> list[tuple[ServiceKey, ServiceVersion]]: repo = ProjectNodesRepo(project_uuid=project_uuid) - async with get_database_engine(app).acquire() as conn: + async with pass_or_acquire_connection(get_asyncpg_engine(app)) as conn: nodes = await repo.list(conn) # removes duplicates by preserving order return list(dict.fromkeys((node.key, node.version) for node in nodes)) + + +async def get_project_nodes( + app: web.Application, *, project_uuid: ProjectID +) -> list[ProjectNode]: + repo = ProjectNodesRepo(project_uuid=project_uuid) + + async with pass_or_acquire_connection(get_asyncpg_engine(app)) as conn: + return await repo.list(conn) diff --git a/services/web/server/src/simcore_service_webserver/projects/_nodes_service.py b/services/web/server/src/simcore_service_webserver/projects/_nodes_service.py index 0206e1315cc8..5354b496ca5f 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_nodes_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_nodes_service.py @@ -24,6 +24,7 @@ model_validator, ) from 
servicelib.utils import logged_gather +from simcore_postgres_database.utils_projects_nodes import ProjectNode from ..application_settings import get_application_settings from ..storage.api import get_download_link, get_files_in_node_folder @@ -81,6 +82,12 @@ async def get_project_nodes_services( ) +async def get_project_nodes( + app: web.Application, *, project_uuid: ProjectID +) -> list[ProjectNode]: + return await _nodes_repository.get_project_nodes(app, project_uuid=project_uuid) + + # # PREVIEWS # diff --git a/services/web/server/src/simcore_service_webserver/projects/_permalink_service.py b/services/web/server/src/simcore_service_webserver/projects/_permalink_service.py index e6fa6e61a8b2..d3c52a985012 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_permalink_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_permalink_service.py @@ -5,6 +5,7 @@ from aiohttp import web from models_library.api_schemas_webserver.permalinks import ProjectPermalink from models_library.projects import ProjectID +from yarl import URL from .exceptions import PermalinkFactoryError, PermalinkNotAllowedError from .models import ProjectDict @@ -15,9 +16,12 @@ class CreateLinkCoroutine(Protocol): async def __call__( - self, request: web.Request, project_uuid: ProjectID - ) -> ProjectPermalink: - ... + self, + app: web.Application, + request_url: URL, + request_headers: dict[str, str], + project_uuid: ProjectID, + ) -> ProjectPermalink: ... def register_factory(app: web.Application, factory_coro: CreateLinkCoroutine): @@ -39,13 +43,16 @@ def _get_factory(app: web.Application) -> CreateLinkCoroutine: async def _create_permalink( - request: web.Request, project_uuid: ProjectID + app: web.Application, + request_url: URL, + request_headers: dict[str, str], + project_uuid: ProjectID, ) -> ProjectPermalink: - create_coro: CreateLinkCoroutine = _get_factory(request.app) + create_coro: CreateLinkCoroutine = _get_factory(app) try: permalink: ProjectPermalink = await asyncio.wait_for( - create_coro(request=request, project_uuid=project_uuid), + create_coro(app, request_url, request_headers, project_uuid), timeout=_PERMALINK_CREATE_TIMEOUT_S, ) return permalink @@ -55,7 +62,10 @@ async def _create_permalink( async def update_or_pop_permalink_in_project( - request: web.Request, project: ProjectDict + app: web.Application, + request_url: URL, + request_headers: dict[str, str], + project: ProjectDict, ) -> ProjectPermalink | None: """Updates permalink entry in project @@ -64,7 +74,9 @@ async def update_or_pop_permalink_in_project( If fails, it pops it from project (so it is not set in the pydantic model. 
SEE ProjectGet.permalink) """ try: - permalink = await _create_permalink(request, project_uuid=project["uuid"]) + permalink = await _create_permalink( + app, request_url, request_headers, project_uuid=project["uuid"] + ) assert permalink # nosec project["permalink"] = permalink @@ -78,12 +90,12 @@ async def update_or_pop_permalink_in_project( async def aggregate_permalink_in_project( - request: web.Request, project: ProjectDict + app: web.Application, url: URL, headers: dict[str, str], project: ProjectDict ) -> ProjectDict: """ Adapter to use in parallel aggregation of fields in a project dataset """ - await update_or_pop_permalink_in_project(request, project) + await update_or_pop_permalink_in_project(app, url, headers, project) return project diff --git a/services/web/server/src/simcore_service_webserver/projects/_project_document_service.py b/services/web/server/src/simcore_service_webserver/projects/_project_document_service.py new file mode 100644 index 000000000000..a6a8ab7216be --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_project_document_service.py @@ -0,0 +1,203 @@ +"""Utility functions for project document management. + +This module contains common utilities for building and versioning project documents. +""" + +import logging +import re +from typing import cast + +from aiohttp import web +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs +from models_library.api_schemas_webserver.projects import ( + ProjectDocument, + ProjectDocumentVersion, +) +from models_library.api_schemas_webserver.socketio import SocketIORoomStr +from models_library.projects import ProjectID, ProjectTemplateType +from models_library.projects import ProjectType as ProjectTypeAPI +from servicelib.logging_utils import log_context +from servicelib.redis import ( + PROJECT_DB_UPDATE_REDIS_LOCK_KEY, + exclusive, + increment_and_return_project_document_version, +) + +from ..redis import ( + get_redis_document_manager_client_sdk, + get_redis_lock_manager_client_sdk, +) +from ..resource_manager.registry import get_registry +from ..resource_manager.service import list_opened_project_ids +from ..socketio._utils import get_socket_server +from . import _projects_repository + +_logger = logging.getLogger(__name__) + + +async def create_project_document_and_increment_version( + app: web.Application, project_uuid: ProjectID +) -> tuple[ProjectDocument, ProjectDocumentVersion]: + """Build project document and increment version with Redis lock protection. 
+ + This function is protected by Redis exclusive lock because: + - the project document and its version must be kept in sync + + Args: + app: The web application instance + project_uuid: UUID of the project + + Returns: + Tuple containing the project document and its version number + """ + + @exclusive( + get_redis_lock_manager_client_sdk(app), + lock_key=PROJECT_DB_UPDATE_REDIS_LOCK_KEY.format(project_uuid), + blocking=True, + blocking_timeout=None, # NOTE: this is a blocking call, a timeout has undefined effects + ) + async def _create_project_document_and_increment_version() -> ( + tuple[ProjectDocument, int] + ): + """This function is protected because + - the project document and its version must be kept in sync + """ + # Get the full project with workbench for document creation + project_with_workbench = await _projects_repository.get_project_with_workbench( + app=app, project_uuid=project_uuid + ) + # Create project document + project_document = ProjectDocument( + uuid=project_with_workbench.uuid, + workspace_id=project_with_workbench.workspace_id, + name=project_with_workbench.name, + description=project_with_workbench.description, + thumbnail=project_with_workbench.thumbnail, + last_change_date=project_with_workbench.last_change_date, + classifiers=project_with_workbench.classifiers, + dev=project_with_workbench.dev, + quality=project_with_workbench.quality, + workbench=project_with_workbench.workbench, + ui=project_with_workbench.ui, + type=cast(ProjectTypeAPI, project_with_workbench.type), + template_type=cast( + ProjectTemplateType, project_with_workbench.template_type + ), + ) + # Increment document version + redis_client_sdk = get_redis_document_manager_client_sdk(app) + document_version = await increment_and_return_project_document_version( + redis_client=redis_client_sdk, project_uuid=project_uuid + ) + + return project_document, document_version + + return await _create_project_document_and_increment_version() + + +async def remove_project_documents_as_admin(app: web.Application) -> None: + """Admin function to clean up project documents for projects with no connected users. + + This function scans through all project documents in the Redis DOCUMENTS database, + checks if there are any users currently connected to the project room via socketio, + and removes documents that have no connected users. 
+ """ + with log_context( + _logger, + logging.INFO, + msg="Project document cleanup started", + ): + # Get Redis document manager client to access the DOCUMENTS database + redis_client = get_redis_document_manager_client_sdk(app) + + # Pattern to match project document keys - looking for keys that contain project UUIDs + project_document_pattern = "projects:*:version" + + # Get socketio server instance + sio = get_socket_server(app) + + # Get known opened projects ids based on Redis resources table + registry = get_registry(app) + known_opened_project_ids = await list_opened_project_ids(registry) + known_opened_project_ids_set = set(known_opened_project_ids) + + projects_removed = 0 + + # Scan through all project document keys + async for key in redis_client.redis.scan_iter( + match=project_document_pattern, count=1000 + ): + # Extract project UUID from the key pattern "projects:{project_uuid}:version" + key_str = key.decode("utf-8") if isinstance(key, bytes) else key + match = re.match(r"projects:(?P[0-9a-f-]+):version", key_str) + + if not match: + continue + + project_uuid_str = match.group("project_uuid") + project_uuid = ProjectID(project_uuid_str) + project_room = SocketIORoomStr.from_project_id(project_uuid) + + # 1. CHECK - Check if the project UUID is in the known opened projects + if project_uuid in known_opened_project_ids_set: + _logger.debug( + "Project %s is in Redis Resources table (which means Project is opened), keeping document", + project_uuid, + ) + continue + + # 2. CHECK - Check if there are any users connected to this project room + try: + # Get all session IDs (socket IDs) in the project room + room_sessions = list( + sio.manager.get_participants(namespace="/", room=project_room) + ) + + # If no users are connected to this project room, remove the document + if not room_sessions: + await redis_client.redis.delete(key_str) + projects_removed += 1 + _logger.info( + "Removed project document for project %s (no connected users)", + project_uuid, + ) + else: + # Create a synthetic exception for this unexpected state + unexpected_state_error = RuntimeError( + f"Project {project_uuid} has {len(room_sessions)} connected users but is not in Redis Resources table" + ) + _logger.error( + **create_troubleshooting_log_kwargs( + user_error_msg=f"Project {project_uuid} has {len(room_sessions)} connected users in the socket io room (This is not expected, as project resource is not in the Redis Resources table), keeping document just in case", + error=unexpected_state_error, + error_context={ + "project_uuid": str(project_uuid), + "project_room": project_room, + "key_str": key_str, + "connected_users_count": len(room_sessions), + "room_sessions": room_sessions[ + :5 + ], # Limit to first 5 sessions for debugging + }, + tip="This indicates a potential race condition or inconsistency between the Redis Resources table and socketio room state. Check if the project was recently closed but users are still connected, or if there's a synchronization issue between services.", + ) + ) + continue + + except (KeyError, AttributeError, ValueError) as exc: + _logger.exception( + **create_troubleshooting_log_kwargs( + user_error_msg=f"Failed to check room participants for project {project_uuid}", + error=exc, + error_context={ + "project_uuid": str(project_uuid), + "project_room": project_room, + "key_str": key_str, + }, + tip="Check if socketio server is properly initialized and the room exists. 
This could indicate a socketio manager issue or invalid room format.", + ) + ) + continue + + _logger.info("Completed: removed %d project documents", projects_removed) diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_repository.py b/services/web/server/src/simcore_service_webserver/projects/_projects_repository.py index 705e4970ee7a..83c357c36570 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_repository.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_repository.py @@ -1,5 +1,5 @@ import logging -from collections.abc import Callable +from collections.abc import Callable, Iterable from datetime import datetime from typing import cast @@ -25,7 +25,7 @@ from ..db.plugin import get_asyncpg_engine from .exceptions import ProjectNotFoundError -from .models import ProjectDBGet +from .models import ProjectDBGet, ProjectWithWorkbenchDBGet _logger = logging.getLogger(__name__) @@ -115,6 +115,23 @@ async def get_project( return ProjectDBGet.model_validate(row) +async def get_project_with_workbench( + app: web.Application, + connection: AsyncConnection | None = None, + *, + project_uuid: ProjectID, +) -> ProjectWithWorkbenchDBGet: + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + query = sql.select(*PROJECT_DB_COLS, projects.c.workbench).where( + projects.c.uuid == f"{project_uuid}" + ) + result = await conn.execute(query) + row = result.one_or_none() + if row is None: + raise ProjectNotFoundError(project_uuid=project_uuid) + return ProjectWithWorkbenchDBGet.model_validate(row) + + async def batch_get_project_name( app: web.Application, connection: AsyncConnection | None = None, @@ -151,6 +168,27 @@ async def batch_get_project_name( return [rows.get(project_uuid) for project_uuid in projects_uuids_str] +async def batch_get_projects( + app: web.Application, + connection: AsyncConnection | None = None, + *, + project_uuids: Iterable[ProjectID], +) -> dict[ProjectID, ProjectDBGet]: + if not project_uuids: + return {} + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + query = ( + sql.select(projects) + .select_from(projects) + .where(projects.c.uuid.in_([f"{uuid}" for uuid in project_uuids])) + ) + result = await conn.stream(query) + return { + ProjectID(row.uuid): ProjectDBGet.model_validate(row) + async for row in result + } + + def _select_trashed_by_primary_gid_query() -> sql.Select: return sql.select( projects.c.uuid, diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py index 0ed76cf44b22..f55b8031dbb1 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy.py @@ -15,11 +15,17 @@ from aiopg.sa import Engine from aiopg.sa.connection import SAConnection from aiopg.sa.result import ResultProxy, RowProxy +from common_library.logging.logging_base import get_log_record_extra from models_library.basic_types import IDStr from models_library.folders import FolderQuery, FolderScope from models_library.groups import GroupID from models_library.products import ProductName -from models_library.projects import ProjectID, ProjectIDStr +from models_library.projects import ( + ProjectID, + ProjectIDStr, + ProjectListAtDB, + ProjectTemplateType, +) from 
models_library.projects_comments import CommentID, ProjectsCommentsDB from models_library.projects_nodes import Node from models_library.projects_nodes_io import NodeID, NodeIDStr @@ -35,8 +41,7 @@ from models_library.workspaces import WorkspaceQuery, WorkspaceScope from pydantic import TypeAdapter from pydantic.types import PositiveInt -from servicelib.aiohttp.application_keys import APP_AIOPG_ENGINE_KEY -from servicelib.logging_utils import get_log_record_extra, log_context +from servicelib.logging_utils import log_context from simcore_postgres_database.aiopg_errors import UniqueViolation from simcore_postgres_database.models.groups import user_to_groups from simcore_postgres_database.models.project_to_groups import project_to_groups @@ -58,11 +63,11 @@ ProjectNodesRepo, ) from simcore_postgres_database.webserver_models import ( - ProjectTemplateType, ProjectType, projects, users, ) +from simcore_service_webserver.constants import APP_AIOPG_ENGINE_KEY from sqlalchemy import func, literal_column, sql from sqlalchemy.dialects.postgresql import BOOLEAN, INTEGER from sqlalchemy.dialects.postgresql import insert as pg_insert @@ -71,6 +76,8 @@ from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type +from ..application_settings import get_application_settings +from ..models import ClientSessionID from ..utils import now_str from ._comments_repository import ( create_project_comment, @@ -80,6 +87,7 @@ total_project_comments, update_project_comment, ) +from ._project_document_service import create_project_document_and_increment_version from ._projects_repository import PROJECT_DB_COLS from ._projects_repository_legacy_utils import ( ANY_USER_ID_SENTINEL, @@ -89,8 +97,8 @@ convert_to_schema_names, create_project_access_rights, patch_workbench, - update_workbench, ) +from ._socketio_service import notify_project_document_updated from .exceptions import ( ProjectDeleteError, ProjectInvalidRightsError, @@ -113,6 +121,7 @@ field=IDStr("last_change_date"), direction=OrderDirection.DESC ) + # pylint: disable=too-many-public-methods # NOTE: https://github.com/ITISFoundation/osparc-simcore/issues/3516 @@ -431,7 +440,6 @@ def _create_shared_workspace_query( is_search_by_multi_columns: bool, user_groups: list[GroupID], ) -> sql.Select | None: - if workspace_query.workspace_scope is not WorkspaceScope.PRIVATE: assert workspace_query.workspace_scope in ( # nosec WorkspaceScope.SHARED, @@ -573,7 +581,6 @@ async def list_projects_dicts( # pylint: disable=too-many-arguments,too-many-st # attribute filters filter_by_project_type: ProjectType | None = None, filter_by_template_type: ProjectTemplateType | None = None, - filter_by_services: list[dict] | None = None, filter_published: bool | None = None, filter_hidden: bool | None = False, filter_trashed: bool | None = False, @@ -585,7 +592,7 @@ async def list_projects_dicts( # pylint: disable=too-many-arguments,too-many-st limit: int | None = None, # order order_by: OrderBy = DEFAULT_ORDER_BY, - ) -> tuple[list[ProjectDict], list[ProjectType], int]: + ) -> tuple[list[dict[str, Any]], int]: async with self.engine.acquire() as conn: user_groups_proxy: list[RowProxy] = await self._list_user_groups( conn, user_id @@ -668,16 +675,18 @@ async def list_projects_dicts( # pylint: disable=too-many-arguments,too-many-st projects.c.id, ) - prjs, prj_types = await self._execute_without_permission_check( - conn, - user_id=user_id, - select_projects_query=combined_query.offset(offset).limit(limit), - filter_by_services=filter_by_services, - ) + 
prjs_output = [] + async for row in conn.execute(combined_query.offset(offset).limit(limit)): + # NOTE: Historically, projects were returned as a dictionary. I have created a model that + # validates the DB row, but this model includes some default values inside the Workbench Node model. + # Therefore, if we use this model, it will return those default values, which is not backward-compatible + # with the frontend. The frontend would need to check and adapt how it handles default values in + # Workbench nodes, which are currently not returned if not set in the DB. + ProjectListAtDB.model_validate(row) + prjs_output.append(dict(row.items())) return ( - prjs, - prj_types, + prjs_output, cast(int, total_count), ) @@ -804,86 +813,6 @@ async def get_pure_project_access_rights_without_workspace( ) return UserProjectAccessRightsDB.model_validate(row) - async def replace_project( - self, - new_project_data: ProjectDict, - user_id: UserID, - *, - product_name: str, - project_uuid: str, - ) -> ProjectDict: - """ - replaces a project from a user - this method completely replaces a user project with new_project_data only keeping - the old entries from the project workbench if they exists in the new project workbench. - NOTE: This method does not allow to add or remove nodes. use add_project_node - or remove_project_node to achieve this. - - Raises: - ProjectInvalidRightsError - ProjectInvalidUsageError in case nodes are added/removed, use add_project_node/remove_project_node - """ - - async with AsyncExitStack() as stack: - stack.enter_context( - log_context( - _logger, - logging.DEBUG, - msg=f"Replace {project_uuid=} for {user_id=}", - extra=get_log_record_extra(user_id=user_id), - ) - ) - db_connection = await stack.enter_async_context(self.engine.acquire()) - await stack.enter_async_context(db_connection.begin()) - - current_project: dict = await self._get_project( - db_connection, - project_uuid, - exclude_foreign=["tags"], - for_update=True, - ) - - # uuid can ONLY be set upon creation - if current_project["uuid"] != new_project_data["uuid"]: - raise ProjectInvalidRightsError( - user_id=user_id, project_uuid=new_project_data["uuid"] - ) - # ensure the prj owner is always in the access rights - owner_primary_gid = await self._get_user_primary_group_gid( - db_connection, current_project[projects.c.prj_owner.key] - ) - new_project_data.setdefault("accessRights", {}).update( - create_project_access_rights( - owner_primary_gid, ProjectAccessRights.OWNER - ) - ) - new_project_data = update_workbench(current_project, new_project_data) - # update timestamps - new_project_data["lastChangeDate"] = now_str() - - # now update it - result = await db_connection.execute( - # pylint: disable=no-value-for-parameter - projects.update() - .values(**convert_to_db_names(new_project_data)) - .where(projects.c.id == current_project[projects.c.id.key]) - .returning(literal_column("*")) - ) - project = await result.fetchone() - assert project # nosec - await self.upsert_project_linked_product( - ProjectID(project_uuid), product_name, conn=db_connection - ) - - user_email = await self._get_user_email(db_connection, project.prj_owner) - - tags = await self._get_tags_by_project( - db_connection, project_id=project[projects.c.id] - ) - return convert_to_schema_names(project, user_email, tags=tags) - msg = "linter unhappy without this" - raise RuntimeError(msg) - async def get_project_product(self, project_uuid: ProjectID) -> ProductName: async with self.engine.acquire() as conn: result = await conn.execute( @@ -896,7 +825,7 
@@ async def get_project_product(self, project_uuid: ProjectID) -> ProductName: raise ProjectNotFoundError(project_uuid=project_uuid) return cast(str, row[0]) - async def update_project_owner_without_checking_permissions( + async def update_project_owner_without_checking_permissions( # <-- Used by Garbage Collector self, project_uuid: ProjectIDStr, *, @@ -919,17 +848,6 @@ async def update_project_owner_without_checking_permissions( result_row_count: int = result.rowcount assert result_row_count == 1 # nosec - async def update_project_last_change_timestamp(self, project_uuid: ProjectIDStr): - async with self.engine.acquire() as conn: - result = await conn.execute( - # pylint: disable=no-value-for-parameter - projects.update() - .values(last_change_date=now_str()) - .where(projects.c.uuid == f"{project_uuid}") - ) - if result.rowcount == 0: - raise ProjectNotFoundError(project_uuid=project_uuid) - async def delete_project(self, user_id: int, project_uuid: str): _logger.info( "Deleting project with %s for user with %s", @@ -955,6 +873,7 @@ async def update_project_node_data( node_id: NodeID, product_name: str | None, new_node_data: dict[str, Any], + client_session_id: ClientSessionID | None, ) -> tuple[ProjectDict, dict[NodeIDStr, Any]]: with log_context( _logger, @@ -963,14 +882,15 @@ async def update_project_node_data( extra=get_log_record_extra(user_id=user_id), ): partial_workbench_data: dict[NodeIDStr, Any] = { - NodeIDStr(f"{node_id}"): new_node_data, + f"{node_id}": new_node_data, } - return await self._update_project_workbench( + return await self._update_project_workbench_with_lock_and_notify( partial_workbench_data, user_id=user_id, - project_uuid=f"{project_uuid}", + project_uuid=project_uuid, product_name=product_name, allow_workbench_changes=False, + client_session_id=client_session_id, ) async def update_project_multiple_node_data( @@ -980,6 +900,7 @@ async def update_project_multiple_node_data( project_uuid: ProjectID, product_name: str | None, partial_workbench_data: dict[NodeIDStr, dict[str, Any]], + client_session_id: ClientSessionID | None, ) -> tuple[ProjectDict, dict[NodeIDStr, Any]]: """ Raises: @@ -991,13 +912,71 @@ async def update_project_multiple_node_data( msg=f"update multiple nodes on {project_uuid=} for {user_id=}", extra=get_log_record_extra(user_id=user_id), ): - return await self._update_project_workbench( + return await self._update_project_workbench_with_lock_and_notify( partial_workbench_data, user_id=user_id, - project_uuid=f"{project_uuid}", + project_uuid=project_uuid, product_name=product_name, allow_workbench_changes=False, + client_session_id=client_session_id, + ) + + async def _update_project_workbench_with_lock_and_notify( + self, + partial_workbench_data: dict[NodeIDStr, Any], + *, + user_id: UserID, + project_uuid: ProjectID, + product_name: str | None = None, + allow_workbench_changes: bool, + client_session_id: ClientSessionID | None, + ) -> tuple[ProjectDict, dict[NodeIDStr, Any]]: + """ + Updates project workbench with Redis lock and user notification. + + This method performs the following operations atomically: + 1. Updates the project workbench in the database + 2. Retrieves the updated project with workbench + 3. Creates a project document + 4. Increments the document version + 5. Notifies users about the project update + + Note: + This function is decorated with Redis exclusive lock to ensure + thread-safe operations on the project document. 
+ """ + + # Get user's primary group ID for notification + async with self.engine.acquire() as conn: + user_primary_gid = await self._get_user_primary_group_gid(conn, user_id) + + # Update the workbench + updated_project, changed_entries = await self._update_project_workbench( + partial_workbench_data, + user_id=user_id, + project_uuid=f"{project_uuid}", + product_name=product_name, + allow_workbench_changes=allow_workbench_changes, + ) + + app_settings = get_application_settings(self._app) + if app_settings.WEBSERVER_REALTIME_COLLABORATION is not None: + ( + project_document, + document_version, + ) = await create_project_document_and_increment_version( + self._app, project_uuid + ) + + await notify_project_document_updated( + app=self._app, + project_id=project_uuid, + user_primary_gid=user_primary_gid, + client_session_id=client_session_id, + version=document_version, + document=project_document, ) + return updated_project, changed_entries async def _update_project_workbench( self, @@ -1077,6 +1056,7 @@ async def add_project_node( node: ProjectNodeCreate, old_struct_node: Node, product_name: str, + client_session_id: ClientSessionID | None, ) -> None: # NOTE: permission check is done currently in update_project_workbench! partial_workbench_data: dict[NodeIDStr, Any] = { @@ -1085,29 +1065,35 @@ async def add_project_node( exclude_unset=True, ), } - await self._update_project_workbench( + project_nodes_repo = ProjectNodesRepo(project_uuid=project_id) + async with self.engine.acquire() as conn: + await project_nodes_repo.add(conn, nodes=[node]) + await self._update_project_workbench_with_lock_and_notify( partial_workbench_data, user_id=user_id, - project_uuid=f"{project_id}", + project_uuid=project_id, product_name=product_name, allow_workbench_changes=True, + client_session_id=client_session_id, ) - project_nodes_repo = ProjectNodesRepo(project_uuid=project_id) - async with self.engine.acquire() as conn: - await project_nodes_repo.add(conn, nodes=[node]) async def remove_project_node( - self, user_id: UserID, project_id: ProjectID, node_id: NodeID + self, + user_id: UserID, + project_id: ProjectID, + node_id: NodeID, + client_session_id: ClientSessionID | None, ) -> None: # NOTE: permission check is done currently in update_project_workbench! 
partial_workbench_data: dict[NodeIDStr, Any] = { NodeIDStr(f"{node_id}"): None, } - await self._update_project_workbench( + await self._update_project_workbench_with_lock_and_notify( partial_workbench_data, user_id=user_id, - project_uuid=f"{project_id}", + project_uuid=project_id, allow_workbench_changes=True, + client_session_id=client_session_id, ) project_nodes_repo = ProjectNodesRepo(project_uuid=project_id) async with self.engine.acquire() as conn: @@ -1252,6 +1238,13 @@ async def remove_tag( project["tags"].remove(tag_id) return convert_to_schema_names(project, user_email) + async def get_tags_by_project(self, project_id: str) -> list[int]: + async with self.engine.acquire() as conn: + query = sa.select(projects_tags.c.tag_id).where( + projects_tags.c.project_id == project_id + ) + return [row.tag_id async for row in conn.execute(query)] + # # Project Comments # @@ -1360,15 +1353,6 @@ async def is_hidden(self, project_uuid: ProjectID) -> bool: ) return bool(result) - async def set_hidden_flag(self, project_uuid: ProjectID, *, hidden: bool): - async with self.engine.acquire() as conn: - stmt = ( - projects.update() - .values(hidden=hidden) - .where(projects.c.uuid == f"{project_uuid}") - ) - await conn.execute(stmt) - # # Project TYPE column # @@ -1404,7 +1388,7 @@ async def check_project_has_only_one_product(self, project_uuid: ProjectID) -> N # reduce time to develop by not implementing something that might never be necessary raise ProjectDeleteError( project_uuid=project_uuid, - reason="Project has more than one linked product. This needs manual intervention. Please contact oSparc support.", + details="Project has more than one linked product. This needs manual intervention. Please contact oSparc support.", ) diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy_utils.py b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy_utils.py index a6ef69f93611..5b4824526258 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy_utils.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_repository_legacy_utils.py @@ -1,4 +1,3 @@ -import asyncio import logging from collections.abc import Mapping from copy import deepcopy @@ -9,17 +8,20 @@ import sqlalchemy as sa from aiopg.sa.connection import SAConnection from aiopg.sa.result import RowProxy -from models_library.projects import ProjectAtDB, ProjectID, ProjectTemplateType +from models_library.projects import ProjectID, ProjectType from models_library.projects_nodes import Node from models_library.projects_nodes_io import NodeIDStr -from models_library.users import UserID from models_library.utils.change_case import camel_to_snake, snake_to_camel from pydantic import ValidationError from simcore_postgres_database.models.project_to_groups import project_to_groups -from simcore_postgres_database.models.projects_to_products import projects_to_products -from simcore_postgres_database.webserver_models import ProjectType, projects +from simcore_postgres_database.webserver_models import ( + ProjectTemplateType as ProjectTemplateTypeDB, +) +from simcore_postgres_database.webserver_models import ProjectType as ProjectTypeDB +from simcore_postgres_database.webserver_models import ( + projects, +) from sqlalchemy.dialects.postgresql import insert as pg_insert -from sqlalchemy.sql.selectable import CompoundSelect, Select from ..db.models import GroupType, groups, projects_tags, user_to_groups, users from 
..users.exceptions import UserNotFoundError @@ -27,12 +29,11 @@ from ._projects_repository import PROJECT_DB_COLS from .exceptions import ( NodeNotFoundError, - ProjectInvalidRightsError, ProjectInvalidUsageError, ProjectNotFoundError, ) from .models import ProjectDict -from .utils import find_changed_node_keys, project_uses_available_services +from .utils import find_changed_node_keys logger = logging.getLogger(__name__) @@ -93,9 +94,9 @@ def convert_to_schema_names( if col_name == "prj_owner": # this entry has to be converted to the owner e-mail address converted_value = user_email - if col_name == "type" and isinstance(col_value, ProjectType): + if col_name == "type" and isinstance(col_value, ProjectTypeDB): converted_value = col_value.value - if col_name == "template_type" and isinstance(col_value, ProjectTemplateType): + if col_name == "template_type" and isinstance(col_value, ProjectTemplateTypeDB): converted_value = col_value.value if col_name in SCHEMA_NON_NULL_KEYS and col_value is None: @@ -186,65 +187,6 @@ async def _upsert_tags_in_project( .on_conflict_do_nothing() ) - async def _execute_without_permission_check( - self, - conn: SAConnection, - user_id: UserID, - *, - select_projects_query: Select | CompoundSelect, - filter_by_services: list[dict] | None = None, - ) -> tuple[list[dict[str, Any]], list[ProjectType]]: - api_projects: list[dict] = [] # API model-compatible projects - db_projects: list[dict] = [] # DB model-compatible projects - project_types: list[ProjectType] = [] - async for row in conn.execute(select_projects_query): - assert isinstance(row, RowProxy) # nosec - try: - await asyncio.get_event_loop().run_in_executor( - None, ProjectAtDB.model_validate, row - ) - - except ProjectInvalidRightsError: - continue - - except ValidationError as exc: - logger.warning( - "project %s failed validation, please check. error: %s", - f"{row.id=}", - exc, - ) - continue - - prj: dict[str, Any] = dict(row.items()) - prj.pop("product_name", None) - - if ( - filter_by_services is not None - # This checks only old projects that are not in the projects_to_products table. - and row[projects_to_products.c.product_name] is None - and not await project_uses_available_services(prj, filter_by_services) - ): - logger.warning( - "Project %s will not be listed for user %s since it has no access rights" - " for one or more of the services that includes.", - f"{row.id=}", - f"{user_id=}", - ) - continue - db_projects.append(prj) - - # NOTE: DO NOT nest _get_tags_by_project in async loop above !!! 
- # FIXME: temporary avoids inner async loops issue https://github.com/aio-libs/aiopg/issues/535 - for db_prj in db_projects: - db_prj["tags"] = await self._get_tags_by_project( - conn, project_id=db_prj["id"] - ) - user_email = await self._get_user_email(conn, db_prj["prj_owner"]) - api_projects.append(convert_to_schema_names(db_prj, user_email)) - project_types.append(db_prj["type"]) - - return (api_projects, project_types) - async def _get_project( self, connection: SAConnection, diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py index 4f5dac07a556..03ceba3b3a82 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_service.py @@ -9,38 +9,55 @@ import asyncio import collections +import contextlib import datetime import logging from collections import defaultdict -from collections.abc import Generator +from collections.abc import Iterable from contextlib import suppress from decimal import Decimal from pprint import pformat from typing import Any, Final, cast -from uuid import UUID, uuid4 +from uuid import uuid4 from aiohttp import web -from common_library.json_serialization import json_dumps, json_loads +from common_library.json_serialization import json_dumps +from common_library.logging.logging_base import get_log_record_extra from models_library.api_schemas_clusters_keeper.ec2_instances import EC2InstanceTypeGet from models_library.api_schemas_directorv2.dynamic_services import ( + DynamicServiceGet, GetProjectInactivityResponse, ) from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( DynamicServiceStart, DynamicServiceStop, ) -from models_library.api_schemas_webserver.projects import ProjectPatch +from models_library.api_schemas_webserver.projects import ( + ProjectGet, + ProjectPatch, +) +from models_library.api_schemas_webserver.projects_nodes import ( + NodeGet, + NodeGetUnknown, +) +from models_library.api_schemas_webserver.socketio import SocketIORoomStr from models_library.basic_types import KeyIDStr from models_library.errors import ErrorDict from models_library.groups import GroupID from models_library.products import ProductName -from models_library.projects import Project, ProjectID, ProjectIDStr +from models_library.projects import Project, ProjectID from models_library.projects_access import Owner -from models_library.projects_nodes import Node, NodeState, PartialNode +from models_library.projects_nodes import ( + Node, + NodeShareState, + NodeShareStatus, + NodeState, + PartialNode, +) from models_library.projects_nodes_io import NodeID, NodeIDStr, PortLink from models_library.projects_state import ( - ProjectLocked, ProjectRunningState, + ProjectShareState, ProjectState, ProjectStatus, RunningState, @@ -61,14 +78,13 @@ from models_library.utils.fastapi_encoders import jsonable_encoder from models_library.wallets import ZERO_CREDITS, WalletID, WalletInfo from models_library.workspaces import UserWorkspaceWithAccessRights -from pydantic import ByteSize, TypeAdapter -from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY +from pydantic import ByteSize, PositiveInt, TypeAdapter from servicelib.common_headers import ( UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, X_FORWARDED_PROTO, X_SIMCORE_USER_AGENT, ) -from servicelib.logging_utils import get_log_record_extra, log_context +from 
servicelib.logging_utils import log_context from servicelib.rabbitmq import RemoteMethodNotRegisteredError, RPCServerError from servicelib.rabbitmq.rpc_interfaces.catalog import services as catalog_rpc from servicelib.rabbitmq.rpc_interfaces.clusters_keeper.ec2_instances import ( @@ -84,7 +100,8 @@ is_project_locked, with_project_locked, ) -from servicelib.utils import fire_and_forget_task, logged_gather +from servicelib.rest_constants import RESPONSE_MODEL_POLICY +from servicelib.utils import fire_and_forget_task, limited_gather, logged_gather from simcore_postgres_database.models.users import UserRole from simcore_postgres_database.utils_projects_nodes import ( ProjectNodeCreate, @@ -94,14 +111,16 @@ from ..application_settings import get_application_settings from ..catalog import catalog_service +from ..constants import APP_FIRE_AND_FORGET_TASKS_KEY from ..director_v2 import director_v2_service from ..dynamic_scheduler import api as dynamic_scheduler_service +from ..models import ClientSessionID from ..products import products_web from ..rabbitmq import get_rabbitmq_rpc_client from ..redis import get_redis_lock_manager_client_sdk +from ..resource_manager.models import UserSession from ..resource_manager.user_sessions import ( PROJECT_ID_KEY, - UserSessionID, managed_resource, ) from ..resource_usage import service as rut_api @@ -111,19 +130,20 @@ send_message_to_standard_group, send_message_to_user, ) +from ..socketio.server import get_socket_server from ..storage import api as storage_service -from ..users.api import FullNameDict, get_user, get_user_fullname, get_user_role -from ..users.exceptions import UserNotFoundError -from ..users.preferences_api import ( +from ..user_preferences import user_preferences_service +from ..user_preferences.user_preferences_service import ( PreferredWalletIdFrontendUserPreference, - UserDefaultWalletNotFoundError, - get_frontend_user_preference, ) +from ..users import users_service +from ..users.exceptions import UserDefaultWalletNotFoundError, UserNotFoundError from ..wallets import api as wallets_service from ..wallets.errors import WalletNotEnoughCreditsError from ..workspaces import _workspaces_repository as workspaces_workspaces_repository from . 
import ( _crud_api_delete, + _groups_service, _nodes_service, _projects_nodes_repository, _projects_repository, @@ -134,8 +154,10 @@ has_user_project_access_rights, ) from ._nodes_utils import set_reservation_same_as_limit, validate_new_service_resources +from ._project_document_service import create_project_document_and_increment_version from ._projects_repository_legacy import APP_PROJECT_DBAPI, ProjectDBAPI from ._projects_repository_legacy_utils import PermissionStr +from ._socketio_service import notify_project_document_updated from .exceptions import ( ClustersKeeperNotAvailableError, DefaultPricingUnitNotFoundError, @@ -143,6 +165,7 @@ InvalidEC2TypeInResourcesSpecsError, InvalidKeysInResourcesSpecsError, NodeNotFoundError, + NodeShareStateCannotBeComputedError, ProjectInvalidRightsError, ProjectLockError, ProjectNodeConnectionsMissingError, @@ -151,24 +174,69 @@ ProjectOwnerNotFoundInTheProjectAccessRightsError, ProjectStartsTooManyDynamicNodesError, ProjectTooManyProjectOpenedError, + ProjectTooManyUserSessionsError, ProjectTypeAndTemplateIncompatibilityError, ) -from .models import ProjectDict, ProjectPatchInternalExtended +from .models import ProjectDBGet, ProjectDict, ProjectPatchInternalExtended from .settings import ProjectsSettings, get_plugin_settings from .utils import extract_dns_without_default_port -log = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) -PROJECT_REDIS_LOCK_KEY: str = "project:{}" +async def patch_project_and_notify_users( + app: web.Application, + *, + project_uuid: ProjectID, + patch_project_data: dict[str, Any], + user_primary_gid: GroupID, + client_session_id: ClientSessionID | None, +) -> None: + """ + Patches a project and notifies users involved in the project with version control. + + This function performs the following operations atomically: + 1. Patches the project in the database + 2. Retrieves the updated project with workbench + 3. Creates a project document + 4. Increments the document version + 5. Notifies users about the project update + + Args: + app: The web application instance + project_uuid: The project UUID to patch + patch_project_data: Dictionary containing the project data to patch + user_primary_gid: Primary group ID of the user making the change + client_session_id: Session ID of the frontend client that originated the change, or None; the originating session uses it to skip its own update notification + + Note: + This function is decorated with Redis exclusive lock to ensure + thread-safe operations on the project document.
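As an orientation aid for the docstring above: the update is pushed to clients as the `projectDocument:updated` socket.io event introduced later in this diff, and the `client_session_id` lets the session that made the change ignore its own notification. The sketch below is a hypothetical consumer-side handler, not taken from the patch; the camelCase payload keys, the session id value and the `apply_document` helper are assumptions.

```python
# Hypothetical consumer sketch: drop events emitted by this very session and
# apply the rest strictly in version order.
from typing import Any

_MY_CLIENT_SESSION_ID = "session-placeholder"  # assumed to be known client-side
_last_applied_version = 0


def apply_document(document: dict[str, Any]) -> None:
    """Placeholder for whatever refreshes the local project model."""


def on_project_document_updated(event: dict[str, Any]) -> None:
    global _last_applied_version
    if event.get("clientSessionId") == _MY_CLIENT_SESSION_ID:
        return  # optimistic UI already shows this change
    if event["version"] <= _last_applied_version:
        return  # stale or duplicate notification
    _last_applied_version = event["version"]
    apply_document(event["document"])
```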
+ """ -def _is_node_dynamic(node_key: str) -> bool: - return "/dynamic/" in node_key + await _projects_repository.patch_project( + app=app, + project_uuid=project_uuid, + new_partial_project_data=patch_project_data, + ) + app_settings = get_application_settings(app) + if app_settings.WEBSERVER_REALTIME_COLLABORATION is not None: + ( + project_document, + document_version, + ) = await create_project_document_and_increment_version(app, project_uuid) + await notify_project_document_updated( + app=app, + project_id=project_uuid, + user_primary_gid=user_primary_gid, + client_session_id=client_session_id, + version=document_version, + document=project_document, + ) -# -# GET project ----------------------------------------------------- -# + +def _is_node_dynamic(node_key: str) -> bool: + return "/dynamic/" in node_key async def get_project_for_user( @@ -197,7 +265,7 @@ async def get_project_for_user( ) workspace_is_private = user_project_access.workspace_id is None - project, project_type = await db.get_project_dict_and_type( + project, _project_type = await db.get_project_dict_and_type( project_uuid, ) @@ -211,10 +279,7 @@ async def get_project_for_user( # adds state if it is not a template if include_state: project = await add_project_states_for_user( - user_id=user_id, - project=project, - is_template=project_type is ProjectType.TEMPLATE, - app=app, + user_id=user_id, project=project, app=app ) # adds `trashed_by_primary_gid` @@ -249,17 +314,17 @@ async def get_project_for_user( async def get_project_type( app: web.Application, project_uuid: ProjectID ) -> ProjectType: - db: ProjectDBAPI = app[APP_PROJECT_DBAPI] - assert db # nosec - return await db.get_project_type(project_uuid) + db_legacy: ProjectDBAPI = app[APP_PROJECT_DBAPI] + assert db_legacy # nosec + return await db_legacy.get_project_type(project_uuid) async def get_project_dict_legacy( app: web.Application, project_uuid: ProjectID ) -> ProjectDict: - db: ProjectDBAPI = app[APP_PROJECT_DBAPI] - assert db # nosec - project, _ = await db.get_project_dict_and_type( + db_legacy: ProjectDBAPI = app[APP_PROJECT_DBAPI] + assert db_legacy # nosec + project, _ = await db_legacy.get_project_dict_and_type( f"{project_uuid}", ) return project @@ -275,6 +340,17 @@ async def batch_get_project_name( return [name if name else "Unknown" for name in get_project_names] +async def batch_get_projects( + app: web.Application, + *, + project_uuids: Iterable[ProjectID], +) -> dict[ProjectID, ProjectDBGet]: + return await _projects_repository.batch_get_projects( + app=app, + project_uuids=project_uuids, + ) + + # # UPDATE project ----------------------------------------------------- # @@ -283,24 +359,33 @@ async def batch_get_project_name( async def update_project_last_change_timestamp( app: web.Application, project_uuid: ProjectID ): - db: ProjectDBAPI = app[APP_PROJECT_DBAPI] - assert db # nosec - await db.update_project_last_change_timestamp(ProjectIDStr(f"{project_uuid}")) + await _projects_repository.patch_project( + app=app, + project_uuid=project_uuid, + new_partial_project_data={}, # <-- no changes, just update timestamp + ) -async def patch_project( +async def patch_project_for_user( app: web.Application, *, user_id: UserID, project_uuid: ProjectID, project_patch: ProjectPatch | ProjectPatchInternalExtended, product_name: ProductName, + client_session_id: ClientSessionID | None, ): + # client_session_id (str | None): The session ID of the frontend client making the request. 
+ # This is used to distinguish between multiple sessions a user may have open. + # In scenarios with optimistic UI updates, if a change is made from one session, + # that session can ignore the notification it published to all sessions (including other users), + # preventing redundant updates in the originating session. + patch_project_data = project_patch.to_domain_model() - db: ProjectDBAPI = app[APP_PROJECT_DBAPI] + db_legacy: ProjectDBAPI = app[APP_PROJECT_DBAPI] # 1. Get project - project_db = await db.get_project_db(project_uuid=project_uuid) + project_db = await db_legacy.get_project_db(project_uuid=project_uuid) # 2. Check user permissions _user_project_access_rights = await check_user_project_permission( @@ -322,20 +407,22 @@ async def patch_project( "write": True, "delete": True, } - user: dict = await get_user(app, project_db.prj_owner) - _prj_owner_primary_group = f"{user['primary_gid']}" + prj_owner_user: dict = await users_service.get_user(app, project_db.prj_owner) + _prj_owner_primary_group = f"{prj_owner_user['primary_gid']}" if _prj_owner_primary_group not in new_prj_access_rights: raise ProjectOwnerNotFoundInTheProjectAccessRightsError if new_prj_access_rights[_prj_owner_primary_group] != _prj_required_permissions: raise ProjectOwnerNotFoundInTheProjectAccessRightsError - # 4. If patching template type + # 4. Get user primary group ID + current_user: dict = await users_service.get_user(app, user_id) + + # 5. If patching template type if new_template_type := patch_project_data.get("template_type"): - # 4.1 Check if user is a tester - current_user: dict = await get_user(app, user_id) + # 5.1 Check if user is a tester if UserRole(current_user["role"]) < UserRole.TESTER: raise InsufficientRoleForProjectTemplateTypeUpdateError - # 4.2 Check the compatibility of the template type with the project + # 5.2 Check the compatibility of the template type with the project if project_db.type == ProjectType.STANDARD and new_template_type is not None: raise ProjectTypeAndTemplateIncompatibilityError( project_uuid=project_uuid, @@ -349,19 +436,16 @@ async def patch_project( project_template=new_template_type, ) - # 5. Patch the project - await _projects_repository.patch_project( - app=app, + # 6. 
Patch the project & Notify users involved in the project + await patch_project_and_notify_users( + app, project_uuid=project_uuid, - new_partial_project_data=patch_project_data, + patch_project_data=patch_project_data, + user_primary_gid=current_user["primary_gid"], + client_session_id=client_session_id, ) -# -# DELETE project ----------------------------------------------------- -# - - async def delete_project_by_user( app: web.Application, *, @@ -380,7 +464,7 @@ async def delete_project_by_user( await task -def get_delete_project_task( +def _get_delete_project_task( project_uuid: ProjectID, user_id: UserID ) -> asyncio.Task | None: if tasks := _crud_api_delete.get_scheduled_tasks(project_uuid, user_id): @@ -411,7 +495,7 @@ async def submit_delete_project_task( await _crud_api_delete.mark_project_as_deleted(app, project_uuid, user_id) # Ensures ONE delete task per (project,user) pair - task = get_delete_project_task(project_uuid, user_id) + task = _get_delete_project_task(project_uuid, user_id) if not task: task = _crud_api_delete.schedule_task( app, @@ -419,16 +503,11 @@ async def submit_delete_project_task( user_id, simcore_user_agent, remove_project_dynamic_services, - log, + _logger, ) return task -# -# PROJECT NODES ----------------------------------------------------- -# - - async def _get_default_pricing_and_hardware_info( app: web.Application, product_name: str, @@ -520,7 +599,7 @@ def _by_type_name(ec2: EC2InstanceTypeGet) -> bool: service_to_resources[1].resources["RAM"].limit ), ) - log.debug( + _logger.debug( "the most hungry service is %s", f"{scalable_service_name=}:{hungry_service_resources}", ) @@ -617,6 +696,7 @@ def _check_required_input(required_input_key: KeyIDStr) -> None: if output_entry is None: unset_outputs_in_upstream.append((source_output_key, source_node.label)) + assert isinstance(node.inputs_required, list) # nosec for required_input in node.inputs_required: _check_required_input(required_input) @@ -633,7 +713,7 @@ def _check_required_input(required_input_key: KeyIDStr) -> None: ) -async def _start_dynamic_service( # noqa: C901 +async def _start_dynamic_service( # pylint: disable=too-many-statements # noqa: C901 request: web.Request, *, service_key: ServiceKey, @@ -646,19 +726,17 @@ async def _start_dynamic_service( # noqa: C901 graceful_start: bool = False, ) -> None: if not _is_node_dynamic(service_key): + # not dynamic, nothing to do return - # this is a dynamic node, let's gather its resources and start it - - db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(request.app) - + db = ProjectDBAPI.get_from_app_context(request.app) try: await _check_project_node_has_all_required_inputs( request.app, db, user_id, project_uuid, node_uuid ) except ProjectNodeRequiredInputsNotSetError as e: if graceful_start: - log.info( + _logger.info( "Did not start '%s' because of missing required inputs: %s", node_uuid, e, @@ -667,7 +745,7 @@ async def _start_dynamic_service( # noqa: C901 raise save_state = False - user_role: UserRole = await get_user_role(request.app, user_id=user_id) + user_role = await users_service.get_user_role(request.app, user_id=user_id) if user_role > UserRole.GUEST: save_state = await has_user_project_access_rights( request.app, project_id=project_uuid, user_id=user_id, permission="write" @@ -683,7 +761,8 @@ async def _start_dynamic_service( # noqa: C901 ) ), ) - async def _() -> None: + async def _safe_service_start() -> None: + """In case of concurrent requests, this guarantees that only one service can be started at a time""" 
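For readers following this hunk: `_safe_service_start` is wrapped by the `with_project_locked` decorator shown just above, which takes a Redis-backed exclusive lock on the project for the duration of the call. A minimal sketch of the same pattern, reusing the keyword arguments as they appear in this module (the maintenance scenario itself is hypothetical):

```python
# Sketch only; web, ProjectID, UserID, Owner, ProjectStatus, with_project_locked
# and get_redis_lock_manager_client_sdk are the names already imported in this module.
async def _locked_maintenance_example(
    app: web.Application, project_uuid: ProjectID, user_id: UserID
) -> None:
    @with_project_locked(
        get_redis_lock_manager_client_sdk(app),
        project_uuid=project_uuid,
        status=ProjectStatus.MAINTAINING,
        owner=Owner(user_id=user_id),
        notification_cb=None,
    )
    async def _critical_section() -> None:
        # only one caller at a time reaches this point for the given project;
        # other docstrings in this module hint that a concurrent caller fails
        # with ProjectLockError
        ...

    await _critical_section()
```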
project_running_nodes = await dynamic_scheduler_service.list_dynamic_services( request.app, user_id=user_id, project_id=project_uuid ) @@ -707,11 +786,13 @@ async def _() -> None: request.app, project_id=project_uuid ) if project_wallet is None: - user_default_wallet_preference = await get_frontend_user_preference( - request.app, - user_id=user_id, - product_name=product_name, - preference_class=PreferredWalletIdFrontendUserPreference, + user_default_wallet_preference = ( + await user_preferences_service.get_frontend_user_preference( + request.app, + user_id=user_id, + product_name=product_name, + preference_class=PreferredWalletIdFrontendUserPreference, + ) ) if user_default_wallet_preference is None: raise UserDefaultWalletNotFoundError(uid=user_id) @@ -779,7 +860,7 @@ async def _() -> None: and wallet.available_credits <= ZERO_CREDITS ): raise WalletNotEnoughCreditsError( - reason=f"Wallet '{wallet.name}' has {wallet.available_credits} credits." + details=f"Wallet '{wallet.name}' has {wallet.available_credits} credits." ) pricing_info = PricingInfo( @@ -799,7 +880,7 @@ async def _() -> None: service_version=service_version, ) - service_resources: ServiceResourcesDict = await get_project_node_resources( + service_resources = await get_project_node_resources( request.app, user_id=user_id, project_id=project_uuid, @@ -832,7 +913,13 @@ async def _() -> None: ), ) - await _() + project = await get_project_for_user( + request.app, f"{project_uuid}", user_id, include_state=True + ) + + await notify_project_node_update(request.app, project, node_uuid, errors=None) + + await _safe_service_start() async def add_project_node( @@ -844,8 +931,9 @@ async def add_project_node( service_key: ServiceKey, service_version: ServiceVersion, service_id: str | None, + client_session_id: ClientSessionID | None, ) -> NodeID: - log.debug( + _logger.debug( "starting node %s:%s in project %s for user %s", service_key, service_version, @@ -866,9 +954,9 @@ async def add_project_node( default_resources = await catalog_service.get_service_resources( request.app, user_id, service_key, service_version ) - db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(request.app) - assert db # nosec - await db.add_project_node( + db_legacy: ProjectDBAPI = ProjectDBAPI.get_from_app_context(request.app) + assert db_legacy # nosec + await db_legacy.add_project_node( user_id, ProjectID(project["uuid"]), ProjectNodeCreate( @@ -886,6 +974,7 @@ async def add_project_node( } ), product_name, + client_session_id=client_session_id, ) # also ensure the project is updated by director-v2 since services @@ -979,8 +1068,9 @@ async def delete_project_node( node_uuid: NodeIDStr, product_name: ProductName, product_api_base_url: str, + client_session_id: ClientSessionID | None, ) -> None: - log.debug( + _logger.debug( "deleting node %s in project %s for user %s", node_uuid, project_uuid, user_id ) @@ -1018,9 +1108,11 @@ async def delete_project_node( ) # remove the node from the db - db: ProjectDBAPI = request.app[APP_PROJECT_DBAPI] - assert db # nosec - await db.remove_project_node(user_id, project_uuid, NodeID(node_uuid)) + db_legacy: ProjectDBAPI = request.app[APP_PROJECT_DBAPI] + assert db_legacy # nosec + await db_legacy.remove_project_node( + user_id, project_uuid, NodeID(node_uuid), client_session_id=client_session_id + ) # also ensure the project is updated by director-v2 since services product_name = products_web.get_product_name(request) await director_v2_service.create_or_update_pipeline( @@ -1034,9 +1126,11 @@ async def 
delete_project_node( async def update_project_linked_product( app: web.Application, project_id: ProjectID, product_name: str ) -> None: - with log_context(log, level=logging.DEBUG, msg="updating project linked product"): - db: ProjectDBAPI = app[APP_PROJECT_DBAPI] - await db.upsert_project_linked_product(project_id, product_name) + with log_context( + _logger, level=logging.DEBUG, msg="updating project linked product" + ): + db_legacy: ProjectDBAPI = app[APP_PROJECT_DBAPI] + await db_legacy.upsert_project_linked_product(project_id, product_name) async def update_project_node_state( @@ -1045,16 +1139,17 @@ async def update_project_node_state( project_id: ProjectID, node_id: NodeID, new_state: str, + client_session_id: ClientSessionID | None, ) -> dict: - log.debug( + _logger.debug( "updating node %s current state in project %s for user %s", node_id, project_id, user_id, ) - db: ProjectDBAPI = app[APP_PROJECT_DBAPI] - product_name = await db.get_project_product(project_id) + db_legacy = ProjectDBAPI.get_from_app_context(app) + product_name = await db_legacy.get_project_product(project_id) await check_user_project_permission( app, project_id=project_id, @@ -1065,12 +1160,13 @@ async def update_project_node_state( # Delete this once workbench is removed from the projects table # See: https://github.com/ITISFoundation/osparc-simcore/issues/7046 - updated_project, _ = await db.update_project_node_data( + await db_legacy.update_project_node_data( user_id=user_id, project_uuid=project_id, node_id=node_id, product_name=None, new_node_data={"state": {"currentStatus": new_state}}, + client_session_id=client_session_id, ) await _projects_nodes_repository.update( @@ -1081,15 +1177,14 @@ async def update_project_node_state( state=NodeState(current_status=RunningState(new_state)) ), ) - - return await add_project_states_for_user( - user_id=user_id, project=updated_project, is_template=False, app=app + return await get_project_for_user( + app, user_id=user_id, project_uuid=f"{project_id}", include_state=True ) async def is_project_hidden(app: web.Application, project_id: ProjectID) -> bool: - db: ProjectDBAPI = app[APP_PROJECT_DBAPI] - return await db.is_hidden(project_id) + db_legacy: ProjectDBAPI = app[APP_PROJECT_DBAPI] + return await db_legacy.is_hidden(project_id) async def patch_project_node( @@ -1101,12 +1196,13 @@ async def patch_project_node( project_id: ProjectID, node_id: NodeID, partial_node: PartialNode, + client_session_id: ClientSessionID | None, ) -> None: _node_patch_exclude_unset: dict[str, Any] = partial_node.model_dump( mode="json", exclude_unset=True, by_alias=True ) - _projects_repository = ProjectDBAPI.get_from_app_context(app) + _projects_repository_legacy = ProjectDBAPI.get_from_app_context(app) # 1. Check user permissions await check_user_project_permission( @@ -1114,12 +1210,12 @@ async def patch_project_node( project_id=project_id, user_id=user_id, product_name=product_name, - permission="write", # NOTE: MD: before only read was sufficient, double check this + permission="write", ) # 2. If patching service key or version make sure it's valid if _node_patch_exclude_unset.get("key") or _node_patch_exclude_unset.get("version"): - _project, _ = await _projects_repository.get_project_dict_and_type( + _project, _ = await _projects_repository_legacy.get_project_dict_and_type( project_uuid=f"{project_id}" ) _project_node_data = _project["workbench"][f"{node_id}"] @@ -1138,12 +1234,13 @@ async def patch_project_node( ) # 3. 
Patch the project node - updated_project, _ = await _projects_repository.update_project_node_data( + updated_project, _ = await _projects_repository_legacy.update_project_node_data( user_id=user_id, project_uuid=project_id, node_id=node_id, product_name=product_name, new_node_data=_node_patch_exclude_unset, + client_session_id=client_session_id, ) await _projects_nodes_repository.update( @@ -1153,7 +1250,7 @@ async def patch_project_node( partial_node=partial_node, ) - # 4. Make calls to director-v2 to keep data in sync (ex. comp_tasks DB table) + # 4. Make calls to director-v2 to keep data in sync (ex. comp_* DB tables) await director_v2_service.create_or_update_pipeline( app, user_id, @@ -1166,11 +1263,11 @@ async def patch_project_node( app, project_id=project_id ) - # 5. Updates project states for user, if inputs/outputs have been changed + updated_project = await add_project_states_for_user( + user_id=user_id, project=updated_project, app=app + ) + # 5. if inputs/outputs have been changed all depending nodes shall be notified if {"inputs", "outputs"} & _node_patch_exclude_unset.keys(): - updated_project = await add_project_states_for_user( - user_id=user_id, project=updated_project, is_template=False, app=app - ) for node_uuid in updated_project["workbench"]: await notify_project_node_update( app, updated_project, node_uuid, errors=None @@ -1187,11 +1284,12 @@ async def update_project_node_outputs( node_id: NodeID, new_outputs: dict | None, new_run_hash: str | None, + client_session_id: ClientSessionID | None, ) -> tuple[dict, list[str]]: """ Updates outputs of a given node in a project with 'data' """ - log.debug( + _logger.debug( "updating node %s outputs in project %s for user %s with %s: run_hash [%s]", node_id, project_id, @@ -1202,8 +1300,8 @@ async def update_project_node_outputs( ) new_outputs = new_outputs or {} - db: ProjectDBAPI = app[APP_PROJECT_DBAPI] - product_name = await db.get_project_product(project_id) + db_legacy = ProjectDBAPI.get_from_app_context(app) + product_name = await db_legacy.get_project_product(project_id) await check_user_project_permission( app, project_id=project_id, @@ -1212,12 +1310,13 @@ async def update_project_node_outputs( permission="write", # NOTE: MD: before only read was sufficient, double check this ) - updated_project, changed_entries = await db.update_project_node_data( + updated_project, changed_entries = await db_legacy.update_project_node_data( user_id=user_id, project_uuid=project_id, node_id=node_id, product_name=None, new_node_data={"outputs": new_outputs, "runHash": new_run_hash}, + client_session_id=client_session_id, ) await _projects_nodes_repository.update( @@ -1229,13 +1328,13 @@ async def update_project_node_outputs( ), ) - log.debug( + _logger.debug( "patched project %s, following entries changed: %s", project_id, pformat(changed_entries), ) updated_project = await add_project_states_for_user( - user_id=user_id, project=updated_project, is_template=False, app=app + user_id=user_id, project=updated_project, app=app ) # changed entries come in the form of {node_uuid: {outputs: {changed_key1: value1, changed_key2: value2}}} @@ -1253,8 +1352,8 @@ async def list_node_ids_in_project( project_uuid: ProjectID, ) -> set[NodeID]: """Returns a set with all the node_ids from a project's workbench""" - db: ProjectDBAPI = app[APP_PROJECT_DBAPI] - return await db.list_node_ids_in_project(project_uuid) + db_legacy: ProjectDBAPI = app[APP_PROJECT_DBAPI] + return await db_legacy.list_node_ids_in_project(project_uuid) async def 
is_node_id_present_in_any_project_workbench( @@ -1262,22 +1361,61 @@ async def is_node_id_present_in_any_project_workbench( node_id: NodeID, ) -> bool: """If the node_id is presnet in one of the projects' workbenche returns True""" - db: ProjectDBAPI = app[APP_PROJECT_DBAPI] - return await db.node_id_exists(node_id) + db_legacy: ProjectDBAPI = app[APP_PROJECT_DBAPI] + return await db_legacy.node_id_exists(node_id) -async def _safe_retrieve( - app: web.Application, node_id: NodeID, port_keys: list[str] -) -> None: - try: - await dynamic_scheduler_service.retrieve_inputs(app, node_id, port_keys) - except RPCServerError as exc: - log.warning( - "Unable to call :retrieve endpoint on service %s, keys: [%s]: error: [%s]", - node_id, - port_keys, - exc, +async def _get_node_share_state( + app: web.Application, + *, + project_uuid: ProjectID, + node_id: NodeID, + computational_pipeline_running: bool | None, + user_primrary_groupid: GroupID, +) -> NodeShareState: + node = await _projects_nodes_repository.get( + app, project_id=project_uuid, node_id=node_id + ) + + if _is_node_dynamic(node.key): + # if the service is dynamic and running it is locked if it is not collaborative + service = await dynamic_scheduler_service.get_dynamic_service( + app, node_id=node_id + ) + + if isinstance(service, DynamicServiceGet | NodeGet): + # service is running + is_collaborative_service = False + if isinstance(service, DynamicServiceGet): + # only dynamic-sidecar powered services can be collaborative + is_collaborative_service = service.is_collaborative + + return NodeShareState( + locked=not is_collaborative_service, + current_user_groupids=[ + await users_service.get_user_primary_group_id( + app, TypeAdapter(UserID).validate_python(service.user_id) + ) + ], + status=NodeShareStatus.OPENED, + ) + if isinstance(service, NodeGetUnknown): + # service state is unknown, raise + raise NodeShareStateCannotBeComputedError( + project_uuid=project_uuid, node_uuid=node_id + ) + return NodeShareState(locked=False) + + # if the service is computational and no pipeline is running it is not locked + if computational_pipeline_running: + return NodeShareState( + locked=True, + current_user_groupids=[ + user_primrary_groupid, + ], + status=NodeShareStatus.OPENED, ) + return NodeShareState(locked=False) async def _trigger_connected_service_retrieve( @@ -1286,7 +1424,7 @@ async def _trigger_connected_service_retrieve( project_id = project["uuid"] if await is_project_locked(get_redis_lock_manager_client_sdk(app), project_id): # NOTE: we log warn since this function is fire&forget and raise an exception would not be anybody to handle it - log.warning( + _logger.warning( "Skipping service retrieval because project with %s is currently locked." 
"Operation triggered by %s", f"{project_id=}", @@ -1320,10 +1458,10 @@ async def _trigger_connected_service_retrieve( # call /retrieve on the nodes update_tasks = [ - _safe_retrieve(app, NodeID(node), keys) + dynamic_scheduler_service.retrieve_inputs(app, NodeID(node), keys) for node, keys in nodes_keys_to_update.items() ] - await logged_gather(*update_tasks) + await logged_gather(*update_tasks, reraise=False) async def post_trigger_connected_service_retrieve( @@ -1345,13 +1483,8 @@ async def post_trigger_connected_service_retrieve( ) -# -# OPEN PROJECT ------------------------------------------------------------------- -# - - -async def _user_has_another_client_open( - users_sessions_ids: list[UserSessionID], app: web.Application +async def _user_has_another_active_session( + users_sessions_ids: list[UserSession], app: web.Application ) -> bool: # NOTE if there is an active socket in use, that means the client is active for u in users_sessions_ids: @@ -1362,12 +1495,12 @@ async def _user_has_another_client_open( async def _clean_user_disconnected_clients( - users_sessions_ids: list[UserSessionID], app: web.Application + users_sessions_ids: list[UserSession], app: web.Application ): for u in users_sessions_ids: with managed_resource(u.user_id, u.client_session_id, app) as user_session: if await user_session.get_socket_id() is None: - log.debug( + _logger.debug( "removing disconnected project of user %s/%s", u.user_id, u.client_session_id, @@ -1375,6 +1508,35 @@ async def _clean_user_disconnected_clients( await user_session.remove(PROJECT_ID_KEY) +async def _leave_project_room( + *, + app: web.Application, + user_id: UserID, + client_session_id: ClientSessionID, + project_uuid: ProjectID, + user_session, +) -> None: + """Helper function to leave a project room via socketio""" + socket_id = await user_session.get_socket_id() + if socket_id is not None: + _logger.debug( + "User %s/%s is leaving project room %s with socket_id %s", + user_id, + client_session_id, + project_uuid, + socket_id, + ) + sio = get_socket_server(app) + await sio.leave_room(socket_id, SocketIORoomStr.from_project_id(project_uuid)) + else: + _logger.error( + "User %s/%s has no socket_id, cannot leave project room %s", + user_id, + client_session_id, + project_uuid, + ) + + def create_user_notification_cb( user_id: UserID, project_uuid: ProjectID, app: web.Application ): @@ -1387,9 +1549,12 @@ async def _notification_cb() -> None: async def try_open_project_for_user( user_id: UserID, project_uuid: ProjectID, - client_session_id: str, + client_session_id: ClientSessionID, app: web.Application, - max_number_of_studies_per_user: int | None, + *, + max_number_of_opened_projects_per_user: int | None, + allow_multiple_sessions: bool, + max_number_of_user_sessions_per_project: PositiveInt | None, ) -> bool: """ Raises: @@ -1404,16 +1569,24 @@ async def try_open_project_for_user( get_redis_lock_manager_client_sdk(app), project_uuid=project_uuid, status=ProjectStatus.OPENING, - owner=Owner( - user_id=user_id, **await get_user_fullname(app, user_id=user_id) - ), + owner=Owner(user_id=user_id), notification_cb=None, ) async def _open_project() -> bool: with managed_resource(user_id, client_session_id, app) as user_session: - # NOTE: if max_number_of_studies_per_user is set, the same - # project shall still be openable if the tab was closed - if max_number_of_studies_per_user is not None and ( + # check if the project is already opened + if ( + current_project_ids := await user_session.find(PROJECT_ID_KEY) + ) and f"{project_uuid}" 
in current_project_ids: + _logger.debug( + "project %s is already opened by user %s/%s", + project_uuid, + user_id, + client_session_id, + ) + return True + # Enforce per-user open project limit + if max_number_of_opened_projects_per_user is not None and ( len( { uuid @@ -1423,42 +1596,88 @@ async def _open_project() -> bool: if uuid != f"{project_uuid}" } ) - >= max_number_of_studies_per_user + >= max_number_of_opened_projects_per_user ): raise ProjectTooManyProjectOpenedError( - max_num_projects=max_number_of_studies_per_user + max_num_projects=max_number_of_opened_projects_per_user, + user_id=user_id, + project_uuid=project_uuid, + client_session_id=client_session_id, ) - # Assign project_id to current_session - current_session: UserSessionID = user_session.get_id() - sessions_with_project: list[UserSessionID] = ( - await user_session.find_users_of_resource( - app, PROJECT_ID_KEY, f"{project_uuid}" - ) + # try to assign project_id to current_session + sessions_with_project = await user_session.find_users_of_resource( + app, PROJECT_ID_KEY, f"{project_uuid}" ) - if not sessions_with_project: - # no one has the project so we assign it + if max_number_of_user_sessions_per_project is not None and ( + len(sessions_with_project) + >= max_number_of_user_sessions_per_project + ): + # we need to check whether this user has an inactive session, in which case we can steal the project + this_user_other_sessions = [ + s + for s in sessions_with_project + if s.user_id == user_id and s != user_session + ] + for session in this_user_other_sessions: + with managed_resource( + session.user_id, session.client_session_id, app + ) as other_user_session: + if await other_user_session.get_socket_id() is None: + # this user has an inactive session, we can steal the project + _logger.debug( + "stealing project %s from user %s/%s", + project_uuid, + session.user_id, + session.client_session_id, + ) + await user_session.add( + PROJECT_ID_KEY, f"{project_uuid}" + ) + await other_user_session.remove(PROJECT_ID_KEY) + + return True + + raise ProjectTooManyUserSessionsError( + max_num_sessions=max_number_of_user_sessions_per_project, + user_id=user_id, + project_uuid=project_uuid, + client_session_id=client_session_id, + ) + if not sessions_with_project or ( + allow_multiple_sessions + and ( + max_number_of_user_sessions_per_project is None + or ( + len(sessions_with_project) + < max_number_of_user_sessions_per_project + ) + ) + ): + # if there are no sessions with this project, or the number of sessions is less than the maximum allowed await user_session.add(PROJECT_ID_KEY, f"{project_uuid}") return True - # Otherwise if this is the only user (NOTE: a session = user_id + client_seesion_id !)
- user_ids: set[int] = {s.user_id for s in sessions_with_project} - if user_ids.issubset({user_id}): - other_sessions_with_project = [ - usid - for usid in sessions_with_project - if usid != current_session - ] - if not await _user_has_another_client_open( - other_sessions_with_project, - app, - ): - # steal the project - await user_session.add(PROJECT_ID_KEY, f"{project_uuid}") - await _clean_user_disconnected_clients( - sessions_with_project, app - ) - return True + # NOTE: Special case for backwards compatibility, allow to close a tab and open the project again in a new tab + if not allow_multiple_sessions: + current_session = user_session.get_id() + user_ids: set[int] = {s.user_id for s in sessions_with_project} + if user_ids.issubset({user_id}): + other_sessions_with_project = [ + usid + for usid in sessions_with_project + if usid != current_session + ] + if not await _user_has_another_active_session( + other_sessions_with_project, + app, + ): + # steal the project + await user_session.add(PROJECT_ID_KEY, f"{project_uuid}") + await _clean_user_disconnected_clients( + sessions_with_project, app + ) + return True return False @@ -1469,161 +1688,201 @@ async def _open_project() -> bool: return False -# -# CLOSE PROJECT ------------------------------------------------------------------- -# - - -async def try_close_project_for_user( - user_id: int, - project_uuid: str, - client_session_id: str, +async def close_project_for_user( + user_id: UserID, + project_uuid: ProjectID, + client_session_id: ClientSessionID, app: web.Application, simcore_user_agent: str, + *, + wait_for_service_closed: bool = False, ): with managed_resource(user_id, client_session_id, app) as user_session: - current_session: UserSessionID = user_session.get_id() - all_sessions_with_project: list[UserSessionID] = ( - await user_session.find_users_of_resource( - app, key=PROJECT_ID_KEY, value=project_uuid - ) + current_user_session = user_session.get_id() + all_user_sessions_with_project = await user_session.find_users_of_resource( + app, key=PROJECT_ID_KEY, value=f"{project_uuid}" ) # first check whether other sessions registered this project - if current_session not in all_sessions_with_project: + if current_user_session not in all_user_sessions_with_project: # nothing to do, I do not have this project registered - log.warning( - "%s is not registered as resource of %s. 
Skipping close project", - f"{project_uuid=}", - f"{user_id}", - extra=get_log_record_extra(user_id=user_id), - ) return # remove the project from our list of opened ones - log.debug( - "removing project [%s] from user [%s] resources", project_uuid, user_id - ) await user_session.remove(key=PROJECT_ID_KEY) + # remove the client session from the project room + await _leave_project_room( + app=app, + user_id=user_id, + client_session_id=client_session_id, + project_uuid=project_uuid, + user_session=user_session, + ) + # check it is not opened by someone else - all_sessions_with_project.remove(current_session) - log.debug("remaining user_to_session_ids: %s", all_sessions_with_project) - if not all_sessions_with_project: + all_user_sessions_with_project.remove(current_user_session) + _logger.debug("remaining user_to_session_ids: %s", all_user_sessions_with_project) + if not all_user_sessions_with_project: # NOTE: depending on the garbage collector speed, it might already be removing it - fire_and_forget_task( - remove_project_dynamic_services( - user_id, project_uuid, app, simcore_user_agent - ), - task_suffix_name=f"remove_project_dynamic_services_{user_id=}_{project_uuid=}", - fire_and_forget_tasks_collection=app[APP_FIRE_AND_FORGET_TASKS_KEY], + remove_services_task = remove_project_dynamic_services( + user_id, project_uuid, app, simcore_user_agent ) + if wait_for_service_closed: + # wait for the task to finish + await remove_services_task + else: + fire_and_forget_task( + remove_services_task, + task_suffix_name=f"remove_project_dynamic_services_{user_id=}_{project_uuid=}", + fire_and_forget_tasks_collection=app[APP_FIRE_AND_FORGET_TASKS_KEY], + ) else: - log.error( - "project [%s] is used by other users: [%s]. This should not be possible", - project_uuid, - {user_session.user_id for user_session in all_sessions_with_project}, + # when the project is still opened by other users, we just notify the project state update + project = await get_project_for_user( + app, + f"{project_uuid}", + user_id, + include_state=True, ) + await notify_project_state_update(app, project) -# -# PROJECT STATE ------------------------------------------------------------------- -# - - -async def _get_project_lock_state( +async def _get_project_share_state( user_id: int, project_uuid: str, app: web.Application, -) -> ProjectLocked: +) -> ProjectShareState: """returns the lock state of a project 1. If a project is locked for any reason, first return the project as locked and STATUS defined by lock - 2. If a client_session_id is passed, then first check to see if the project is currently opened by this very user/tab combination, if yes returns the project as Locked and OPENED. 3. If any other user than user_id is using the project (even disconnected before the TTL is finished) then the project is Locked and OPENED. 4. If the same user is using the project with a valid socket id (meaning a tab is currently active) then the project is Locked and OPENED. 5. If the same user is using the project with NO socket id (meaning there is no current tab active) then the project is Unlocked and OPENED. which means the user can open it again.
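As a rough illustration of the numbered rules above, the states below show typical return values of the rewritten function (constructor keywords copied from the code that follows; the group id 1 is a placeholder, and the realtime-collaboration settings add further cases not listed here):

```python
# Illustrative outcomes only; the real logic below also honours explicit
# project locks and WEBSERVER_REALTIME_COLLABORATION. ProjectShareState and
# ProjectStatus are the names already imported in this module.
_EXAMPLE_SHARE_STATES = [
    # nobody holds the project: closed and unlocked
    ProjectShareState(
        status=ProjectStatus.CLOSED, locked=False, current_user_groupids=[]
    ),
    # only this user holds it, with no active tab: opened but re-openable
    ProjectShareState(
        status=ProjectStatus.OPENED, locked=False, current_user_groupids=[1]
    ),
    # another user, or an active tab, holds it: opened and locked
    ProjectShareState(
        status=ProjectStatus.OPENED, locked=True, current_user_groupids=[1]
    ),
]
```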
""" - log.debug( - "getting project [%s] lock state for user [%s]...", - f"{project_uuid=}", - f"{user_id=}", - ) - prj_locked_state: ProjectLocked | None = await get_project_locked_state( + app_settings = get_application_settings(app) + prj_locked_state = await get_project_locked_state( get_redis_lock_manager_client_sdk(app), project_uuid ) - if prj_locked_state: - log.debug( - "project [%s] is locked: %s", f"{project_uuid=}", f"{prj_locked_state=}" - ) - return prj_locked_state - - # let's now check if anyone has the project in use somehow with managed_resource(user_id, None, app) as rt: - user_session_id_list: list[UserSessionID] = await rt.find_users_of_resource( + user_sessions_with_project = await rt.find_users_of_resource( app, PROJECT_ID_KEY, project_uuid ) - set_user_ids = {user_session.user_id for user_session in user_session_id_list} - assert ( # nosec - len(set_user_ids) <= 1 - ) # nosec # NOTE: A project can only be opened by one user in one tab at the moment + if prj_locked_state: + _logger.debug( + "project [%s] is currently locked: %s", + f"{project_uuid=}", + f"{prj_locked_state=}", + ) + + if app_settings.WEBSERVER_REALTIME_COLLABORATION: + return ProjectShareState( + status=prj_locked_state.status, + locked=prj_locked_state.status + in [ + ProjectStatus.CLONING, + ProjectStatus.EXPORTING, + ProjectStatus.MAINTAINING, + ] + or ( + ( + app_settings.WEBSERVER_REALTIME_COLLABORATION.RTC_MAX_NUMBER_OF_USERS + is not None + ) + and ( + len(user_sessions_with_project) + < app_settings.WEBSERVER_REALTIME_COLLABORATION.RTC_MAX_NUMBER_OF_USERS + ) + ), + current_user_groupids=( + [ + await users_service.get_user_primary_group_id( + app, prj_locked_state.owner.user_id + ) + ] + if prj_locked_state.owner + else [] + ), + ) + # "old" behavior to remove once RTC is fully implemented + return ProjectShareState( + status=prj_locked_state.status, + locked=prj_locked_state.value, + current_user_groupids=( + [ + await users_service.get_user_primary_group_id( + app, prj_locked_state.owner.user_id + ) + ] + if prj_locked_state.owner + else [] + ), + ) - if not set_user_ids: + if not user_sessions_with_project: # no one has the project, so it is unlocked and closed. 
- log.debug("project [%s] is not in use", f"{project_uuid=}") - return ProjectLocked(value=False, status=ProjectStatus.CLOSED) + _logger.debug("project [%s] is not in use", f"{project_uuid=}") + return ProjectShareState( + status=ProjectStatus.CLOSED, locked=False, current_user_groupids=[] + ) + + # let's now check if anyone has the project in use somehow + active_user_ids = { + user_session.user_id for user_session in user_sessions_with_project + } - log.debug( + _logger.debug( "project [%s] might be used by the following users: [%s]", f"{project_uuid=}", - f"{set_user_ids=}", - ) - usernames: list[FullNameDict] = [ - await get_user_fullname(app, user_id=uid) for uid in set_user_ids - ] - # let's check if the project is opened by the same user, maybe already opened or closed in a orphaned session - if set_user_ids.issubset({user_id}) and not await _user_has_another_client_open( - user_session_id_list, app - ): - # in this case the project is re-openable by the same user until it gets closed - log.debug( - "project [%s] is in use by the same user [%s] that is currently disconnected, so it is unlocked for this specific user and opened", - f"{project_uuid=}", - f"{set_user_ids=}", - ) - return ProjectLocked( - value=False, - owner=Owner(user_id=next(iter(set_user_ids)), **usernames[0]), - status=ProjectStatus.OPENED, - ) - # the project is opened in another tab or browser, or by another user, both case resolves to the project being locked, and opened - log.debug( - "project [%s] is in use by another user [%s], so it is locked", - f"{project_uuid=}", - f"{set_user_ids=}", - ) - return ProjectLocked( - value=True, - owner=Owner(user_id=next(iter(set_user_ids)), **usernames[0]), - status=ProjectStatus.OPENED, + f"{active_user_ids=}", ) + if app_settings.WEBSERVER_REALTIME_COLLABORATION is None: # noqa: SIM102 + # let's check if the project is opened by the same user, maybe already opened or closed in a orphaned session + if active_user_ids.issubset( + {user_id} + ) and not await _user_has_another_active_session( + user_sessions_with_project, app + ): + # in this case the project is re-openable by the same user until it gets closed + _logger.debug( + "project [%s] is in use by the same user [%s] that is currently disconnected, so it is unlocked for this specific user and opened", + f"{project_uuid=}", + f"{active_user_ids=}", + ) + return ProjectShareState( + status=ProjectStatus.OPENED, + locked=False, + current_user_groupids=await limited_gather( + *[ + users_service.get_user_primary_group_id(app, user_id=uid) + for uid in active_user_ids + ], + limit=10, + ), + ) -async def get_project_states_for_user( - user_id: int, project_uuid: str, app: web.Application -) -> ProjectState: - # for templates: the project is never locked and never opened. 
also the running state is always unknown - running_state = RunningState.UNKNOWN - lock_state, computation_task = await logged_gather( - _get_project_lock_state(user_id, project_uuid, app), - director_v2_service.get_computation_task(app, user_id, UUID(project_uuid)), - ) - if computation_task: - # get the running state - running_state = computation_task.state + # compute lock state + if app_settings.WEBSERVER_REALTIME_COLLABORATION is None: + locked = True + elif app_settings.WEBSERVER_REALTIME_COLLABORATION.RTC_MAX_NUMBER_OF_USERS is None: + locked = False + else: + locked = ( + len(user_sessions_with_project) + >= app_settings.WEBSERVER_REALTIME_COLLABORATION.RTC_MAX_NUMBER_OF_USERS + ) - return ProjectState( - locked=lock_state, state=ProjectRunningState(value=running_state) + return ProjectShareState( + status=ProjectStatus.OPENED, + locked=locked, + current_user_groupids=await limited_gather( + *[ + users_service.get_user_primary_group_id(app, user_id=uid) + for uid in active_user_ids + ], + limit=10, + ), ) @@ -1631,49 +1890,81 @@ async def add_project_states_for_user( *, user_id: int, project: ProjectDict, - is_template: bool, app: web.Application, ) -> ProjectDict: - log.debug( + _logger.debug( "adding project states for %s with project %s", f"{user_id=}", f"{project['uuid']=}", ) - # for templates: the project is never locked and never opened. also the running state is always unknown - lock_state = await _get_project_lock_state(user_id, project["uuid"], app) - running_state = RunningState.UNKNOWN + project_share_state, user_computation_task = await asyncio.gather( + _get_project_share_state(user_id, project["uuid"], app), + director_v2_service.get_computation_task(app, user_id, project["uuid"]), + ) + # retrieve the project computational state + # if the user has no computation task, we assume the project is not running + project_running_state = ( + user_computation_task.state + if user_computation_task + else RunningState.NOT_STARTED + ) + computational_node_states = ( + user_computation_task.pipeline_details.node_states + if user_computation_task + else {} + ) - if not is_template and ( - computation_task := await director_v2_service.get_computation_task( - app, user_id, project["uuid"] - ) - ): - # get the running state - running_state = computation_task.state - # get the nodes individual states - for ( - node_id, - node_state, - ) in computation_task.pipeline_details.node_states.items(): - prj_node = project["workbench"].get(str(node_id)) - if prj_node is None: - continue - node_state_dict = json_loads( - node_state.model_dump_json(by_alias=True, exclude_unset=True) + # compose the node states + is_pipeline_running = await director_v2_service.is_pipeline_running( + app, user_id, project["uuid"] + ) + user_primary_group_id = await users_service.get_user_primary_group_id(app, user_id) + for node_uuid, node in project["workbench"].items(): + assert isinstance(node_uuid, str) # nosec + assert isinstance(node, dict) # nosec + + node_lock_state = None + with contextlib.suppress(NodeShareStateCannotBeComputedError): + node_lock_state = await _get_node_share_state( + app, + project_uuid=project["uuid"], + node_id=NodeID(node_uuid), + computational_pipeline_running=is_pipeline_running, + user_primrary_groupid=user_primary_group_id, + ) + if NodeID(node_uuid) in computational_node_states: + node_state = computational_node_states[NodeID(node_uuid)].model_copy( + update={"lock_state": node_lock_state} + ) + else: + # if the node is not in the computational state, we create a new one + 
service_is_running = node_lock_state and ( + node_lock_state.status is NodeShareStatus.OPENED + ) + node_state = NodeState( + current_status=( + RunningState.STARTED + if service_is_running + else RunningState.NOT_STARTED + ), + lock_state=node_lock_state, ) - prj_node.setdefault("state", {}).update(node_state_dict) - prj_node_progress = node_state_dict.get("progress", None) or 0 - prj_node.update({"progress": round(prj_node_progress * 100.0)}) + + # upgrade the project + node.setdefault("state", {}).update( + node_state.model_dump(mode="json", by_alias=True, exclude_unset=True) + ) + if "progress" in node["state"] and node["state"]["progress"] is not None: + # ensure progress is a percentage + node["progress"] = round(node["state"]["progress"] * 100.0) project["state"] = ProjectState( - locked=lock_state, state=ProjectRunningState(value=running_state) + share_state=project_share_state, + state=ProjectRunningState(value=project_running_state), ).model_dump(by_alias=True, exclude_unset=True) return project -# -# SERVICE DEPRECATION ---------------------------- -# async def is_service_deprecated( app: web.Application, user_id: UserID, @@ -1713,11 +2004,6 @@ async def is_project_node_deprecated( raise NodeNotFoundError(project_uuid=project["uuid"], node_uuid=f"{node_id}") -# -# SERVICE RESOURCES ----------------------------------- -# - - async def get_project_node_resources( app: web.Application, user_id: UserID, @@ -1789,11 +2075,6 @@ async def update_project_node_resources( ) from exc -# -# PROJECT DYNAMIC SERVICES ----------------------------------------------------- -# - - async def run_project_dynamic_services( request: web.Request, project: dict, @@ -1864,13 +2145,12 @@ async def run_project_dynamic_services( async def remove_project_dynamic_services( - user_id: int, - project_uuid: str, + user_id: UserID, + project_uuid: ProjectID, app: web.Application, simcore_user_agent: str, *, notify_users: bool = True, - user_name: FullNameDict | None = None, ) -> None: """ @@ -1878,26 +2158,20 @@ async def remove_project_dynamic_services( :raises ProjectLockError """ - # NOTE: during the closing process, which might take awhile, - # the project is locked so no one opens it at the same time - log.debug( + _logger.debug( "removing project interactive services for project [%s] and user [%s]", project_uuid, user_id, ) - user_name_data: FullNameDict = user_name or await get_user_fullname( - app, user_id=user_id - ) - user_role: UserRole | None = None try: - user_role = await get_user_role(app, user_id=user_id) + user_role = await users_service.get_user_role(app, user_id=user_id) except UserNotFoundError: user_role = None save_state = await has_user_project_access_rights( - app, project_id=ProjectID(project_uuid), user_id=user_id, permission="write" + app, project_id=project_uuid, user_id=user_id, permission="write" ) if user_role is None or user_role <= UserRole.GUEST: save_state = False @@ -1907,9 +2181,9 @@ async def remove_project_dynamic_services( get_redis_lock_manager_client_sdk(app), project_uuid=project_uuid, status=ProjectStatus.CLOSING, - owner=Owner(user_id=user_id, **user_name_data), + owner=Owner(user_id=user_id), notification_cb=( - create_user_notification_cb(user_id, ProjectID(project_uuid), app) + create_user_notification_cb(user_id, project_uuid, app) if notify_users else None ), @@ -1933,9 +2207,31 @@ async def _locked_stop_dynamic_serivces_in_project() -> None: await _locked_stop_dynamic_serivces_in_project() -# -# NOTIFICATIONS & LOCKS 
----------------------------------------------------- -# +_CONCURRENT_NOTIFICATIONS_LIMIT: Final[int] = 10 + + +async def _send_message_to_project_groups( + app: web.Application, + project_id: ProjectID, + message: SocketMessageDict, +) -> None: + project_group_get_list = await _groups_service.list_project_groups_by_project_without_checking_permissions( + app, project_id=project_id + ) + rooms_to_notify = [item.gid for item in project_group_get_list if item.read is True] + + await limited_gather( + *( + send_message_to_standard_group( + app, + room, + message, + ) + for room in rooms_to_notify + ), + log=_logger, + limit=_CONCURRENT_NOTIFICATIONS_LIMIT, + ) async def notify_project_state_update( @@ -1945,11 +2241,15 @@ async def notify_project_state_update( ) -> None: if await is_project_hidden(app, ProjectID(project["uuid"])): return + output_project_model = ProjectGet.from_domain_model(project) + assert output_project_model.state # nosec message = SocketMessageDict( event_type=SOCKET_IO_PROJECT_UPDATED_EVENT, data={ "project_uuid": project["uuid"], - "data": project["state"], + "data": output_project_model.state.model_dump( + **RESPONSE_MODEL_POLICY, + ), }, ) @@ -1958,14 +2258,9 @@ async def notify_project_state_update( app, user_id=notify_only_user, message=message, - ignore_queue=True, ) else: - rooms_to_notify: Generator[GroupID, None, None] = ( - gid for gid, rights in project["accessRights"].items() if rights["read"] - ) - for room in rooms_to_notify: - await send_message_to_standard_group(app, group_id=room, message=message) + await _send_message_to_project_groups(app, project["uuid"], message) async def notify_project_node_update( @@ -1977,24 +2272,17 @@ async def notify_project_node_update( if await is_project_hidden(app, ProjectID(project["uuid"])): return - rooms_to_notify: list[GroupID] = [ - gid for gid, rights in project["accessRights"].items() if rights["read"] - ] - message = SocketMessageDict( event_type=SOCKET_IO_NODE_UPDATED_EVENT, data={ "project_id": project["uuid"], "node_id": f"{node_id}", - # as GET projects/{project_id}/nodes/{node_id} "data": project["workbench"][f"{node_id}"], - # as GET projects/{project_id}/nodes/{node_id}/errors "errors": errors, }, ) - for room in rooms_to_notify: - await send_message_to_standard_group(app, room, message) + await _send_message_to_project_groups(app, project["uuid"], message) async def retrieve_and_notify_project_locked_state( diff --git a/services/web/server/src/simcore_service_webserver/projects/_projects_service_delete.py b/services/web/server/src/simcore_service_webserver/projects/_projects_service_delete.py index bd8d57886e44..a5fd6e0074f7 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_projects_service_delete.py +++ b/services/web/server/src/simcore_service_webserver/projects/_projects_service_delete.py @@ -48,7 +48,7 @@ async def batch_stop_services_in_project( ), _projects_service.remove_project_dynamic_services( user_id=user_id, - project_uuid=f"{project_uuid}", + project_uuid=project_uuid, app=app, simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, notify_users=False, @@ -67,6 +67,7 @@ async def delete_project_as_admin( try: # 1. 
hide with _monitor_step(state, name="hide"): + # NOTE: We do not need to use PROJECT_DB_UPDATE_REDIS_LOCK_KEY lock, as hidden field is not passed to frontend project = await _projects_repository.patch_project( app, project_uuid=project_uuid, @@ -94,13 +95,13 @@ async def delete_project_as_admin( except ProjectLockError as err: raise ProjectDeleteError( project_uuid=project_uuid, - reason=f"Cannot delete project {project_uuid} because it is currently in use. Details: {err}", + details=f"Cannot delete project {project_uuid} because it is currently in use. Details: {err}", state=state, ) from err except Exception as err: raise ProjectDeleteError( project_uuid=project_uuid, - reason=f"Unexpected error. Deletion sequence: {state=}", + details=f"Unexpected error. Deletion sequence: {state=}", state=state, ) from err diff --git a/services/web/server/src/simcore_service_webserver/projects/_security_service.py b/services/web/server/src/simcore_service_webserver/projects/_security_service.py index f3c176b4de87..b090e4ef3ff4 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_security_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_security_service.py @@ -2,7 +2,7 @@ from aiohttp import web from simcore_postgres_database.models.users import UserRole -from ..security.api import get_access_model +from ..security import security_service from ._projects_repository_legacy import ProjectDBAPI from .api import check_user_project_permission @@ -52,7 +52,7 @@ def setup_projects_access(app: web.Application): """ security - access : Inject permissions to rest API resources """ - hrba = get_access_model(app) + hrba = security_service.get_access_model(app) hrba.roles[UserRole.GUEST].check[ "project.workbench.node.inputs.update" diff --git a/services/web/server/src/simcore_service_webserver/projects/_socketio_service.py b/services/web/server/src/simcore_service_webserver/projects/_socketio_service.py new file mode 100644 index 000000000000..97b39eece447 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/_socketio_service.py @@ -0,0 +1,56 @@ +from typing import Final + +from aiohttp import web +from models_library.api_schemas_webserver.projects import ProjectDocument +from models_library.groups import GroupID +from models_library.projects import ProjectID +from models_library.socketio import SocketMessageDict +from pydantic import AliasGenerator, BaseModel, ConfigDict +from pydantic.alias_generators import to_camel + +from ..models import ClientSessionID +from ..socketio.messages import send_message_to_project_room + +SOCKET_IO_PROJECT_DOCUMENT_UPDATED_EVENT: Final[str] = "projectDocument:updated" + + +class BaseEvent(BaseModel): + model_config = ConfigDict( + populate_by_name=True, + from_attributes=True, + alias_generator=AliasGenerator( + serialization_alias=to_camel, + ), + ) + + +class ProjectDocumentEvent(BaseEvent): + project_id: ProjectID + user_primary_gid: GroupID + client_session_id: ClientSessionID | None + version: int + document: ProjectDocument + + +async def notify_project_document_updated( + app: web.Application, + *, + project_id: ProjectID, + user_primary_gid: GroupID, + client_session_id: ClientSessionID | None, + version: int, + document: ProjectDocument, +) -> None: + notification_message = SocketMessageDict( + event_type=SOCKET_IO_PROJECT_DOCUMENT_UPDATED_EVENT, + data={ + **ProjectDocumentEvent( + project_id=project_id, + user_primary_gid=user_primary_gid, + client_session_id=client_session_id, + version=version, + 
document=document, + ).model_dump(mode="json", by_alias=True), + }, + ) + await send_message_to_project_room(app, project_id, notification_message) diff --git a/services/web/server/src/simcore_service_webserver/projects/_trash_service.py b/services/web/server/src/simcore_service_webserver/projects/_trash_service.py index 926345a3a356..6bf52973e253 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_trash_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_trash_service.py @@ -11,9 +11,9 @@ from models_library.rest_pagination import MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE from models_library.users import UserID from models_library.workspaces import WorkspaceID -from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY from servicelib.utils import fire_and_forget_task +from ..constants import APP_FIRE_AND_FORGET_TASKS_KEY from ..director_v2 import director_v2_service from ..dynamic_scheduler import api as dynamic_scheduler_service from . import _crud_api_read @@ -26,7 +26,7 @@ ProjectRunningConflictError, ProjectsBatchDeleteError, ) -from .models import ProjectDict, ProjectPatchInternalExtended +from .models import ProjectDict, ProjectPatchInternalExtended, ProjectTypeAPI _logger = logging.getLogger(__name__) @@ -88,7 +88,7 @@ async def trash_project( product_name=product_name, ) - await _projects_service.patch_project( + await _projects_service.patch_project_for_user( app, user_id=user_id, product_name=product_name, @@ -98,6 +98,7 @@ async def trash_project( trashed_explicitly=explicit, trashed_by=user_id, ), + client_session_id=None, ) @@ -109,7 +110,7 @@ async def untrash_project( project_id: ProjectID, ) -> None: # NOTE: check_user_project_permission is inside projects_api.patch_project - await _projects_service.patch_project( + await _projects_service.patch_project_for_user( app, user_id=user_id, product_name=product_name, @@ -117,6 +118,7 @@ async def untrash_project( project_patch=ProjectPatchInternalExtended( trashed_at=None, trashed_explicitly=False, trashed_by=None ), + client_session_id=None, ) @@ -172,6 +174,8 @@ async def list_explicitly_trashed_projects( user_id=user_id, product_name=product_name, trashed=True, + filter_by_project_type=ProjectTypeAPI.user, + filter_by_template_type=None, offset=page_params.offset, limit=page_params.limit, order_by=OrderBy(field=IDStr("trashed"), direction=OrderDirection.ASC), @@ -220,7 +224,7 @@ async def delete_explicitly_trashed_project( raise ProjectNotTrashedError( project_uuid=project_id, user_id=user_id, - reason="Cannot delete trashed project since it does not fit current criteria", + details="Cannot delete trashed project since it does not fit current criteria", ) await _projects_service.delete_project_by_user( diff --git a/services/web/server/src/simcore_service_webserver/projects/_wallets_service.py b/services/web/server/src/simcore_service_webserver/projects/_wallets_service.py index e671b7eac6ea..db5c3e505b9b 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_wallets_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_wallets_service.py @@ -16,7 +16,7 @@ ) from ..rabbitmq import get_rabbitmq_rpc_client -from ..users import api as users_service +from ..users import users_service from ..wallets import _api as wallets_service from ._projects_repository_legacy import ProjectDBAPI from .exceptions import ( @@ -26,7 +26,7 @@ ) -async def get_project_wallet(app, project_id: ProjectID): +async def get_project_wallet(app, project_id: 
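
The `BaseEvent` model added in `_socketio_service.py` above gets its wire format from pydantic's `AliasGenerator` with `to_camel`, so snake_case fields serialize as camelCase keys in the socket.io payload. A self-contained sketch of that mechanism (field names and values are illustrative, not the actual event schema):

```python
from pydantic import AliasGenerator, BaseModel, ConfigDict
from pydantic.alias_generators import to_camel


class CamelOutEvent(BaseModel):
    model_config = ConfigDict(
        populate_by_name=True,
        alias_generator=AliasGenerator(serialization_alias=to_camel),
    )

    project_id: str
    user_primary_gid: int
    client_session_id: str | None = None


event = CamelOutEvent(project_id="d9aa0000-0000-0000-0000-000000000000", user_primary_gid=3)
# fields stay snake_case in Python, but dump as camelCase when by_alias=True
assert event.model_dump(by_alias=True) == {
    "projectId": "d9aa0000-0000-0000-0000-000000000000",
    "userPrimaryGid": 3,
    "clientSessionId": None,
}
```
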
ProjectID) -> WalletGet | None: db: ProjectDBAPI = ProjectDBAPI.get_from_app_context(app) wallet_db: WalletDB | None = await db.get_project_wallet(project_uuid=project_id) diff --git a/services/web/server/src/simcore_service_webserver/projects/_workspaces_service.py b/services/web/server/src/simcore_service_webserver/projects/_workspaces_service.py index fdf40f273717..bb57236b800a 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_workspaces_service.py +++ b/services/web/server/src/simcore_service_webserver/projects/_workspaces_service.py @@ -8,9 +8,10 @@ from simcore_postgres_database.utils_repos import transaction_context from ..db.plugin import get_asyncpg_engine -from ..users.api import get_user +from ..models import ClientSessionID +from ..users import users_service from ..workspaces.api import check_user_workspace_access -from . import _folders_repository, _groups_repository, _projects_repository +from . import _folders_repository, _groups_repository, _projects_service from ._access_rights_service import get_user_project_access_rights from .exceptions import ProjectInvalidRightsError @@ -24,6 +25,7 @@ async def move_project_into_workspace( project_id: ProjectID, workspace_id: WorkspaceID | None, product_name: ProductName, + client_session_id: ClientSessionID | None = None, ) -> None: # 1. User needs to have delete permission on project project_access_rights = await get_user_project_access_rights( @@ -51,15 +53,16 @@ async def move_project_into_workspace( ) # 4. Update workspace ID on the project resource - await _projects_repository.patch_project( + user = await users_service.get_user(app, user_id=user_id) + await _projects_service.patch_project_and_notify_users( app=app, - connection=conn, project_uuid=project_id, - new_partial_project_data={"workspace_id": workspace_id}, + patch_project_data={"workspace_id": workspace_id}, + user_primary_gid=user["primary_gid"], + client_session_id=client_session_id, ) # 5. Remove all project permissions, leave only the user who moved the project - user = await get_user(app, user_id=user_id) await _groups_repository.delete_all_project_groups( app, connection=conn, project_id=project_id ) diff --git a/services/web/server/src/simcore_service_webserver/projects/api.py b/services/web/server/src/simcore_service_webserver/projects/api.py index 96bb5948527f..35abd6b8ef68 100644 --- a/services/web/server/src/simcore_service_webserver/projects/api.py +++ b/services/web/server/src/simcore_service_webserver/projects/api.py @@ -14,6 +14,7 @@ batch_get_project_name, delete_project_by_user, get_project_dict_legacy, + patch_project_and_notify_users, ) __all__: tuple[str, ...] 
= ( @@ -25,6 +26,7 @@ "has_user_project_access_rights", "list_projects", "delete_project_by_user", + "patch_project_and_notify_users", ) diff --git a/services/web/server/src/simcore_service_webserver/projects/exceptions.py b/services/web/server/src/simcore_service_webserver/projects/exceptions.py index 0b1fe9caaf56..1b5599e60cab 100644 --- a/services/web/server/src/simcore_service_webserver/projects/exceptions.py +++ b/services/web/server/src/simcore_service_webserver/projects/exceptions.py @@ -4,6 +4,7 @@ from typing import Any from models_library.projects import ProjectID +from models_library.projects_nodes_io import NodeID from models_library.users import UserID from servicelib.redis import ProjectLockError @@ -70,12 +71,12 @@ def debug_message(self): class ProjectDeleteError(BaseProjectError): - msg_template = "Failed to complete deletion of '{project_uuid}': {reason}" + msg_template = "Failed to complete deletion of '{project_uuid}': {details}" - def __init__(self, *, project_uuid, reason, **ctx): + def __init__(self, *, project_uuid, details, **ctx): super().__init__(**ctx) self.project_uuid = project_uuid - self.reason = reason + self.details = details class ProjectsBatchDeleteError(BaseProjectError): @@ -107,7 +108,7 @@ class ProjectRunningConflictError(ProjectTrashError): class ProjectNotTrashedError(ProjectTrashError): msg_template = ( - "Cannot delete project {project_uuid} since it was not trashed first: {reason}" + "Cannot delete project {project_uuid} since it was not trashed first: {details}" ) @@ -120,6 +121,17 @@ def __init__(self, *, project_uuid: str, node_uuid: str, **ctx): self.project_uuid = project_uuid +class NodeShareStateCannotBeComputedError(BaseProjectError): + msg_template = ( + "Node '{node_uuid}' share state cannot be computed in project '{project_uuid}'" + ) + + def __init__(self, *, project_uuid: ProjectID | None, node_uuid: NodeID, **ctx): + super().__init__(**ctx) + self.node_uuid = node_uuid + self.project_uuid = project_uuid + + class ParentNodeNotFoundError(BaseProjectError): msg_template = "Parent node '{node_uuid}' not found" @@ -147,13 +159,21 @@ def __init__(self, *, user_id: UserID, project_uuid: ProjectID, **ctx): class ProjectTooManyProjectOpenedError(BaseProjectError): - msg_template = "You cannot open more than {max_num_projects} study/ies at once. Please close another study and retry." + msg_template = "You cannot open more than {max_num_projects} project/s at once. Please close another project and retry." def __init__(self, *, max_num_projects: int, **ctx): super().__init__(**ctx) self.max_num_projects = max_num_projects +class ProjectTooManyUserSessionsError(BaseProjectError): + msg_template = "You cannot open more than {max_num_sessions} session(s) for the same project at once. Please close another session and retry." + + def __init__(self, *, max_num_sessions: int, **ctx): + super().__init__(**ctx) + self.max_num_sessions = max_num_sessions + + class PermalinkNotAllowedError(BaseProjectError): ... @@ -249,7 +269,7 @@ class InvalidInputValue(WebServerBaseError): class ProjectGroupNotFoundError(BaseProjectError): - msg_template = "Project group not found. {reason}" + msg_template = "Project group not found. 
{details}" class ProjectInDebtCanNotChangeWalletError(BaseProjectError): diff --git a/services/web/server/src/simcore_service_webserver/projects/models.py b/services/web/server/src/simcore_service_webserver/projects/models.py index 03d595d741c6..84bf04f8d531 100644 --- a/services/web/server/src/simcore_service_webserver/projects/models.py +++ b/services/web/server/src/simcore_service_webserver/projects/models.py @@ -72,10 +72,16 @@ class ProjectDBGet(BaseModel): ) +class ProjectWithWorkbenchDBGet(ProjectDBGet): + # This model is used to read the project with its workbench + workbench: NodesDict + + class ProjectJobDBGet(ProjectDBGet): workbench: NodesDict job_parent_resource_name: str + storage_assets_deleted: bool class ProjectWithTrashExtra(ProjectDBGet): @@ -112,7 +118,7 @@ class ProjectPatchInternalExtended(ProjectPatch): trashed_by: UserID | None trashed_explicitly: bool - model_config = ConfigDict(populate_by_name=True, extra="forbid") + model_config = ConfigDict(validate_by_name=True, extra="forbid") def to_domain_model(self) -> dict[str, Any]: return remap_keys( diff --git a/services/web/server/src/simcore_service_webserver/projects/nodes_utils.py b/services/web/server/src/simcore_service_webserver/projects/nodes_utils.py index 4cf8a690aee8..00f9c25b8a38 100644 --- a/services/web/server/src/simcore_service_webserver/projects/nodes_utils.py +++ b/services/web/server/src/simcore_service_webserver/projects/nodes_utils.py @@ -1,6 +1,4 @@ import logging -from collections import deque -from collections.abc import Coroutine from typing import Any from aiohttp import web @@ -8,12 +6,11 @@ from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID from models_library.users import UserID -from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY from servicelib.logging_utils import log_decorator -from servicelib.utils import fire_and_forget_task, logged_gather +from servicelib.utils import logged_gather +from ..models import ClientSessionID from . 
import _projects_service -from .utils import get_frontend_node_outputs_changes log = logging.getLogger(__name__) @@ -42,6 +39,7 @@ async def update_node_outputs( outputs: dict, run_hash: str | None, node_errors: list[ErrorDict] | None, + client_session_id: ClientSessionID | None, *, ui_changed_keys: set[str] | None, ) -> None: @@ -53,6 +51,7 @@ async def update_node_outputs( node_uuid, new_outputs=outputs, new_run_hash=run_hash, + client_session_id=client_session_id, ) await _projects_service.notify_project_node_update( @@ -89,48 +88,3 @@ async def update_node_outputs( await _projects_service.post_trigger_connected_service_retrieve( app=app, project=project, updated_node_uuid=f"{node_uuid}", changed_keys=keys ) - - -async def update_frontend_outputs( - app: web.Application, - user_id: UserID, - project_uuid: ProjectID, - old_project: dict[str, Any], - new_project: dict[str, Any], -) -> None: - old_workbench = old_project["workbench"] - new_workbench = new_project["workbench"] - frontend_nodes_update_tasks: deque[Coroutine] = deque() - - for node_key, node in new_workbench.items(): - old_node = old_workbench.get(node_key) - if not old_node: - continue - - # check if there were any changes in the outputs of - # frontend services - # NOTE: for now only file-picker is handled - outputs_changes: set[str] = get_frontend_node_outputs_changes( - new_node=node, old_node=old_node - ) - - if len(outputs_changes) > 0: - frontend_nodes_update_tasks.append( - update_node_outputs( - app=app, - user_id=user_id, - project_uuid=project_uuid, - node_uuid=node_key, - outputs=node.get("outputs", {}), - run_hash=None, - node_errors=None, - ui_changed_keys=outputs_changes, - ) - ) - - for task_index, frontend_node_update_task in enumerate(frontend_nodes_update_tasks): - fire_and_forget_task( - frontend_node_update_task, - task_suffix_name=f"frontend_node_update_task_{task_index}", - fire_and_forget_tasks_collection=app[APP_FIRE_AND_FORGET_TASKS_KEY], - ) diff --git a/services/web/server/src/simcore_service_webserver/projects/plugin.py b/services/web/server/src/simcore_service_webserver/projects/plugin.py index 5028739d881b..8c7da0ff45f7 100644 --- a/services/web/server/src/simcore_service_webserver/projects/plugin.py +++ b/services/web/server/src/simcore_service_webserver/projects/plugin.py @@ -5,10 +5,11 @@ """ import logging +from typing import Final from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func from ..constants import APP_SETTINGS_KEY from ..rabbitmq import setup_rabbitmq from ._controller import ( @@ -29,13 +30,22 @@ wallets_rest, workspaces_rest, ) +from ._controller.nodes_rest import register_stop_dynamic_service_task +from ._crud_api_create import register_create_project_task from ._projects_repository_legacy import setup_projects_db from ._security_service import setup_projects_access logger = logging.getLogger(__name__) +APP_PROJECTS_CLIENT_KEY: Final = web.AppKey("APP_PROJECTS_CLIENT_KEY", object) -@app_module_setup( + +def register_projects_long_running_tasks(app: web.Application) -> None: + register_create_project_task(app) + register_stop_dynamic_service_task(app) + + +@app_setup_func( "simcore_service_webserver.projects", ModuleCategory.ADDON, settings_name="WEBSERVER_PROJECTS", diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_documents_service.py b/services/web/server/src/simcore_service_webserver/projects/projects_documents_service.py new 
file mode 100644 index 000000000000..0aedc1597a0c --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/projects/projects_documents_service.py @@ -0,0 +1,8 @@ +from ._project_document_service import ( + remove_project_documents_as_admin, +) + +__all__: tuple[str, ...] = ("remove_project_documents_as_admin",) + + +# nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/projects/utils.py b/services/web/server/src/simcore_service_webserver/projects/utils.py index 18a02a5fb3cf..63697ea6f172 100644 --- a/services/web/server/src/simcore_service_webserver/projects/utils.py +++ b/services/web/server/src/simcore_service_webserver/projects/utils.py @@ -93,6 +93,12 @@ def _replace_uuids(node: str | list | dict) -> str | list | dict: project_copy["ui"].get("slideshow", {}) ) + # exclude annotations UI info for conversations done in the source project + annotations = deepcopy(project_copy.get("ui", {}).get("annotations", {})) or {} + for ann_id, ann in annotations.items(): + if ann["type"] == "conversation": + project_copy["ui"]["annotations"].pop(ann_id) + if clean_output_data: for node_data in project_copy.get("workbench", {}).values(): for field in _FIELDS_TO_DELETE: @@ -189,41 +195,6 @@ def is_graph_equal( return True -async def project_uses_available_services( - project: dict[str, Any], available_services: list[dict[str, Any]] -) -> bool: - if not project["workbench"]: - # empty project - return True - # get project services - needed_services: set[tuple[str, str]] = { - (s["key"], s["version"]) for _, s in project["workbench"].items() - } - - # get available services - available_services_set: set[tuple[str, str]] = { - (s["key"], s["version"]) for s in available_services - } - - return needed_services.issubset(available_services_set) - - -def get_project_unavailable_services( - project: dict[str, Any], available_services: list[dict[str, Any]] -) -> set[tuple[str, str]]: - # get project services - required: set[tuple[str, str]] = { - (s["key"], s["version"]) for _, s in project["workbench"].items() - } - - # get available services - available: set[tuple[str, str]] = { - (s["key"], s["version"]) for s in available_services - } - - return required - available - - def extract_dns_without_default_port(url: URL) -> str: port = "" if url.port == 80 else f":{url.port}" return f"{url.host}{port}" diff --git a/services/web/server/src/simcore_service_webserver/publications/_rest.py b/services/web/server/src/simcore_service_webserver/publications/_rest.py index 63b9b64b61c3..8bbeec94aefc 100644 --- a/services/web/server/src/simcore_service_webserver/publications/_rest.py +++ b/services/web/server/src/simcore_service_webserver/publications/_rest.py @@ -3,17 +3,17 @@ from aiohttp import MultipartReader, hdrs, web from common_library.json_serialization import json_dumps from servicelib.aiohttp import status +from servicelib.aiohttp.request_keys import RQT_USERID_KEY from servicelib.mimetype_constants import ( MIMETYPE_APPLICATION_JSON, MIMETYPE_APPLICATION_ZIP, ) -from servicelib.request_keys import RQT_USERID_KEY from .._meta import API_VTAG as VTAG from ..login._emails_service import AttachmentTuple, send_email_from_template, themed from ..login.decorators import login_required -from ..login.login_repository_legacy import AsyncpgStorage, get_plugin_storage from ..products import products_web +from ..users import users_service from ._utils import json2html _logger = logging.getLogger(__name__) @@ -46,20 +46,20 @@ async def service_submission(request: web.Request): maxsize 
= 10 * 1024 * 1024 # 10MB actualsize = len(filedata) if actualsize > maxsize: - raise web.HTTPRequestEntityTooLarge(maxsize, actualsize) + raise web.HTTPRequestEntityTooLarge( + max_size=maxsize, actual_size=actualsize + ) filename = part.filename # type: ignore[union-attr] # PC, IP Whoever is in charge of this. please have a look continue raise web.HTTPUnsupportedMediaType( - reason=f"One part had an unexpected type: {part.headers[hdrs.CONTENT_TYPE]}" + text=f"One part had an unexpected type: {part.headers[hdrs.CONTENT_TYPE]}" ) support_email_address = product.support_email - db: AsyncpgStorage = get_plugin_storage(request.app) - user = await db.get_user({"id": request[RQT_USERID_KEY]}) - assert user # nosec - user_email = user.get("email") - assert user_email # nosec + user = await users_service.get_user_name_and_email( + request.app, user_id=request[RQT_USERID_KEY] + ) try: attachments = [ @@ -78,11 +78,11 @@ async def service_submission(request: web.Request): # send email await send_email_from_template( request, - from_=user_email, + from_=user.email, to=support_email_address, template=themed("templates/common", _EMAIL_TEMPLATE_NAME), context={ - "user": user_email, + "user": user.email, "data": json2html.convert( json=json_dumps(data), table_attributes='class="pure-table"' ), diff --git a/services/web/server/src/simcore_service_webserver/publications/plugin.py b/services/web/server/src/simcore_service_webserver/publications/plugin.py index a85b83cf3b84..8a755c62f5a4 100644 --- a/services/web/server/src/simcore_service_webserver/publications/plugin.py +++ b/services/web/server/src/simcore_service_webserver/publications/plugin.py @@ -3,9 +3,9 @@ import logging from aiohttp import web -from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func +from ..constants import APP_SETTINGS_KEY from ..email.plugin import setup_email from ..products.plugin import setup_products from . 
import _rest @@ -13,7 +13,7 @@ _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, depends=["simcore_service_webserver.rest"], diff --git a/services/web/server/src/simcore_service_webserver/rabbitmq.py b/services/web/server/src/simcore_service_webserver/rabbitmq.py index c415c53057ff..fdfc7234aa8d 100644 --- a/services/web/server/src/simcore_service_webserver/rabbitmq.py +++ b/services/web/server/src/simcore_service_webserver/rabbitmq.py @@ -3,19 +3,22 @@ from typing import Final, cast from aiohttp import web +from models_library.api_schemas_webserver import get_webserver_rpc_namespace from models_library.errors import RABBITMQ_CLIENT_UNHEALTHY_MSG +from models_library.rabbitmq_basic_types import RPCNamespace from servicelib.aiohttp.application_keys import ( APP_RABBITMQ_CLIENT_KEY, APP_RABBITMQ_RPC_SERVER_KEY, ) -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup from servicelib.logging_utils import log_context from servicelib.rabbitmq import ( RabbitMQClient, RabbitMQRPCClient, + RPCRouter, wait_till_rabbitmq_responsive, ) +from .application_settings import get_application_settings from .rabbitmq_settings import RabbitSettings, get_plugin_settings from .rest.healthcheck import HealthCheck, HealthCheckError @@ -72,7 +75,7 @@ async def _rabbitmq_rpc_client_lifespan(app: web.Application): await rpc_client.close() -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_RABBITMQ", @@ -94,3 +97,28 @@ def get_rabbitmq_client(app: web.Application) -> RabbitMQClient: def get_rabbitmq_rpc_server(app: web.Application) -> RabbitMQRPCClient: return cast(RabbitMQRPCClient, app[APP_RABBITMQ_RPC_SERVER_KEY]) + + +def get_rpc_namespace(app: web.Application) -> RPCNamespace: + settings = get_application_settings(app) + + assert settings.WEBSERVER_HOST # nosec + return get_webserver_rpc_namespace(settings.WEBSERVER_HOST) + + +def create_register_rpc_routes_on_startup(router: RPCRouter): + """ + This high-order function allows for more flexible router registration + by accepting a router instance and returning an on-startup event handler. 
+ + Args: + router: The RPCRouter instance containing the routes to register + + """ + + async def _on_startup(app: web.Application): + rpc_server = get_rabbitmq_rpc_server(app) + rpc_namespace = get_rpc_namespace(app) + await rpc_server.register_router(router, rpc_namespace, app) + + return _on_startup diff --git a/services/web/server/src/simcore_service_webserver/redis.py b/services/web/server/src/simcore_service_webserver/redis.py index cd66a4e004d5..55ea0e6e6338 100644 --- a/services/web/server/src/simcore_service_webserver/redis.py +++ b/services/web/server/src/simcore_service_webserver/redis.py @@ -1,19 +1,19 @@ import logging +from typing import Final import redis.asyncio as aioredis from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup from servicelib.redis import RedisClientSDK, RedisClientsManager, RedisManagerDBConfig from settings_library.redis import RedisDatabase, RedisSettings from ._meta import APP_NAME +from .application_setup import ModuleCategory, app_setup_func from .constants import APP_SETTINGS_KEY _logger = logging.getLogger(__name__) -_APP_REDIS_CLIENTS_MANAGER = f"{__name__}.redis_clients_manager" - +APP_REDIS_CLIENT_KEY: Final = web.AppKey("APP_REDIS_CLIENT_KEY", RedisClientsManager) # SETTINGS -------------------------------------------------------------------------- @@ -32,7 +32,7 @@ async def setup_redis_client(app: web.Application): raises builtin ConnectionError """ redis_settings: RedisSettings = get_plugin_settings(app) - app[_APP_REDIS_CLIENTS_MANAGER] = manager = RedisClientsManager( + app[APP_REDIS_CLIENT_KEY] = manager = RedisClientsManager( databases_configs={ RedisManagerDBConfig(database=db) for db in ( @@ -42,6 +42,7 @@ async def setup_redis_client(app: web.Application): RedisDatabase.SCHEDULED_MAINTENANCE, RedisDatabase.USER_NOTIFICATIONS, RedisDatabase.ANNOUNCEMENTS, + RedisDatabase.DOCUMENTS, ) }, settings=redis_settings, @@ -61,7 +62,7 @@ async def setup_redis_client(app: web.Application): def _get_redis_client_sdk( app: web.Application, database: RedisDatabase ) -> RedisClientSDK: - redis_client: RedisClientsManager = app[_APP_REDIS_CLIENTS_MANAGER] + redis_client: RedisClientsManager = app[APP_REDIS_CLIENT_KEY] return redis_client.client(database) @@ -81,6 +82,10 @@ def get_redis_lock_manager_client_sdk(app: web.Application) -> RedisClientSDK: return _get_redis_client_sdk(app, RedisDatabase.LOCKS) +def get_redis_document_manager_client_sdk(app: web.Application) -> RedisClientSDK: + return _get_redis_client_sdk(app, RedisDatabase.DOCUMENTS) + + def get_redis_validation_code_client(app: web.Application) -> aioredis.Redis: redis_client: aioredis.Redis = _get_redis_client_sdk( app, RedisDatabase.VALIDATION_CODES @@ -112,7 +117,7 @@ def get_redis_announcements_client(app: web.Application) -> aioredis.Redis: # PLUGIN SETUP -------------------------------------------------------------------------- -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_REDIS", logger=_logger ) def setup_redis(app: web.Application): diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/_registry_utils.py b/services/web/server/src/simcore_service_webserver/resource_manager/_registry_utils.py new file mode 100644 index 000000000000..33452cc252ae --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/resource_manager/_registry_utils.py @@ -0,0 +1,18 @@ +import logging + +from models_library.projects import ProjectID + +from .registry import 
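
The `create_register_rpc_routes_on_startup` helper added to `rabbitmq.py` above is a higher-order function: it captures a router and hands back an async handler that aiohttp invokes on startup. A standalone sketch of the same wiring pattern (no RabbitMQ involved; the `web.AppKey` and the router-name strings are illustrative stand-ins for the real `RPCRouter` registration):

```python
from aiohttp import web

REGISTERED_ROUTERS = web.AppKey("REGISTERED_ROUTERS", list)


def create_register_router_on_startup(router_name: str):
    """Capture `router_name` and return an aiohttp on-startup handler."""

    async def _on_startup(app: web.Application) -> None:
        # the real helper calls rpc_server.register_router(router, rpc_namespace, app) here
        app[REGISTERED_ROUTERS].append(router_name)

    return _on_startup


app = web.Application()
app[REGISTERED_ROUTERS] = []
# each plugin appends its own registration handler without knowing about the RPC server;
# the handlers run when the application starts (web.run_app / AppRunner)
app.on_startup.append(create_register_router_on_startup("projects_rpc"))
app.on_startup.append(create_register_router_on_startup("functions_rpc"))
```
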
RedisResourceRegistry + +_logger = logging.getLogger(__name__) + + +async def list_opened_project_ids(registry: RedisResourceRegistry) -> list[ProjectID]: + """Lists all project IDs that are currently opened in active sessions.""" + opened_projects: list[ProjectID] = [] + all_session_alive, _ = await registry.get_all_resource_keys() + for alive_session in all_session_alive: + resources = await registry.get_resources(alive_session) + if projects_id := resources.get("project_id"): + opened_projects.append(ProjectID(projects_id)) + return opened_projects diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/models.py b/services/web/server/src/simcore_service_webserver/resource_manager/models.py new file mode 100644 index 000000000000..6de3676498e9 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/resource_manager/models.py @@ -0,0 +1,58 @@ +from typing import Final, Self, TypeAlias, TypedDict + +from models_library.basic_types import UUIDStr +from models_library.users import UserID +from pydantic import BaseModel, ConfigDict +from pydantic.config import JsonDict + +ALIVE_SUFFIX: Final[str] = "alive" # points to a string type +RESOURCE_SUFFIX: Final[str] = "resources" # points to a hash (like a dict) type +RedisHashKey: TypeAlias = str + + +class UserSession(BaseModel): + """Parts of the key used in redis for a user-session""" + + user_id: UserID + client_session_id: str + + def to_redis_hash_key(self) -> RedisHashKey: + return ":".join(f"{k}={v}" for k, v in self.model_dump().items()) + + @classmethod + def from_redis_hash_key(cls, hash_key: RedisHashKey) -> Self: + key = dict(x.split("=", 1) for x in hash_key.split(":") if "=" in x) + return cls.model_validate(key) + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ + { + "user_id": 7, + "client_session_id": "c7fc4985-f96a-4be3-a8ed-5a43b1aa15e2", + }, + { + "user_id": 666, + "client_session_id": "*", + }, + ] + } + ) + + model_config = ConfigDict( + frozen=True, + json_schema_extra=_update_json_schema_extra, + ) + + +class ResourcesDict(TypedDict, total=False): + """Field-value pairs of {user_id}:{client_session_id}:resources key""" + + project_id: UUIDStr + socket_id: str + + +AliveSessions: TypeAlias = list[UserSession] +DeadSessions: TypeAlias = list[UserSession] diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/plugin.py b/services/web/server/src/simcore_service_webserver/resource_manager/plugin.py index 803632e7d63c..4f3372c5511e 100644 --- a/services/web/server/src/simcore_service_webserver/resource_manager/plugin.py +++ b/services/web/server/src/simcore_service_webserver/resource_manager/plugin.py @@ -1,24 +1,30 @@ -""" resource manager subsystem +"""resource manager subsystem - Takes care of managing user generated resources such as: +Takes care of managing user generated resources such as: - - interactive services - - generated data +- interactive services + - generated data """ + import logging +from typing import Final from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func from ..redis import setup_redis from ._constants import APP_CLIENT_SOCKET_REGISTRY_KEY, APP_RESOURCE_MANAGER_TASKS_KEY from .registry import RedisResourceRegistry _logger = logging.getLogger(__name__) +APP_RESOURCE_MANAGER_CLIENT_KEY: Final = web.AppKey( + "APP_RESOURCE_MANAGER_CLIENT_KEY", object +) + 
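
`UserSession` in `resource_manager/models.py` above replaces the old `UserSessionDict`/`UserSessionID` pair and serializes itself to the flat redis key scheme `user_id=<id>:client_session_id=<id>` that `registry.py` extends with the `:alive` and `:resources` suffixes. A minimal standalone round-trip of that key format (pydantic v2, same field names and example values as the diff):

```python
from typing import Self

from pydantic import BaseModel, ConfigDict


class UserSession(BaseModel):
    model_config = ConfigDict(frozen=True)

    user_id: int
    client_session_id: str

    def to_redis_hash_key(self) -> str:
        return ":".join(f"{k}={v}" for k, v in self.model_dump().items())

    @classmethod
    def from_redis_hash_key(cls, hash_key: str) -> Self:
        return cls.model_validate(
            dict(x.split("=", 1) for x in hash_key.split(":") if "=" in x)
        )


session = UserSession(user_id=7, client_session_id="c7fc4985-f96a-4be3-a8ed-5a43b1aa15e2")
key = session.to_redis_hash_key()
assert key == "user_id=7:client_session_id=c7fc4985-f96a-4be3-a8ed-5a43b1aa15e2"
# registry.py appends ":alive" / ":resources" to this key and strips the suffix when decoding
assert UserSession.from_redis_hash_key(key) == session
```
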
-@app_module_setup( +@app_setup_func( "simcore_service_webserver.resource_manager", ModuleCategory.SYSTEM, settings_name="WEBSERVER_RESOURCE_MANAGER", diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/registry.py b/services/web/server/src/simcore_service_webserver/resource_manager/registry.py index 8ebd3b57ab69..3946ad018d06 100644 --- a/services/web/server/src/simcore_service_webserver/resource_manager/registry.py +++ b/services/web/server/src/simcore_service_webserver/resource_manager/registry.py @@ -17,14 +17,18 @@ import redis.asyncio as aioredis from aiohttp import web -from models_library.basic_types import UUIDStr from servicelib.redis import handle_redis_returns_union_types -from typing_extensions import ( # https://docs.pydantic.dev/latest/api/standard_library_types/#typeddict - TypedDict, -) from ..redis import get_redis_resources_client from ._constants import APP_CLIENT_SOCKET_REGISTRY_KEY +from .models import ( + ALIVE_SUFFIX, + RESOURCE_SUFFIX, + AliveSessions, + DeadSessions, + ResourcesDict, + UserSession, +) _logger = logging.getLogger(__name__) @@ -33,25 +37,6 @@ # Key: user_id=1:client_session_id=7f40353b-db02-4474-a44d-23ce6a6e428c:alive = 1 # Key: user_id=1:client_session_id=7f40353b-db02-4474-a44d-23ce6a6e428c:resources = {project_id: ... , socket_id: ...} # -_ALIVE_SUFFIX = "alive" # points to a string type -_RESOURCE_SUFFIX = "resources" # points to a hash (like a dict) type - - -class _UserRequired(TypedDict, total=True): - user_id: str | int - - -class UserSessionDict(_UserRequired): - """Parts of the key used in redis for a user-session""" - - client_session_id: str - - -class ResourcesDict(TypedDict, total=False): - """Field-value pairs of {user_id}:{client_session_id}:resources key""" - - project_id: UUIDStr - socket_id: str class RedisResourceRegistry: @@ -73,51 +58,42 @@ def app(self) -> web.Application: return self._app @classmethod - def _hash_key(cls, key: UserSessionDict) -> str: - hash_key: str = ":".join(f"{k}={v}" for k, v in key.items()) - return hash_key - - @classmethod - def _decode_hash_key(cls, hash_key: str) -> UserSessionDict: + def _decode_hash_key(cls, hash_key: str) -> UserSession: tmp_key = ( - hash_key[: -len(f":{_RESOURCE_SUFFIX}")] - if hash_key.endswith(f":{_RESOURCE_SUFFIX}") - else hash_key[: -len(f":{_ALIVE_SUFFIX}")] + hash_key[: -len(f":{RESOURCE_SUFFIX}")] + if hash_key.endswith(f":{RESOURCE_SUFFIX}") + else hash_key[: -len(f":{ALIVE_SUFFIX}")] ) key = dict(x.split("=") for x in tmp_key.split(":")) - return UserSessionDict(**key) # type: ignore + return UserSession(**key) # type: ignore @property def client(self) -> aioredis.Redis: client: aioredis.Redis = get_redis_resources_client(self.app) return client - async def set_resource( - self, key: UserSessionDict, resource: tuple[str, str] - ) -> None: - hash_key = f"{self._hash_key(key)}:{_RESOURCE_SUFFIX}" + async def set_resource(self, key: UserSession, resource: tuple[str, str]) -> None: + hash_key = f"{key.to_redis_hash_key()}:{RESOURCE_SUFFIX}" field, value = resource await handle_redis_returns_union_types( self.client.hset(hash_key, mapping={field: value}) ) - async def get_resources(self, key: UserSessionDict) -> ResourcesDict: - hash_key = f"{self._hash_key(key)}:{_RESOURCE_SUFFIX}" + async def get_resources(self, key: UserSession) -> ResourcesDict: + hash_key = f"{key.to_redis_hash_key()}:{RESOURCE_SUFFIX}" fields = await handle_redis_returns_union_types(self.client.hgetall(hash_key)) return ResourcesDict(**fields) - async def 
remove_resource(self, key: UserSessionDict, resource_name: str) -> None: - hash_key = f"{self._hash_key(key)}:{_RESOURCE_SUFFIX}" + async def remove_resource(self, key: UserSession, resource_name: str) -> None: + hash_key = f"{key.to_redis_hash_key()}:{RESOURCE_SUFFIX}" await handle_redis_returns_union_types( self.client.hdel(hash_key, resource_name) ) - async def find_resources( - self, key: UserSessionDict, resource_name: str - ) -> list[str]: + async def find_resources(self, key: UserSession, resource_name: str) -> list[str]: resources: list[str] = [] # the key might only be partialy complete - partial_hash_key = f"{self._hash_key(key)}:{_RESOURCE_SUFFIX}" + partial_hash_key = f"{key.to_redis_hash_key()}:{RESOURCE_SUFFIX}" async for scanned_key in self.client.scan_iter(match=partial_hash_key): if await handle_redis_returns_union_types( self.client.hexists(scanned_key, resource_name) @@ -129,44 +105,39 @@ async def find_resources( resources.append(key_value) return resources - async def find_keys(self, resource: tuple[str, str]) -> list[UserSessionDict]: - if not resource: - return [] - + async def find_keys(self, resource: tuple[str, str]) -> list[UserSession]: field, value = resource return [ self._decode_hash_key(hash_key) - async for hash_key in self.client.scan_iter(match=f"*:{_RESOURCE_SUFFIX}") + async for hash_key in self.client.scan_iter(match=f"*:{RESOURCE_SUFFIX}") if value == await handle_redis_returns_union_types(self.client.hget(hash_key, field)) ] - async def set_key_alive(self, key: UserSessionDict, timeout: int) -> None: + async def set_key_alive(self, key: UserSession, *, expiration_time: int) -> None: # setting the timeout to always expire, timeout > 0 - timeout = int(max(1, timeout)) - hash_key = f"{self._hash_key(key)}:{_ALIVE_SUFFIX}" - await self.client.set(hash_key, 1, ex=timeout) + expiration_time = int(max(1, expiration_time)) + hash_key = f"{key.to_redis_hash_key()}:{ALIVE_SUFFIX}" + await self.client.set(hash_key, 1, ex=expiration_time) - async def is_key_alive(self, key: UserSessionDict) -> bool: - hash_key = f"{self._hash_key(key)}:{_ALIVE_SUFFIX}" + async def is_key_alive(self, key: UserSession) -> bool: + hash_key = f"{key.to_redis_hash_key()}:{ALIVE_SUFFIX}" return bool(await self.client.exists(hash_key) > 0) - async def remove_key(self, key: UserSessionDict) -> None: + async def remove_key(self, key: UserSession) -> None: await self.client.delete( - f"{self._hash_key(key)}:{_RESOURCE_SUFFIX}", - f"{self._hash_key(key)}:{_ALIVE_SUFFIX}", + f"{key.to_redis_hash_key()}:{RESOURCE_SUFFIX}", + f"{key.to_redis_hash_key()}:{ALIVE_SUFFIX}", ) - async def get_all_resource_keys( - self, - ) -> tuple[list[UserSessionDict], list[UserSessionDict]]: + async def get_all_resource_keys(self) -> tuple[AliveSessions, DeadSessions]: alive_keys = [ self._decode_hash_key(hash_key) - async for hash_key in self.client.scan_iter(match=f"*:{_ALIVE_SUFFIX}") + async for hash_key in self.client.scan_iter(match=f"*:{ALIVE_SUFFIX}") ] dead_keys = [ self._decode_hash_key(hash_key) - async for hash_key in self.client.scan_iter(match=f"*:{_RESOURCE_SUFFIX}") + async for hash_key in self.client.scan_iter(match=f"*:{RESOURCE_SUFFIX}") if self._decode_hash_key(hash_key) not in alive_keys ] diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/service.py b/services/web/server/src/simcore_service_webserver/resource_manager/service.py new file mode 100644 index 000000000000..d9a3f898e3b7 --- /dev/null +++ 
b/services/web/server/src/simcore_service_webserver/resource_manager/service.py @@ -0,0 +1,3 @@ +from ._registry_utils import list_opened_project_ids + +__all__ = ("list_opened_project_ids",) diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/settings.py b/services/web/server/src/simcore_service_webserver/resource_manager/settings.py index b3b954667ad5..05075787c8bf 100644 --- a/services/web/server/src/simcore_service_webserver/resource_manager/settings.py +++ b/services/web/server/src/simcore_service_webserver/resource_manager/settings.py @@ -1,8 +1,9 @@ from aiohttp import web from pydantic import Field, PositiveInt -from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY from settings_library.base import BaseCustomSettings +from ..constants import APP_SETTINGS_KEY + class ResourceManagerSettings(BaseCustomSettings): diff --git a/services/web/server/src/simcore_service_webserver/resource_manager/user_sessions.py b/services/web/server/src/simcore_service_webserver/resource_manager/user_sessions.py index d78f6e8f56aa..15253d645731 100644 --- a/services/web/server/src/simcore_service_webserver/resource_manager/user_sessions.py +++ b/services/web/server/src/simcore_service_webserver/resource_manager/user_sessions.py @@ -2,18 +2,20 @@ from collections.abc import Iterator from contextlib import contextmanager from dataclasses import dataclass +from functools import cached_property from typing import Final from aiohttp import web -from servicelib.logging_utils import get_log_record_extra, log_context +from common_library.logging.logging_base import get_log_record_extra +from models_library.users import UserID +from servicelib.logging_utils import log_context +from .models import ResourcesDict, UserSession from .registry import ( RedisResourceRegistry, - ResourcesDict, - UserSessionDict, get_registry, ) -from .settings import ResourceManagerSettings, get_plugin_settings +from .settings import get_plugin_settings _logger = logging.getLogger(__name__) @@ -26,16 +28,10 @@ def _get_service_deletion_timeout(app: web.Application) -> int: - settings: ResourceManagerSettings = get_plugin_settings(app) + settings = get_plugin_settings(app) return settings.RESOURCE_MANAGER_RESOURCE_TTL_S -@dataclass(order=True, frozen=True) -class UserSessionID: - user_id: int - client_session_id: str - - @dataclass class UserSessionResourcesRegistry: """ @@ -60,7 +56,7 @@ class UserSessionResourcesRegistry: """ - user_id: int + user_id: UserID client_session_id: str | None # Every tab that a user opens app: web.Application @@ -68,9 +64,10 @@ class UserSessionResourcesRegistry: def _registry(self) -> RedisResourceRegistry: return get_registry(self.app) - def _resource_key(self) -> UserSessionDict: - return UserSessionDict( - user_id=f"{self.user_id}", + @cached_property + def resource_key(self) -> UserSession: + return UserSession( + user_id=self.user_id, client_session_id=self.client_session_id or "*", ) @@ -84,12 +81,12 @@ async def set_socket_id(self, socket_id: str) -> None: ) await self._registry.set_resource( - self._resource_key(), (_SOCKET_ID_FIELDNAME, socket_id) + self.resource_key, (_SOCKET_ID_FIELDNAME, socket_id) ) # NOTE: hearthbeat is not emulated in tests, make sure that with very small GC intervals # the resources do not expire; this value is usually in the order of minutes timeout = max(3, _get_service_deletion_timeout(self.app)) - await self._registry.set_key_alive(self._resource_key(), timeout) + await self._registry.set_key_alive(self.resource_key, 
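
`set_key_alive` / `is_key_alive` in `registry.py` above implement the session heartbeat as a throwaway redis value whose TTL is refreshed on every heartbeat and shrunk to one second on disconnect, so the garbage collector picks the session up shortly after. A standalone sketch of that pattern against plain `redis.asyncio` (connection URL and key are illustrative; a local redis is assumed):

```python
import asyncio

import redis.asyncio as aioredis


async def main() -> None:
    client = aioredis.from_url("redis://localhost:6379", decode_responses=True)
    alive_key = "user_id=7:client_session_id=c7fc4985:alive"  # illustrative key

    # heartbeat: (re)create the marker with the configured TTL (always >= 1 second)
    await client.set(alive_key, 1, ex=max(1, 60))
    print("alive:", await client.exists(alive_key) > 0)  # True

    # user disconnect: shrink the TTL so the next GC cycles see the session as dead
    await client.set(alive_key, 1, ex=1)
    await asyncio.sleep(1.5)
    print("alive:", await client.exists(alive_key) > 0)  # False once expired

    await client.aclose()


asyncio.run(main())
```
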
expiration_time=timeout) async def get_socket_id(self) -> str | None: _logger.debug( @@ -98,7 +95,7 @@ async def get_socket_id(self) -> str | None: self.client_session_id, ) - resources = await self._registry.get_resources(self._resource_key()) + resources = await self._registry.get_resources(self.resource_key) key: str | None = resources.get("socket_id", None) return key @@ -106,7 +103,7 @@ async def user_pressed_disconnect(self) -> None: """When the user disconnects expire as soon as possible the alive key to ensure garbage collection will trigger in the next 2 cycles.""" - await self._registry.set_key_alive(self._resource_key(), 1) + await self._registry.set_key_alive(self.resource_key, expiration_time=1) async def remove_socket_id(self) -> None: _logger.debug( @@ -116,16 +113,18 @@ async def remove_socket_id(self) -> None: extra=get_log_record_extra(user_id=self.user_id), ) - await self._registry.remove_resource(self._resource_key(), _SOCKET_ID_FIELDNAME) + await self._registry.remove_resource(self.resource_key, _SOCKET_ID_FIELDNAME) await self._registry.set_key_alive( - self._resource_key(), _get_service_deletion_timeout(self.app) + self.resource_key, + expiration_time=_get_service_deletion_timeout(self.app), ) async def set_heartbeat(self) -> None: """Extends TTL to avoid expiration of all resources under this session""" await self._registry.set_key_alive( - self._resource_key(), _get_service_deletion_timeout(self.app) + self.resource_key, + expiration_time=_get_service_deletion_timeout(self.app), ) async def find_socket_ids(self) -> list[str]: @@ -137,11 +136,10 @@ async def find_socket_ids(self) -> list[str]: extra=get_log_record_extra(user_id=self.user_id), ) - user_sockets: list[str] = await self._registry.find_resources( - {"user_id": f"{self.user_id}", "client_session_id": "*"}, + return await self._registry.find_resources( + UserSession(user_id=self.user_id, client_session_id="*"), _SOCKET_ID_FIELDNAME, ) - return user_sockets async def find_all_resources_of_user(self, key: str) -> list[str]: with log_context( @@ -150,10 +148,9 @@ async def find_all_resources_of_user(self, key: str) -> list[str]: msg=f"{self.user_id=} finding all {key} from registry", extra=get_log_record_extra(user_id=self.user_id), ): - resources: list[str] = await get_registry(self.app).find_resources( - {"user_id": f"{self.user_id}", "client_session_id": "*"}, key + return await get_registry(self.app).find_resources( + UserSession(user_id=self.user_id, client_session_id="*"), key ) - return resources async def find(self, resource_name: str) -> list[str]: _logger.debug( @@ -164,10 +161,7 @@ async def find(self, resource_name: str) -> list[str]: extra=get_log_record_extra(user_id=self.user_id), ) - resource_values: list[str] = await self._registry.find_resources( - self._resource_key(), resource_name - ) - return resource_values + return await self._registry.find_resources(self.resource_key, resource_name) async def add(self, key: str, value: str) -> None: _logger.debug( @@ -179,7 +173,7 @@ async def add(self, key: str, value: str) -> None: extra=get_log_record_extra(user_id=self.user_id), ) - await self._registry.set_resource(self._resource_key(), (key, value)) + await self._registry.set_resource(self.resource_key, (key, value)) async def remove(self, key: str) -> None: _logger.debug( @@ -190,43 +184,33 @@ async def remove(self, key: str) -> None: extra=get_log_record_extra(user_id=self.user_id), ) - await self._registry.remove_resource(self._resource_key(), key) + await 
self._registry.remove_resource(self.resource_key, key) @staticmethod async def find_users_of_resource( app: web.Application, key: str, value: str - ) -> list[UserSessionID]: + ) -> list[UserSession]: registry = get_registry(app) - registry_keys: list[UserSessionDict] = await registry.find_keys( - resource=(key, value) - ) - users_sessions_ids: list[UserSessionID] = [ - UserSessionID( - user_id=int(r["user_id"]), - client_session_id=r["client_session_id"], - ) - for r in registry_keys - ] - return users_sessions_ids + return await registry.find_keys(resource=(key, value)) - def get_id(self) -> UserSessionID: + def get_id(self) -> UserSession: if self.client_session_id is None: msg = f"Cannot build UserSessionID with missing {self.client_session_id=}" raise ValueError(msg) - return UserSessionID( + return UserSession( user_id=self.user_id, client_session_id=self.client_session_id ) @contextmanager def managed_resource( - user_id: str | int, client_session_id: str | None, app: web.Application + user_id: UserID, client_session_id: str | None, app: web.Application ) -> Iterator[UserSessionResourcesRegistry]: try: - registry = UserSessionResourcesRegistry(int(user_id), client_session_id, app) + registry = UserSessionResourcesRegistry(user_id, client_session_id, app) yield registry except Exception: - _logger.exception( + _logger.debug( "Error in web-socket for user:%s, session:%s", user_id, client_session_id, diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/_constants.py b/services/web/server/src/simcore_service_webserver/resource_usage/_constants.py index 64586ad4c494..5b4741478d6c 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/_constants.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/_constants.py @@ -1,9 +1,14 @@ from typing import Final +from common_library.user_messages import user_message + APP_RABBITMQ_CONSUMERS_KEY: Final[str] = f"{__name__}.rabbit_consumers" -MSG_RESOURCE_USAGE_TRACKER_SERVICE_UNAVAILABLE: Final[ - str -] = "Currently resource usage tracker service is unavailable, please try again later" +MSG_RESOURCE_USAGE_TRACKER_SERVICE_UNAVAILABLE: Final[str] = user_message( + "The resource usage tracking service is temporarily unavailable. 
Please try again in a few moments.", + _version=1, +) -MSG_RESOURCE_USAGE_TRACKER_NOT_FOUND: Final[str] = "Not Found" +MSG_RESOURCE_USAGE_TRACKER_NOT_FOUND: Final[str] = user_message( + "The requested resource usage information could not be found.", _version=1 +) diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/_observer.py b/services/web/server/src/simcore_service_webserver/resource_usage/_observer.py index 114bc1df2982..d205fc43f668 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/_observer.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/_observer.py @@ -4,6 +4,7 @@ from aiohttp import web from models_library.products import ProductName +from models_library.users import UserID from servicelib.aiohttp.observer import ( registed_observers_report, register_observer, @@ -18,7 +19,7 @@ async def _on_user_disconnected( - user_id: int, + user_id: UserID, client_session_id: str, app: web.Application, product_name: ProductName, @@ -38,8 +39,12 @@ async def _on_user_disconnected( async def _on_user_connected( - user_id: int, app: web.Application, product_name: str + user_id: UserID, + app: web.Application, + product_name: ProductName, + client_session_id: str, ) -> None: + assert client_session_id # nosec # Get all user wallets and subscribe user_wallet = await wallets_service.list_wallets_for_user( app, user_id=user_id, product_name=product_name diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_admin_rest.py b/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_admin_rest.py index 63442df2ce45..1f847691768d 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_admin_rest.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_admin_rest.py @@ -40,7 +40,7 @@ from .._meta import API_VTAG as VTAG from ..login.decorators import login_required -from ..models import RequestContext +from ..models import AuthenticatedRequestContext from ..security.decorators import permission_required from ..utils_aiohttp import envelope_json_response from . 
import _pricing_plans_admin_service as pricing_plans_admin_service @@ -86,7 +86,7 @@ async def wrapper(request: web.Request) -> web.StreamResponse: @permission_required("resource-usage.write") @_handle_pricing_plan_admin_exceptions async def list_pricing_plans_for_admin_user(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) query_params: PageQueryParameters = parse_request_query_parameters_as( PageQueryParameters, request ) @@ -169,7 +169,7 @@ def pricing_plan_get_to_admin( @permission_required("resource-usage.write") @_handle_pricing_plan_admin_exceptions async def get_pricing_plan_for_admin_user(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(PricingPlanGetPathParams, request) pricing_plan_get = await pricing_plans_admin_service.get_pricing_plan( @@ -190,7 +190,7 @@ async def get_pricing_plan_for_admin_user(request: web.Request): @permission_required("resource-usage.write") @_handle_pricing_plan_admin_exceptions async def create_pricing_plan(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) body_params = await parse_request_body_as(CreatePricingPlanBodyParams, request) _data = PricingPlanCreate( @@ -217,7 +217,7 @@ async def create_pricing_plan(request: web.Request): @permission_required("resource-usage.write") @_handle_pricing_plan_admin_exceptions async def update_pricing_plan(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(PricingPlanGetPathParams, request) body_params = await parse_request_body_as(UpdatePricingPlanBodyParams, request) @@ -254,7 +254,7 @@ class PricingUnitGetPathParams(BaseModel): @permission_required("resource-usage.write") @_handle_pricing_plan_admin_exceptions async def get_pricing_unit(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(PricingUnitGetPathParams, request) pricing_unit_get = await pricing_plans_admin_service.get_pricing_unit( @@ -284,7 +284,7 @@ async def get_pricing_unit(request: web.Request): @permission_required("resource-usage.write") @_handle_pricing_plan_admin_exceptions async def create_pricing_unit(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(PricingPlanGetPathParams, request) body_params = await parse_request_body_as(CreatePricingUnitBodyParams, request) @@ -323,7 +323,7 @@ async def create_pricing_unit(request: web.Request): @permission_required("resource-usage.write") @_handle_pricing_plan_admin_exceptions async def update_pricing_unit(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(PricingUnitGetPathParams, request) body_params = await parse_request_body_as(UpdatePricingUnitBodyParams, request) @@ -365,7 +365,7 @@ async def update_pricing_unit(request: web.Request): @permission_required("resource-usage.write") @_handle_pricing_plan_admin_exceptions async def 
list_connected_services_to_pricing_plan(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(PricingPlanGetPathParams, request) connected_services_list = ( @@ -396,7 +396,7 @@ async def list_connected_services_to_pricing_plan(request: web.Request): @permission_required("resource-usage.write") @_handle_pricing_plan_admin_exceptions async def connect_service_to_pricing_plan(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(PricingPlanGetPathParams, request) body_params = await parse_request_body_as( ConnectServiceToPricingPlanBodyParams, request diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_rest.py b/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_rest.py index 1f4497e07cc9..db5ffdf38f46 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_rest.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/_pricing_plans_rest.py @@ -19,7 +19,7 @@ from .._meta import API_VTAG as VTAG from ..login.decorators import login_required -from ..models import RequestContext +from ..models import AuthenticatedRequestContext from ..security.decorators import permission_required from ..utils_aiohttp import envelope_json_response from ..wallets.errors import WalletAccessForbiddenError @@ -60,7 +60,7 @@ class PricingPlanUnitGetPathParams(StrictRequestParameters): @permission_required("resource-usage.read") @_handle_resource_usage_exceptions async def get_pricing_plan_unit(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as( PricingPlanUnitGetPathParams, request ) @@ -91,7 +91,7 @@ async def get_pricing_plan_unit(request: web.Request): @permission_required("resource-usage.read") @_handle_resource_usage_exceptions async def list_pricing_plans(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) query_params: PageQueryParameters = parse_request_query_parameters_as( PageQueryParameters, request ) @@ -142,7 +142,7 @@ async def list_pricing_plans(request: web.Request): @permission_required("resource-usage.read") @_handle_resource_usage_exceptions async def get_pricing_plan(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(PricingPlanGetPathParams, request) pricing_plan_get = await pricing_plans_admin_service.get_pricing_plan( diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/_service_runs_rest.py b/services/web/server/src/simcore_service_webserver/resource_usage/_service_runs_rest.py index 68cf2c6a946a..0e681f2d89e6 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/_service_runs_rest.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/_service_runs_rest.py @@ -30,7 +30,7 @@ from .._meta import API_VTAG as VTAG from ..login.decorators import login_required -from ..models import RequestContext +from ..models import AuthenticatedRequestContext from ..security.decorators import permission_required from 
..wallets.errors import WalletAccessForbiddenError from . import _service_runs_service as api @@ -123,7 +123,7 @@ class ServicesAggregatedUsagesListQueryParams(PageQueryParameters): @permission_required("resource-usage.read") @_handle_resource_usage_exceptions async def list_resource_usage_services(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) query_params: ServicesResourceUsagesListQueryParams = ( parse_request_query_parameters_as( ServicesResourceUsagesListQueryParams, request @@ -166,7 +166,7 @@ async def list_resource_usage_services(request: web.Request): @permission_required("resource-usage.read") @_handle_resource_usage_exceptions async def list_osparc_credits_aggregated_usages(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) query_params: ServicesAggregatedUsagesListQueryParams = ( parse_request_query_parameters_as( ServicesAggregatedUsagesListQueryParams, request @@ -206,7 +206,7 @@ async def list_osparc_credits_aggregated_usages(request: web.Request): @permission_required("resource-usage.read") @_handle_resource_usage_exceptions async def export_resource_usage_services(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) query_params: ServicesResourceUsagesReportQueryParams = ( parse_request_query_parameters_as( ServicesResourceUsagesReportQueryParams, request diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/_utils.py b/services/web/server/src/simcore_service_webserver/resource_usage/_utils.py index 38c1f038b179..1476a16faf05 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/_utils.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/_utils.py @@ -25,11 +25,11 @@ def handle_client_exceptions(app: web.Application) -> Iterator[ClientSession]: if err.status == status.HTTP_404_NOT_FOUND: raise web.HTTPNotFound(text=MSG_RESOURCE_USAGE_TRACKER_NOT_FOUND) raise web.HTTPServiceUnavailable( - reason=MSG_RESOURCE_USAGE_TRACKER_SERVICE_UNAVAILABLE + text=MSG_RESOURCE_USAGE_TRACKER_SERVICE_UNAVAILABLE ) from err except (TimeoutError, ClientConnectionError) as err: _logger.debug("Request to resource usage tracker service failed: %s", err) raise web.HTTPServiceUnavailable( - reason=MSG_RESOURCE_USAGE_TRACKER_SERVICE_UNAVAILABLE + text=MSG_RESOURCE_USAGE_TRACKER_SERVICE_UNAVAILABLE ) from err diff --git a/services/web/server/src/simcore_service_webserver/resource_usage/plugin.py b/services/web/server/src/simcore_service_webserver/resource_usage/plugin.py index 32fcae9266ee..75c25c7158b8 100644 --- a/services/web/server/src/simcore_service_webserver/resource_usage/plugin.py +++ b/services/web/server/src/simcore_service_webserver/resource_usage/plugin.py @@ -1,12 +1,11 @@ -""" Resource tracking service +"""Resource tracking service""" -""" import logging from aiohttp import web -from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func +from ..constants import APP_SETTINGS_KEY from ..rabbitmq import setup_rabbitmq from ..wallets.plugin import setup_wallets from . 
import _pricing_plans_admin_rest, _pricing_plans_rest, _service_runs_rest @@ -15,7 +14,7 @@ _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_RESOURCE_USAGE_TRACKER", diff --git a/services/web/server/src/simcore_service_webserver/rest/_handlers.py b/services/web/server/src/simcore_service_webserver/rest/_handlers.py index 5425d7341e40..721b9a651007 100644 --- a/services/web/server/src/simcore_service_webserver/rest/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/rest/_handlers.py @@ -1,7 +1,4 @@ -""" Basic healthckeck and configuration handles to the rest API - - -""" +"""Basic healthckeck and configuration handles to the rest API""" import datetime import logging @@ -40,7 +37,7 @@ async def healthcheck_liveness_probe(request: web.Request): health_report = await healthcheck.run(request.app) except HealthCheckError as err: _logger.warning("%s", err) - raise web.HTTPServiceUnavailable(reason="unhealthy") from err + raise web.HTTPServiceUnavailable(text="unhealthy") from err return web.json_response(data={"data": health_report}) diff --git a/services/web/server/src/simcore_service_webserver/rest/plugin.py b/services/web/server/src/simcore_service_webserver/rest/plugin.py index 9d23181a814e..7b3c4788b954 100644 --- a/services/web/server/src/simcore_service_webserver/rest/plugin.py +++ b/services/web/server/src/simcore_service_webserver/rest/plugin.py @@ -1,15 +1,14 @@ -""" Restful API +"""Restful API - - Loads and validates openapi specifications (oas) - - Adds check and diagnostic routes - - Activates middlewares +- Loads and validates openapi specifications (oas) +- Adds check and diagnostic routes +- Activates middlewares """ import logging from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup from servicelib.aiohttp.rest_middlewares import ( envelope_middleware_factory, error_middleware_factory, @@ -17,6 +16,7 @@ from swagger_ui import api_doc # type: ignore from .._meta import API_VTAG +from ..application_setup import ModuleCategory, app_setup_func from ..security.plugin import setup_security from . 
import _handlers from ._utils import get_openapi_specs_path @@ -26,7 +26,7 @@ _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( "simcore_service_webserver.rest", ModuleCategory.ADDON, settings_name="WEBSERVER_REST", diff --git a/services/web/server/src/simcore_service_webserver/scicrunch/db.py b/services/web/server/src/simcore_service_webserver/scicrunch/db.py index 57e19bbed357..c99c82f2aed7 100644 --- a/services/web/server/src/simcore_service_webserver/scicrunch/db.py +++ b/services/web/server/src/simcore_service_webserver/scicrunch/db.py @@ -1,5 +1,5 @@ """ - Access to postgres database scicrunch_resources table where USED rrids get stored +Access to postgres database scicrunch_resources table where USED rrids get stored """ import logging @@ -10,7 +10,7 @@ from simcore_postgres_database.models.scicrunch_resources import scicrunch_resources from sqlalchemy.dialects.postgresql import insert as sa_pg_insert -from ..db.plugin import get_database_engine +from ..db.plugin import get_database_engine_legacy from .models import ResearchResource, ResearchResourceAtdB logger = logging.getLogger(__name__) @@ -26,7 +26,7 @@ class ResearchResourceRepository: # WARNING: interfaces to both ResarchResource and ResearchResourceAtDB def __init__(self, app: web.Application): - self._engine = get_database_engine(app) + self._engine = get_database_engine_legacy(app) async def list_resources(self) -> list[ResearchResource]: async with self._engine.acquire() as conn: @@ -39,7 +39,9 @@ async def list_resources(self) -> list[ResearchResource]: ) res: ResultProxy = await conn.execute(stmt) rows: list[RowProxy] = await res.fetchall() - return [ResearchResource.model_validate(row) for row in rows] if rows else [] + return ( + [ResearchResource.model_validate(row) for row in rows] if rows else [] + ) async def get(self, rrid: str) -> ResearchResourceAtdB | None: async with self._engine.acquire() as conn: diff --git a/services/web/server/src/simcore_service_webserver/scicrunch/errors.py b/services/web/server/src/simcore_service_webserver/scicrunch/errors.py index c8e88ff2654b..95a071c03bd3 100644 --- a/services/web/server/src/simcore_service_webserver/scicrunch/errors.py +++ b/services/web/server/src/simcore_service_webserver/scicrunch/errors.py @@ -9,7 +9,7 @@ class ScicrunchError(WebServerBaseError): - msg_template = "{reason}" + msg_template = "{details}" class ScicrunchServiceError(ScicrunchError): @@ -47,26 +47,31 @@ def map_to_scicrunch_error(rrid: str, error_code: int, message: str) -> Scicrunc <= status.HTTP_511_NETWORK_AUTHENTICATION_REQUIRED ), error_code # nosec - custom_error = ScicrunchError(reason="Unexpected error in scicrunch.org") + custom_error = ScicrunchError( + details="Unexpected error in scicrunch.org", original_message=message + ) if error_code == web_exceptions.HTTPBadRequest.status_code: - custom_error = InvalidRRIDError(rrid=rrid) + custom_error = InvalidRRIDError(rrid=rrid, original_message=message) elif error_code == web_exceptions.HTTPNotFound.status_code: - custom_error = InvalidRRIDError(msg_template=f"Did not find any '{rrid}'") + custom_error = InvalidRRIDError( + msg_template=f"Did not find any '{rrid}'", original_message=message + ) elif error_code == web_exceptions.HTTPUnauthorized.status_code: custom_error = ScicrunchConfigError( - reason="osparc was not authorized to access scicrunch.org." - "Please check API access tokens." + details="osparc was not authorized to access scicrunch.org." 
+ "Please check API access tokens.", + original_message=message, ) elif ( error_code >= status.HTTP_500_INTERNAL_SERVER_ERROR ): # scicrunch.org server error custom_error = ScicrunchServiceError( - reason="scicrunch.org cannot perform our requests" + details="scicrunch.org cannot perform our requests", + original_message=message, ) - _logger.error("%s: %s", custom_error, message) return custom_error diff --git a/services/web/server/src/simcore_service_webserver/scicrunch/plugin.py b/services/web/server/src/simcore_service_webserver/scicrunch/plugin.py index e23631bdb27d..ef82e673c30d 100644 --- a/services/web/server/src/simcore_service_webserver/scicrunch/plugin.py +++ b/services/web/server/src/simcore_service_webserver/scicrunch/plugin.py @@ -1,11 +1,12 @@ """ - Notice that this is used as a submodule of groups'a app module +Notice that this is used as a submodule of groups'a app module """ + import logging from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func from .service_client import SciCrunch from .settings import get_plugin_settings @@ -18,7 +19,7 @@ async def _on_startup(app: web.Application): assert api == SciCrunch.get_instance(app) # nosec -@app_module_setup( +@app_setup_func( "simcore_service_webserver.scicrunch", ModuleCategory.ADDON, settings_name="WEBSERVER_SCICRUNCH", diff --git a/services/web/server/src/simcore_service_webserver/scicrunch/service_client.py b/services/web/server/src/simcore_service_webserver/scicrunch/service_client.py index ec8f43283b3f..c70454f4c62f 100644 --- a/services/web/server/src/simcore_service_webserver/scicrunch/service_client.py +++ b/services/web/server/src/simcore_service_webserver/scicrunch/service_client.py @@ -1,10 +1,9 @@ """ - Client to interact with scicrunch service (https://scicrunch.org) - - both with REST API and resolver API +Client to interact with scicrunch service (https://scicrunch.org) + - both with REST API and resolver API """ -import asyncio import logging from aiohttp import ClientSession, client_exceptions, web @@ -72,7 +71,7 @@ def get_instance(cls, app: web.Application) -> "SciCrunch": obj: SciCrunch | None = app.get(f"{__name__}.{cls.__name__}") if obj is None: raise ScicrunchConfigError( - reason="Services on scicrunch.org are currently disabled" + details="Services on scicrunch.org are currently disabled" ) return obj @@ -153,17 +152,17 @@ async def get_resource_fields(self, rrid: str) -> ResearchResource: except (ValidationError, client_exceptions.InvalidURL) as err: raise ScicrunchAPIError( - reason="scicrunch API response unexpectedly changed" + details="scicrunch API response unexpectedly changed" ) from err except ( client_exceptions.ClientConnectionError, client_exceptions.ClientPayloadError, - asyncio.TimeoutError, + TimeoutError, ) as err: # https://docs.aiohttp.org/en/stable/client_reference.html#hierarchy-of-exceptions raise ScicrunchServiceError( - reason="Failed to connect scicrunch service" + details="Failed to connect scicrunch service" ) from err async def search_resource(self, name_as: str) -> list[ResourceHit]: diff --git a/services/web/server/src/simcore_service_webserver/security/_authz_access_model.py b/services/web/server/src/simcore_service_webserver/security/_authz_access_model.py index 057d68401228..b17425698984 100644 --- a/services/web/server/src/simcore_service_webserver/security/_authz_access_model.py +++ 
b/services/web/server/src/simcore_service_webserver/security/_authz_access_model.py @@ -1,17 +1,16 @@ -""" hierarchical role-based access control (HRBAC) +"""hierarchical role-based access control (HRBAC) - References: - https://b_logger.nodeswat.com/implement-access-control-in-node-js-8567e7b484d1 +References: + https://b_logger.nodeswat.com/implement-access-control-in-node-js-8567e7b484d1 """ -import inspect import logging -import re -from collections.abc import Callable +from collections.abc import Awaitable, Callable from dataclasses import dataclass, field from typing import TypeAlias, TypedDict +from common_library.async_tools import maybe_await from models_library.products import ProductName from models_library.users import UserID @@ -27,16 +26,32 @@ class AuthContextDict(TypedDict, total=False): OptionalContext: TypeAlias = AuthContextDict | dict | None +CheckFunction: TypeAlias = ( + # Type for check functions that can be either sync or async + Callable[[OptionalContext], bool] + | Callable[[OptionalContext], Awaitable[bool]] +) + @dataclass class _RolePermissions: role: UserRole - # named permissions allowed - allowed: list[str] = field(default_factory=list) - # checked permissions: permissions with conditions - check: dict[str, Callable[[OptionalContext], bool]] = field(default_factory=dict) - # inherited permission - inherits: list[UserRole] = field(default_factory=list) + + allowed: list[str] = field( + default_factory=list, metadata={"description": "list of allowed operations"} + ) + check: dict[str, CheckFunction] = field( + default_factory=dict, + metadata={ + "description": "checked permissions: dict of operations with conditions" + }, + ) + inherits: list[UserRole] = field( + default_factory=list, + metadata={ + "description": "list of parent roles that inherit permissions from this role" + }, + ) @classmethod def from_rawdata(cls, role: str | UserRole, value: dict) -> "_RolePermissions": @@ -99,12 +114,7 @@ async def can( if operation in role_access.check: check = role_access.check[operation] try: - ok: bool - if inspect.iscoroutinefunction(check): - ok = await check(context) - else: - ok = check(context) - return ok + return await maybe_await(check(context)) except Exception: # pylint: disable=broad-except _logger.debug( @@ -132,32 +142,10 @@ def from_rawdata(cls, raw: dict): return RoleBasedAccessModel(roles) -_OPERATORS_REGEX_PATTERN = re.compile(r"(&|\||\bAND\b|\bOR\b)") - - async def has_access_by_role( model: RoleBasedAccessModel, role: UserRole, - operations: str, + operation: str, context: OptionalContext = None, ) -> bool: - """Extends `RoleBasedAccessModel.can` to check access to boolean expressions of operations - - Returns True if a user with a role has permission on a given context - """ - tokens = _OPERATORS_REGEX_PATTERN.split(operations) - if len(tokens) == 1: - return await model.can(role, tokens[0], context) - - if len(tokens) == 3: - tokens = [t.strip() for t in tokens if t.strip() != ""] - lhs, op, rhs = tokens - can_lhs = await model.can(role, lhs, context) - if op in ["AND", "&"]: - if can_lhs: - return await model.can(role, rhs, context) - return False - return can_lhs or (await model.can(role, rhs, context)) - - msg = f"Invalid expression '{operations}': only supports at most two operands" - raise NotImplementedError(msg) + return await model.can(role=role, operation=operation, context=context) diff --git a/services/web/server/src/simcore_service_webserver/security/_authz_access_roles.py 
b/services/web/server/src/simcore_service_webserver/security/_authz_access_roles.py index 0edd5b2a10cd..7c8c9e0e4378 100644 --- a/services/web/server/src/simcore_service_webserver/security/_authz_access_roles.py +++ b/services/web/server/src/simcore_service_webserver/security/_authz_access_roles.py @@ -78,6 +78,7 @@ class PermissionDict(TypedDict, total=False): "project.workspaces.*", "function.create", "function.read", + "function.update", "function.execute", "function.delete", "resource-usage.read", @@ -106,10 +107,10 @@ class PermissionDict(TypedDict, total=False): UserRole.PRODUCT_OWNER: PermissionDict( # NOTE: Add `tags=["po"]` to entrypoints with this access requirements can=[ - "product.details.*", - "product.invitations.create", "admin.users.read", "admin.users.write", + "product.details.*", + "product.invitations.create", ], inherits=[UserRole.TESTER], ), @@ -119,7 +120,7 @@ class PermissionDict(TypedDict, total=False): "resource-usage.write", "storage.files.sync", ], - inherits=[UserRole.TESTER], + inherits=[UserRole.PRODUCT_OWNER], ), } @@ -127,3 +128,14 @@ class PermissionDict(TypedDict, total=False): assert set(ROLES_PERMISSIONS) == set( # nosec UserRole ), "All user roles must be part define permissions" # nosec + + +# Group-based permissions for named groups (e.g. PRODUCT_SUPPORT_GROUP) +# Maps group type to list of permissions that group members can perform +NAMED_GROUP_PERMISSIONS: dict[str, list[str]] = { + "PRODUCT_SUPPORT_GROUP": [ + "product.details.*", + "admin.users.read", + ], + # NOTE: Future group types can be added here +} diff --git a/services/web/server/src/simcore_service_webserver/security/_authz_policy.py b/services/web/server/src/simcore_service_webserver/security/_authz_policy.py index 89b84859d399..20f5705a6bd0 100644 --- a/services/web/server/src/simcore_service_webserver/security/_authz_policy.py +++ b/services/web/server/src/simcore_service_webserver/security/_authz_policy.py @@ -10,34 +10,53 @@ from aiohttp_security.abc import ( # type: ignore[import-untyped] AbstractAuthorizationPolicy, ) +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs +from common_library.users_enums import UserRole from models_library.products import ProductName from models_library.users import UserID -from simcore_postgres_database.aiopg_errors import DatabaseError +from servicelib.aiohttp.db_asyncpg_engine import get_async_engine +from simcore_postgres_database.aiopg_errors import DatabaseError as AiopgDatabaseError +from sqlalchemy.exc import DatabaseError as SQLAlchemyDatabaseError -from ..db.plugin import get_database_engine +from . 
import _authz_repository from ._authz_access_model import ( AuthContextDict, OptionalContext, RoleBasedAccessModel, has_access_by_role, ) -from ._authz_db import AuthInfoDict, get_active_user_or_none, is_user_in_product_name +from ._authz_access_roles import NAMED_GROUP_PERMISSIONS +from ._authz_repository import ActiveUserIdAndRole from ._constants import MSG_AUTH_NOT_AVAILABLE, PERMISSION_PRODUCT_LOGIN_KEY -from ._identity_api import IdentityStr +from ._identity_web import IdentityStr _logger = logging.getLogger(__name__) -# Keeps a cache during bursts to avoid stress on the database _SECOND = 1 # in seconds -_AUTHZ_BURST_CACHE_TTL: Final = 5 * _SECOND +_MINUTE: Final = 60 * _SECOND +_AUTHZ_BURST_CACHE_TTL: Final = ( + # WARNING: TLL=0 means it never expires + # Rationale: + # a user's access to a product does not change that frequently + # Keeps a cache during bursts to avoid stress on the database + 30 + * _MINUTE +) @contextlib.contextmanager def _handle_exceptions_as_503(): try: yield - except DatabaseError as err: - _logger.exception("Auth unavailable due to database error") + except (AiopgDatabaseError, SQLAlchemyDatabaseError) as err: + _logger.exception( + **create_troubleshooting_log_kwargs( + "Auth unavailable due to database error", + error=err, + tip="Check database connection", + ) + ) + raise web.HTTPServiceUnavailable(text=MSG_AUTH_NOT_AVAILABLE) from err @@ -51,13 +70,17 @@ def __init__(self, app: web.Application, access_model: RoleBasedAccessModel): namespace=__name__, key_builder=lambda f, *ag, **kw: f"{f.__name__}/{kw['email']}", ) - async def _get_auth_or_none(self, *, email: str) -> AuthInfoDict | None: + async def _get_authorized_user_or_none( + self, *, email: str + ) -> ActiveUserIdAndRole | None: """ Raises: web.HTTPServiceUnavailable: if database raises an exception """ with _handle_exceptions_as_503(): - return await get_active_user_or_none(get_database_engine(self._app), email) + return await _authz_repository.get_active_user_or_none( + get_async_engine(self._app), email=email + ) @cached( ttl=_AUTHZ_BURST_CACHE_TTL, @@ -72,8 +95,23 @@ async def _has_access_to_product( web.HTTPServiceUnavailable: if database raises an exception """ with _handle_exceptions_as_503(): - return await is_user_in_product_name( - get_database_engine(self._app), user_id, product_name + return await _authz_repository.is_user_in_product_name( + get_async_engine(self._app), user_id=user_id, product_name=product_name + ) + + @cached( + ttl=_AUTHZ_BURST_CACHE_TTL, + namespace=__name__, + key_builder=lambda f, *ag, **kw: f"{f.__name__}/{kw['user_id']}/{kw['group_id']}", + ) + async def _is_user_in_group(self, *, user_id: UserID, group_id: int) -> bool: + """ + Raises: + web.HTTPServiceUnavailable: if database raises an exception + """ + with _handle_exceptions_as_503(): + return await _authz_repository.is_user_in_group( + get_async_engine(self._app), user_id=user_id, group_id=group_id ) @property @@ -82,7 +120,11 @@ def access_model(self) -> RoleBasedAccessModel: async def clear_cache(self): # pylint: disable=no-member - for fun in (self._get_auth_or_none, self._has_access_to_product): + for fun in ( + self._get_authorized_user_or_none, + self._has_access_to_product, + self._is_user_in_group, + ): autz_cache: BaseCache = fun.cache await autz_cache.clear() @@ -96,7 +138,9 @@ async def authorized_userid(self, identity: IdentityStr) -> int | None: Return the user_id of the user identified by the identity or "None" if no user exists related to the identity. 
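Aside for reviewers: the hunk above wraps the new keyword-only repository calls in `@cached` burst caches. A minimal, self-contained sketch of that pattern with aiocache is shown here; `fetch_active_user` and `AUTHZ_TTL` are illustrative names, not part of this PR.

from aiocache import cached

AUTHZ_TTL = 30 * 60  # seconds; note that ttl=0 would mean "never expires"


@cached(
    ttl=AUTHZ_TTL,
    namespace=__name__,
    # the cache key is derived from the keyword argument only, so callers
    # must pass email=... (keyword-only), as in the policy class above
    key_builder=lambda f, *args, **kwargs: f"{f.__name__}/{kwargs['email']}",
)
async def fetch_active_user(*, email: str) -> dict | None:
    # stand-in for the real database-backed repository query
    return {"id": 1, "role": "USER"} if email.endswith("@example.com") else None


async def clear_authz_cache() -> None:
    # aiocache exposes the cache on the decorated function, which is how
    # AuthorizationPolicy.clear_cache() invalidates all burst caches at once
    await fetch_active_user.cache.clear()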
""" - user_info: AuthInfoDict | None = await self._get_auth_or_none(email=identity) + user_info: ActiveUserIdAndRole | None = await self._get_authorized_user_or_none( + email=identity + ) if user_info is None: return None @@ -117,31 +161,51 @@ async def permits( :return: True if user has permission to execute this operation within the given context """ if identity is None or permission is None: - _logger.debug( - "Invalid %s of %s. Denying access.", - f"{identity=}", - f"{permission=}", - ) return False - auth_info = await self._get_auth_or_none(email=identity) - if auth_info is None: + # authorized user info + authorized_user_info = await self._get_authorized_user_or_none(email=identity) + if authorized_user_info is None: return False + user_id = authorized_user_info["id"] + user_role = authorized_user_info["role"] + + # context info: product_name context = context or AuthContextDict() + product_name = context.get("product_name") + + assert user_id == context.get( # nosec + "authorized_uid" + ), f"{user_id}!={context.get('authorized_uid')}" - # product access + # PRODUCT access if permission == PERMISSION_PRODUCT_LOGIN_KEY: - product_name = context.get("product_name") ok: bool = product_name is not None and await self._has_access_to_product( - user_id=auth_info["id"], product_name=product_name + user_id=user_id, product_name=product_name ) return ok - # role-based access - return await has_access_by_role( + # ROLE-BASED access policy + role_allowed = await has_access_by_role( self._access_model, - role=auth_info["role"], - operations=permission, + role=user_role, + operation=permission, context=context, ) + + if role_allowed: + return True + + # GROUP-BASED access policy (only if enabled in context and user is above GUEST role) + product_support_group_id = context.get("product_support_group_id", None) + group_allowed = ( + product_support_group_id is not None + and user_role > UserRole.GUEST + and permission in NAMED_GROUP_PERMISSIONS.get("PRODUCT_SUPPORT_GROUP", []) + and await self._is_user_in_group( + user_id=user_id, group_id=product_support_group_id + ) + ) + + return group_allowed # noqa: RET504 diff --git a/services/web/server/src/simcore_service_webserver/security/_authz_db.py b/services/web/server/src/simcore_service_webserver/security/_authz_repository.py similarity index 54% rename from services/web/server/src/simcore_service_webserver/security/_authz_db.py rename to services/web/server/src/simcore_service_webserver/security/_authz_repository.py index 300130b6f822..2f58de8328ea 100644 --- a/services/web/server/src/simcore_service_webserver/security/_authz_db.py +++ b/services/web/server/src/simcore_service_webserver/security/_authz_repository.py @@ -2,53 +2,58 @@ from typing import TypedDict import sqlalchemy as sa -from aiopg.sa import Engine -from aiopg.sa.result import ResultProxy from models_library.basic_types import IdInt +from models_library.groups import GroupID from models_library.products import ProductName from models_library.users import UserID from pydantic import TypeAdapter from simcore_postgres_database.models.groups import user_to_groups from simcore_postgres_database.models.products import products from simcore_postgres_database.models.users import UserRole +from sqlalchemy.ext.asyncio import AsyncEngine from ..db.models import UserStatus, users _logger = logging.getLogger(__name__) -class AuthInfoDict(TypedDict, total=True): +class ActiveUserIdAndRole(TypedDict, total=True): id: IdInt role: UserRole -async def get_active_user_or_none(engine: Engine, email: 
str) -> AuthInfoDict | None: +async def get_active_user_or_none( + engine: AsyncEngine, *, email: str +) -> ActiveUserIdAndRole | None: """Gets a user with email if ACTIVE othewise return None Raises: - DatabaseError: unexpected errors found in https://github.com/ITISFoundation/osparc-simcore/issues/880 and https://github.com/ITISFoundation/osparc-simcore/pull/1160 + DatabaseError: unexpected errors found in + https://github.com/ITISFoundation/osparc-simcore/issues/880 and + https://github.com/ITISFoundation/osparc-simcore/pull/1160 """ - async with engine.acquire() as conn: - result: ResultProxy = await conn.execute( + async with engine.connect() as conn: + result = await conn.execute( sa.select(users.c.id, users.c.role).where( (users.c.email == email) & (users.c.status == UserStatus.ACTIVE) ) ) - row = await result.fetchone() - assert ( - row is None or TypeAdapter(IdInt).validate_python(row.id) is not None # nosec + row = result.one_or_none() + + assert ( # nosec + row is None or TypeAdapter(IdInt).validate_python(row.id) is not None ) - assert ( - row is None or TypeAdapter(UserRole).validate_python(row.role) is not None # nosec + assert ( # nosec + row is None or TypeAdapter(UserRole).validate_python(row.role) is not None ) - return AuthInfoDict(id=row.id, role=row.role) if row else None + return ActiveUserIdAndRole(id=row.id, role=row.role) if row else None async def is_user_in_product_name( - engine: Engine, user_id: UserID, product_name: ProductName + engine: AsyncEngine, *, user_id: UserID, product_name: ProductName ) -> bool: - async with engine.acquire() as conn: + async with engine.connect() as conn: return ( await conn.scalar( sa.select(users.c.id) @@ -61,3 +66,18 @@ async def is_user_in_product_name( ) is not None ) + + +async def is_user_in_group( + engine: AsyncEngine, + *, + user_id: UserID, + group_id: GroupID, +) -> bool: + async with engine.connect() as conn: + result = await conn.scalar( + sa.select(user_to_groups.c.uid).where( + (user_to_groups.c.uid == user_id) & (user_to_groups.c.gid == group_id) + ) + ) + return result is not None diff --git a/services/web/server/src/simcore_service_webserver/security/_authz_service.py b/services/web/server/src/simcore_service_webserver/security/_authz_service.py new file mode 100644 index 000000000000..9a0a139e1dde --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/security/_authz_service.py @@ -0,0 +1,37 @@ +# mypy: disable-error-code=truthy-function + + +import aiohttp_security.api # type: ignore[import-untyped] +import passlib.hash +from aiohttp import web + +from ._authz_access_model import RoleBasedAccessModel +from ._authz_policy import AuthorizationPolicy +from ._constants import PERMISSION_PRODUCT_LOGIN_KEY + +assert PERMISSION_PRODUCT_LOGIN_KEY # nosec + + +def get_access_model(app: web.Application) -> RoleBasedAccessModel: + autz_policy: AuthorizationPolicy = app[aiohttp_security.api.AUTZ_KEY] + return autz_policy.access_model + + +async def clean_auth_policy_cache(app: web.Application) -> None: + autz_policy: AuthorizationPolicy = app[aiohttp_security.api.AUTZ_KEY] + await autz_policy.clear_cache() + + +# +# utils (i.e. 
independent from setup) +# + + +def encrypt_password(password: str) -> str: + hashed: str = passlib.hash.sha256_crypt.using(rounds=1000).hash(password) + return hashed + + +def check_password(password: str, password_hash: str) -> bool: + is_valid: bool = passlib.hash.sha256_crypt.verify(password, password_hash) + return is_valid diff --git a/services/web/server/src/simcore_service_webserver/security/api.py b/services/web/server/src/simcore_service_webserver/security/_authz_web.py similarity index 51% rename from services/web/server/src/simcore_service_webserver/security/api.py rename to services/web/server/src/simcore_service_webserver/security/_authz_web.py index 7ebb722a7519..8f9f1042ef18 100644 --- a/services/web/server/src/simcore_service_webserver/security/api.py +++ b/services/web/server/src/simcore_service_webserver/security/_authz_web.py @@ -1,34 +1,16 @@ # mypy: disable-error-code=truthy-function -""" API for security subsystem. - - -NOTE: DO NOT USE aiohttp_security.api directly but use this interface instead -""" import aiohttp_security.api # type: ignore[import-untyped] -import passlib.hash from aiohttp import web from models_library.users import UserID -from ._authz_access_model import AuthContextDict, OptionalContext, RoleBasedAccessModel -from ._authz_policy import AuthorizationPolicy -from ._constants import PERMISSION_PRODUCT_LOGIN_KEY -from ._identity_api import forget_identity, remember_identity +from ._authz_access_model import AuthContextDict, OptionalContext +from ._constants import MSG_UNAUTHORIZED, PERMISSION_PRODUCT_LOGIN_KEY assert PERMISSION_PRODUCT_LOGIN_KEY # nosec -def get_access_model(app: web.Application) -> RoleBasedAccessModel: - autz_policy: AuthorizationPolicy = app[aiohttp_security.api.AUTZ_KEY] - return autz_policy.access_model - - -async def clean_auth_policy_cache(app: web.Application) -> None: - autz_policy: AuthorizationPolicy = app[aiohttp_security.api.AUTZ_KEY] - await autz_policy.clear_cache() - - async def is_anonymous(request: web.Request) -> bool: """ User is considered anonymous if there is not verified identity in request. @@ -46,7 +28,7 @@ async def check_user_authorized(request: web.Request) -> UserID: # NOTE: Same as aiohttp_security.api.check_authorized user_id: UserID | None = await aiohttp_security.api.authorized_userid(request) if user_id is None: - raise web.HTTPUnauthorized + raise web.HTTPUnauthorized(text=MSG_UNAUTHORIZED) return user_id @@ -66,37 +48,35 @@ async def check_user_permission( allowed = await aiohttp_security.api.permits(request, permission, context) if not allowed: - raise web.HTTPForbidden( - reason=f"You do not have sufficient access rights for {permission}" - ) - - -# -# utils (i.e. independent from setup) -# - + msg = "You do not have sufficient access rights for" + if permission == PERMISSION_PRODUCT_LOGIN_KEY: + msg += f" {context.get('product_name')}" + else: + msg += f" {permission}" + raise web.HTTPForbidden(text=msg) -def encrypt_password(password: str) -> str: - hashed: str = passlib.hash.sha256_crypt.using(rounds=1000).hash(password) - return hashed - - -def check_password(password: str, password_hash: str) -> bool: - is_valid: bool = passlib.hash.sha256_crypt.verify(password, password_hash) - return is_valid +async def check_user_permission_with_groups( + request: web.Request, permission: str +) -> None: + """Checker that passes to authorized users with given permission via roles OR groups. 
-assert forget_identity # nosec -assert remember_identity # nosec + Raises: + web.HTTPUnauthorized: If user is not authorized + web.HTTPForbidden: If user is authorized but lacks both role and group permissions + """ + from ..products import products_web + context = { + "authorized_uid": await check_user_authorized(request), + "enable_group_permissions": True, + "request": request, + "product_support_group_id": products_web.get_current_product( + request + ).support_standard_group_id, + } -__all__: tuple[str, ...] = ( - "AuthContextDict", - "check_user_permission", - "encrypt_password", - "forget_identity", - "get_access_model", - "is_anonymous", - "PERMISSION_PRODUCT_LOGIN_KEY", - "remember_identity", -) + allowed = await aiohttp_security.api.permits(request, permission, context) + if not allowed: + msg = f"You do not have sufficient access rights for {permission}" + raise web.HTTPForbidden(text=msg) diff --git a/services/web/server/src/simcore_service_webserver/security/_constants.py b/services/web/server/src/simcore_service_webserver/security/_constants.py index a7b03fb3db7e..b10848ee5227 100644 --- a/services/web/server/src/simcore_service_webserver/security/_constants.py +++ b/services/web/server/src/simcore_service_webserver/security/_constants.py @@ -1,5 +1,10 @@ from typing import Final -MSG_AUTH_NOT_AVAILABLE: Final[str] = "Authentication service is temporary unavailable" +from common_library.user_messages import user_message + +MSG_UNAUTHORIZED: Final[str] = user_message("Unauthorized") +MSG_AUTH_NOT_AVAILABLE: Final[str] = user_message( + "Authentication service is temporary unavailable" +) PERMISSION_PRODUCT_LOGIN_KEY: Final[str] = "product.login" diff --git a/services/web/server/src/simcore_service_webserver/security/_identity_api.py b/services/web/server/src/simcore_service_webserver/security/_identity_web.py similarity index 100% rename from services/web/server/src/simcore_service_webserver/security/_identity_api.py rename to services/web/server/src/simcore_service_webserver/security/_identity_web.py diff --git a/services/web/server/src/simcore_service_webserver/security/decorators.py b/services/web/server/src/simcore_service_webserver/security/decorators.py index 8ae5a36f985f..945db63d278e 100644 --- a/services/web/server/src/simcore_service_webserver/security/decorators.py +++ b/services/web/server/src/simcore_service_webserver/security/decorators.py @@ -3,7 +3,8 @@ from aiohttp import web from servicelib.aiohttp.typing_extension import Handler -from .api import check_user_permission +from ._authz_web import check_user_permission_with_groups +from .security_web import check_user_permission def permission_required(permissions: str): @@ -26,3 +27,24 @@ async def _wrapped(request: web.Request): return _wrapped return _decorator + + +def group_or_role_permission_required(permission: str): + """Decorator that checks user permissions via role or gorup membership + + User gets access if they have permission via role OR group membership. + + If user is not authorized - raises HTTPUnauthorized, + if user is authorized but lacks both role and group permissions - raises HTTPForbidden. 
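Aside for reviewers: taken together, `permits()` and this decorator implement "role grants OR support-group grants". A dependency-free sketch of that decision logic follows, under the assumption that the role enum is ordered and `is_member` is an async membership lookup (both names are illustrative, not the PR's API).

from collections.abc import Awaitable, Callable
from enum import IntEnum


class Role(IntEnum):
    GUEST = 10
    USER = 20


# operations a support-group member may perform even if the role does not allow them
SUPPORT_GROUP_OPERATIONS = {"product.details.*", "admin.users.read"}


async def is_allowed(
    *,
    role: Role,
    operation: str,
    role_operations: set[str],
    support_group_id: int | None,
    is_member: Callable[[int], Awaitable[bool]],
) -> bool:
    # 1) the role-based policy wins whenever it already grants the operation
    if operation in role_operations:
        return True
    # 2) otherwise fall back to the group-based policy: only for users above
    #    GUEST, only for whitelisted operations, and only if a support group
    #    is configured for the product and the user actually belongs to it
    return (
        support_group_id is not None
        and role > Role.GUEST
        and operation in SUPPORT_GROUP_OPERATIONS
        and await is_member(support_group_id)
    )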
+ """ + + def _decorator(handler: Handler): + @wraps(handler) + async def _wrapped(request: web.Request): + await check_user_permission_with_groups(request, permission) + + return await handler(request) + + return _wrapped + + return _decorator diff --git a/services/web/server/src/simcore_service_webserver/security/plugin.py b/services/web/server/src/simcore_service_webserver/security/plugin.py index 49fa986937cc..479c07d262a8 100644 --- a/services/web/server/src/simcore_service_webserver/security/plugin.py +++ b/services/web/server/src/simcore_service_webserver/security/plugin.py @@ -1,18 +1,20 @@ -""" Security subsystem. +"""Security subsystem. - - Responsible of authentication and authorization +- Responsible of authentication and authorization - See login/decorators.py - Based on https://aiohttp-security.readthedocs.io/en/latest/ +See login/decorators.py +Based on https://aiohttp-security.readthedocs.io/en/latest/ """ import logging +from typing import Final import aiohttp_security # type: ignore[import-untyped] from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func +from ..db.plugin import setup_db from ..session.plugin import setup_session from ._authz_access_model import RoleBasedAccessModel from ._authz_access_roles import ROLES_PERMISSIONS @@ -21,13 +23,17 @@ _logger = logging.getLogger(__name__) +APP_SECURITY_CLIENT_KEY: Final = web.AppKey("APP_SECURITY_CLIENT_KEY", object) -@app_module_setup( + +@app_setup_func( __name__, ModuleCategory.SYSTEM, settings_name="WEBSERVER_SECURITY", logger=_logger ) def setup_security(app: web.Application): - + # NOTE: No need to add a dependency with products domain, i.e. do not call setup_products. + # The logic about the product is obtained via the security repository setup_session(app) + setup_db(app) # Identity Policy: uses sessions to identify (SEE how sessions are setup in session/plugin.py) identity_policy = SessionIdentityPolicy() diff --git a/services/web/server/src/simcore_service_webserver/security/security_service.py b/services/web/server/src/simcore_service_webserver/security/security_service.py new file mode 100644 index 000000000000..aeb5452bd491 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/security/security_service.py @@ -0,0 +1,20 @@ +"""Service-layer interface + +NOTE: Must not include functions that depend on aiohttp.web.Request (use *_web.py instead) +""" + +from ._authz_service import ( + check_password, + clean_auth_policy_cache, + encrypt_password, + get_access_model, +) + +__all__: tuple[str, ...] = ( + "check_password", + "clean_auth_policy_cache", + "encrypt_password", + "get_access_model", +) + +# nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/security/security_web.py b/services/web/server/src/simcore_service_webserver/security/security_web.py new file mode 100644 index 000000000000..6eaf7fe7376e --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/security/security_web.py @@ -0,0 +1,27 @@ +# mypy: disable-error-code=truthy-function +"""aiohttp.web-related interfaces i.e. 
web.Request is used in the inputs + +NOTE: DO NOT USE aiohttp_security.api directly but use this interface instead +NOTE: functions in this module +""" + +from ._authz_access_model import AuthContextDict +from ._authz_web import ( + check_user_authorized, + check_user_permission, + is_anonymous, +) +from ._constants import PERMISSION_PRODUCT_LOGIN_KEY +from ._identity_web import forget_identity, remember_identity + +__all__: tuple[str, ...] = ( + "PERMISSION_PRODUCT_LOGIN_KEY", + "AuthContextDict", + "check_user_authorized", + "check_user_permission", + "forget_identity", + "is_anonymous", + "remember_identity", +) + +# nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/session/plugin.py b/services/web/server/src/simcore_service_webserver/session/plugin.py index 702c5df40dff..4023dc5ba34c 100644 --- a/services/web/server/src/simcore_service_webserver/session/plugin.py +++ b/services/web/server/src/simcore_service_webserver/session/plugin.py @@ -1,19 +1,22 @@ """user's session plugin""" import logging +from typing import Final import aiohttp_session from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup from settings_library.utils_session import DEFAULT_SESSION_COOKIE_NAME +from ..application_setup import ModuleCategory, app_setup_func from ._cookie_storage import SharedCookieEncryptedCookieStorage from .settings import SessionSettings, get_plugin_settings _logger = logging.getLogger(__name__) +APP_SESSION_KEY: Final = web.AppKey("APP_SESSION_KEY", object) -@app_module_setup( + +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_SESSION", logger=_logger ) def setup_session(app: web.Application): @@ -41,4 +44,9 @@ def setup_session(app: web.Application): samesite=settings.SESSION_COOKIE_SAMESITE, ) aiohttp_session.setup(app=app, storage=encrypted_cookie_sessions) - app.middlewares[-1].__middleware_name__ = f"{__name__}.session" # type: ignore[attr-defined] # PC this attribute does not exist and mypy does not like it + setattr( # noqa: B010 + # aiohttp_session.setup has appended a middleware. We add an identifier (mostly for debugging) + app.middlewares[-1], + "__middleware_name__", + f"{__name__}.session", + ) diff --git a/services/web/server/src/simcore_service_webserver/session/settings.py b/services/web/server/src/simcore_service_webserver/session/settings.py index 4e1c99dac68f..8ed7aa397c98 100644 --- a/services/web/server/src/simcore_service_webserver/session/settings.py +++ b/services/web/server/src/simcore_service_webserver/session/settings.py @@ -21,9 +21,7 @@ class SessionSettings(BaseCustomSettings, MixinSessionSettings): description="Secret key to encrypt cookies. 
" 'TIP: python3 -c "from cryptography.fernet import *; print(Fernet.generate_key())"', min_length=44, - validation_alias=AliasChoices( - "SESSION_SECRET_KEY", "WEBSERVER_SESSION_SECRET_KEY" - ), + validation_alias=AliasChoices("WEBSERVER_SESSION_SECRET_KEY"), ), ] diff --git a/services/web/server/src/simcore_service_webserver/socketio/_handlers.py b/services/web/server/src/simcore_service_webserver/socketio/_handlers.py index c864a735d52d..c9ff46341483 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/socketio/_handlers.py @@ -9,19 +9,22 @@ import socketio.exceptions # type: ignore[import-untyped] from aiohttp import web +from common_library.logging.logging_base import get_log_record_extra +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from models_library.api_schemas_webserver.socketio import SocketIORoomStr from models_library.products import ProductName +from models_library.projects import ProjectID from models_library.socketio import SocketMessageDict from models_library.users import UserID +from pydantic import TypeAdapter from servicelib.aiohttp.observer import emit -from servicelib.logging_utils import get_log_record_extra, log_context -from servicelib.request_keys import RQT_USERID_KEY +from servicelib.aiohttp.request_keys import RQT_USERID_KEY +from servicelib.logging_utils import log_context from ..groups.api import list_user_groups_ids_with_read_access from ..login.decorators import login_required from ..products import products_web -from ..products.models import Product -from ..resource_manager.user_sessions import managed_resource +from ..resource_manager.user_sessions import PROJECT_ID_KEY, managed_resource from ._utils import EnvironDict, SocketID, get_socket_server, register_socketio_handler from .messages import SOCKET_IO_HEARTBEAT_EVENT, send_message_to_user @@ -44,15 +47,17 @@ def auth_user_factory(socket_id: SocketID): @login_required - async def _handler(request: web.Request) -> tuple[UserID, ProductName]: + async def _handler(request: web.Request) -> tuple[UserID, ProductName, str]: """ Raises: web.HTTPUnauthorized: when the user is not recognized. 
Keeps the original request """ app = request.app - user_id = UserID(request.get(RQT_USERID_KEY, _ANONYMOUS_USER_ID)) + user_id = TypeAdapter(UserID).validate_python( + request.get(RQT_USERID_KEY, _ANONYMOUS_USER_ID) + ) client_session_id = request.query.get("client_session_id", None) - product: Product = products_web.get_current_product(request) + product = products_web.get_current_product(request) _logger.debug( "client %s,%s authenticated", f"{user_id=}", f"{client_session_id=}" @@ -74,14 +79,9 @@ async def _handler(request: web.Request) -> tuple[UserID, ProductName]: # REDIS wrapper with managed_resource(user_id, client_session_id, app) as resource_registry: - _logger.info( - "socketio connection from user %s", - user_id, - extra=get_log_record_extra(user_id=user_id), - ) await resource_registry.set_socket_id(socket_id) - return user_id, product.name + return user_id, product.name, client_session_id return _handler @@ -95,10 +95,24 @@ async def _set_user_in_group_rooms( sio = get_socket_server(app) for gid in group_ids: - # NOTE socketio need to be upgraded that's why enter_room is not an awaitable - sio.enter_room(socket_id, SocketIORoomStr.from_group_id(gid)) + await sio.enter_room(socket_id, SocketIORoomStr.from_group_id(gid)) + + await sio.enter_room(socket_id, SocketIORoomStr.from_user_id(user_id)) + - sio.enter_room(socket_id, SocketIORoomStr.from_user_id(user_id)) +async def _set_user_in_project_rooms( + app: web.Application, user_id: UserID, client_session_id: str, socket_id: SocketID +) -> None: + """Adds user in project rooms in case he has any project open""" + project_ids = [] + with managed_resource(user_id, client_session_id, app) as user_session: + project_ids = await user_session.find_all_resources_of_user(PROJECT_ID_KEY) + + sio = get_socket_server(app) + for project_id in project_ids: + await sio.enter_room( + socket_id, SocketIORoomStr.from_project_id(ProjectID(project_id)) + ) # @@ -126,18 +140,23 @@ async def connect( try: auth_user_handler = auth_user_factory(socket_id) - user_id, product_name = await auth_user_handler(environ["aiohttp.request"]) + user_id, product_name, client_session_id = await auth_user_handler( + environ["aiohttp.request"] + ) + _logger.info( + "%s successfully connected with %s", + f"{user_id=}", + f"{client_session_id=}", + extra=get_log_record_extra(user_id=user_id), + ) await _set_user_in_group_rooms(app, user_id, socket_id) + await _set_user_in_project_rooms(app, user_id, client_session_id, socket_id) - _logger.info("Sending set_heartbeat_emit_interval with %s", _EMIT_INTERVAL_S) + _logger.debug("Sending set_heartbeat_emit_interval with %s", _EMIT_INTERVAL_S) await emit( - app, - "SIGNAL_USER_CONNECTED", - user_id, - app, - product_name, + app, "SIGNAL_USER_CONNECTED", user_id, app, product_name, client_session_id ) await send_message_to_user( @@ -163,38 +182,58 @@ async def connect( @register_socketio_handler async def disconnect(socket_id: SocketID, app: web.Application) -> None: """socketio reserved handler for when the socket.io connection is disconnected.""" - sio = get_socket_server(app) - async with sio.session(socket_id) as socketio_session: - if user_id := socketio_session.get("user_id"): - client_session_id = socketio_session["client_session_id"] - product_name = socketio_session["product_name"] - - with log_context( - _logger, - logging.INFO, - "disconnection of %s for %s", - f"{user_id=}", - f"{client_session_id=}", - ): - with managed_resource(user_id, client_session_id, app) as user_session: - await 
user_session.remove_socket_id() - # signal same user other clients if available - await emit( - app, - "SIGNAL_USER_DISCONNECTED", - user_id, - client_session_id, - app, - product_name, + try: + async with get_socket_server(app).session(socket_id) as socketio_session: + # if session is well formed, we can access its data + try: + user_id = socketio_session["user_id"] + client_session_id = socketio_session["client_session_id"] + product_name = socketio_session["product_name"] + + except KeyError as err: + _logger.exception( + **create_troubleshooting_log_kwargs( + f"Socket session {socket_id} does not have user_id or client_session_id during disconnect", + error=err, + error_context={ + "socket_id": socket_id, + "socketio_session": socketio_session, + }, + tip="Check if session is corrupted", + ) ) - - else: - # this should not happen!! - _logger.error( - "Unknown client diconnected sid: %s, session %s", - socket_id, - f"{socketio_session}", + return + + except KeyError as err: + _logger.warning( + **create_troubleshooting_log_kwargs( + f"Socket session {socket_id} not found during disconnect, already cleaned up", + error=err, + error_context={"socket_id": socket_id}, ) + ) + return + + # Notify disconnection to all replicas/plugins + with log_context( + _logger, + logging.INFO, + "disconnection of %s with %s", + f"{user_id=}", + f"{client_session_id=}", + ): + with managed_resource(user_id, client_session_id, app) as user_session: + await user_session.remove_socket_id() + + # signal same user other clients if available + await emit( + app, + "SIGNAL_USER_DISCONNECTED", + user_id, + client_session_id, + app, + product_name, + ) @register_socketio_handler diff --git a/services/web/server/src/simcore_service_webserver/socketio/_observer.py b/services/web/server/src/simcore_service_webserver/socketio/_observer.py index ab420d020459..9573819183ce 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/_observer.py +++ b/services/web/server/src/simcore_service_webserver/socketio/_observer.py @@ -1,4 +1,4 @@ -""" Observer events handlers +"""Observer events handlers SEE servicelib.observer """ @@ -7,12 +7,12 @@ import logging from aiohttp import web -from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY +from common_library.logging.logging_base import get_log_record_extra from servicelib.aiohttp.observer import register_observer, setup_observer_registry -from servicelib.logging_utils import get_log_record_extra from servicelib.utils import fire_and_forget_task, logged_gather from socketio import AsyncServer # type: ignore[import-untyped] +from ..constants import APP_FIRE_AND_FORGET_TASKS_KEY from ..resource_manager.user_sessions import managed_resource from ._utils import get_socket_server @@ -40,7 +40,7 @@ async def _on_user_logout( _logger.debug("user %s must be disconnected", user_id) # find the sockets related to the user sio: AsyncServer = get_socket_server(app) - with managed_resource(user_id, client_session_id, app) as user_session: + with managed_resource(int(user_id), client_session_id, app) as user_session: # start by disconnecting this client if possible if client_session_id: if socket_id := await user_session.get_socket_id(): diff --git a/services/web/server/src/simcore_service_webserver/socketio/_utils.py b/services/web/server/src/simcore_service_webserver/socketio/_utils.py index 9d5beca61c15..ef37950a7760 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/_utils.py +++ 
b/services/web/server/src/simcore_service_webserver/socketio/_utils.py @@ -1,7 +1,8 @@ import inspect +from collections.abc import Awaitable, Callable from functools import wraps from types import ModuleType -from typing import Any, Awaitable, Callable +from typing import Any from aiohttp import web from socketio import AsyncServer # type: ignore[import-untyped] @@ -103,7 +104,6 @@ def register_socketio_handler(func: Callable) -> Callable: if is_handler: _socketio_handlers_registry.append(func) else: - raise SyntaxError( - "the function shall be of type fct(*args, app: web.Application" - ) + msg = "the function shall be of type fct(*args, app: web.Application" + raise SyntaxError(msg) return func diff --git a/services/web/server/src/simcore_service_webserver/socketio/messages.py b/services/web/server/src/simcore_service_webserver/socketio/messages.py index 081cab053770..5d6a973ea304 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/messages.py +++ b/services/web/server/src/simcore_service_webserver/socketio/messages.py @@ -8,6 +8,7 @@ from aiohttp.web import Application from models_library.api_schemas_webserver.socketio import SocketIORoomStr from models_library.groups import GroupID +from models_library.projects import ProjectID from models_library.socketio import SocketMessageDict from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder @@ -27,7 +28,9 @@ SOCKET_IO_LOG_EVENT: Final[str] = "logger" SOCKET_IO_NODE_UPDATED_EVENT: Final[str] = "nodeUpdated" + SOCKET_IO_PROJECT_UPDATED_EVENT: Final[str] = "projectStateUpdated" + SOCKET_IO_WALLET_OSPARC_CREDITS_UPDATED_EVENT: Final[str] = "walletOsparcCreditsUpdated" @@ -52,7 +55,7 @@ async def _safe_emit( room=room, ignore_queue=ignore_queue, ) - _logger.info("emitted socketio event '%s' to room '%s'", event, room) + _logger.debug("emitted socketio event '%s' to room '%s'", event, room) async def send_message_to_user( @@ -60,12 +63,12 @@ async def send_message_to_user( user_id: UserID, message: SocketMessageDict, *, - ignore_queue: bool, + ignore_queue: bool = False, ) -> None: """ Keyword Arguments: - ignore_queue -- set to False when this message is delivered from a server that has no direct connection to the client (default: {True}) - An example where this is value is False, is sending messages to a user in the GC + ignore_queue -- set to True when this message is delivered from a server that has no direct connection to the user client (default: {False}) + Be careful with this option, as it can lead to message loss if the user is not connected to this server!! 
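Aside for reviewers: the `send_message_to_project_room` helper added just below follows the same room-based fan-out as the user and group variants. Roughly, and with illustrative names (`sio`, `project_room`), the emit boils down to the sketch below; with an `AsyncAioPikaManager` client manager attached, the event also reaches sockets handled by other webserver replicas.

import socketio  # python-socketio


def project_room(project_id: str) -> str:
    # illustrative room naming; the webserver builds it via SocketIORoomStr.from_project_id
    return f"project:{project_id}"


async def notify_project_room(sio: socketio.AsyncServer, project_id: str) -> None:
    # every socket that joined the room on connect (see _set_user_in_project_rooms)
    # receives the event; ignore_queue stays False so the message goes through
    # the shared message queue and reaches clients on all replicas
    await sio.emit(
        "projectStateUpdated",
        data={"project_uuid": project_id},
        room=project_room(project_id),
        ignore_queue=False,
    )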
""" sio: AsyncServer = get_socket_server(app) @@ -98,3 +101,18 @@ async def send_message_to_standard_group( # that might be connected to different replicas ignore_queue=False, ) + + +async def send_message_to_project_room( + app: Application, + project_id: ProjectID, + message: SocketMessageDict, +) -> None: + sio: AsyncServer = get_socket_server(app) + + await _safe_emit( + sio, + room=SocketIORoomStr.from_project_id(project_id), + message=message, + ignore_queue=False, + ) diff --git a/services/web/server/src/simcore_service_webserver/socketio/plugin.py b/services/web/server/src/simcore_service_webserver/socketio/plugin.py index 86d19aceeac4..149cad073abd 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/plugin.py +++ b/services/web/server/src/simcore_service_webserver/socketio/plugin.py @@ -1,14 +1,15 @@ -""" plugin socket-io +"""plugin socket-io - SEE https://github.com/miguelgrinberg/python-socketio +SEE https://github.com/miguelgrinberg/python-socketio """ import logging +from typing import Final from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func from ..constants import APP_SETTINGS_KEY from ..rabbitmq import setup_rabbitmq from ._observer import setup_socketio_observer_events @@ -16,8 +17,12 @@ _logger = logging.getLogger(__name__) +APP_SOCKETIO_SERVER_KEY: Final = web.AppKey( + "APP_SOCKETIO_SERVER_KEY", object +) # socketio.AsyncServer -@app_module_setup( + +@app_setup_func( "simcore_service_webserver.socketio", ModuleCategory.ADDON, settings_name="WEBSERVER_SOCKETIO", diff --git a/services/web/server/src/simcore_service_webserver/socketio/server.py b/services/web/server/src/simcore_service_webserver/socketio/server.py index 617af26072fe..4245b9502f74 100644 --- a/services/web/server/src/simcore_service_webserver/socketio/server.py +++ b/services/web/server/src/simcore_service_webserver/socketio/server.py @@ -19,17 +19,12 @@ async def _socketio_server_cleanup_ctx(app: web.Application) -> AsyncIterator[None]: - use_logger: bool | logging.Logger = _logger - # SEE https://github.com/miguelgrinberg/python-socketio/blob/v4.6.1/docs/server.rst#aiohttp - server_manager = AsyncAioPikaManager( - url=get_rabbitmq_settings(app).dsn, - logger=use_logger, - ) + server_manager = AsyncAioPikaManager(url=get_rabbitmq_settings(app).dsn) sio_server = AsyncServer( async_mode="aiohttp", - logger=use_logger, - engineio_logger=False, + logger=True, + engineio_logger=True, client_manager=server_manager, json=JsonNamespace, ) diff --git a/services/web/server/src/simcore_service_webserver/statics/_events.py b/services/web/server/src/simcore_service_webserver/statics/_events.py index 1d1e1912004e..af62a9bff253 100644 --- a/services/web/server/src/simcore_service_webserver/statics/_events.py +++ b/services/web/server/src/simcore_service_webserver/statics/_events.py @@ -7,6 +7,7 @@ from aiohttp.client import ClientSession from aiohttp.client_exceptions import ClientConnectionError, ClientError from common_library.json_serialization import json_dumps +from models_library.utils.change_case import snake_to_camel from packaging.version import Version from servicelib.aiohttp.client_session import get_client_session from tenacity.asyncio import AsyncRetrying @@ -99,6 +100,10 @@ def _get_release_notes_vtag(vtag: str) -> str: return f"v{version.major}.{version.minor}.0" +def _get_product_data(product: Product) -> dict[str, Any]: + return {snake_to_camel(k): v for k, v in 
product.to_statics().items()} + + async def create_and_cache_statics_json(app: web.Application) -> None: # NOTE: in devel model, the folder might be under construction # (qx-compile takes time), therefore we create statics.json @@ -122,7 +127,8 @@ async def create_and_cache_statics_json(app: web.Application) -> None: data = deepcopy(common) _logger.debug("Product %s", product.name) - data.update(product.to_statics()) + + data.update(_get_product_data(product)) # Adds specifics to login settings if (p := product.login_settings) and (v := p.get("LOGIN_2FA_REQUIRED", None)): diff --git a/services/web/server/src/simcore_service_webserver/statics/plugin.py b/services/web/server/src/simcore_service_webserver/statics/plugin.py index 07c30033fe86..ff316b267418 100644 --- a/services/web/server/src/simcore_service_webserver/statics/plugin.py +++ b/services/web/server/src/simcore_service_webserver/statics/plugin.py @@ -1,16 +1,17 @@ -""" Serves client's code +"""Serves client's code - - The client-side runs a RIA (Rich Interface Application) so the server does not - need to render pages upon request but only serve once the code to the client. - - The client application then interacts with the server via a http and/or socket API - - The client application is under ``services/static-webserver/client`` and the ``webclient`` service - is used to build it. +- The client-side runs a RIA (Rich Interface Application) so the server does not +need to render pages upon request but only serve once the code to the client. +- The client application then interacts with the server via a http and/or socket API +- The client application is under ``services/static-webserver/client`` and the ``webclient`` service +is used to build it. """ + import logging from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func from ..constants import INDEX_RESOURCE_NAME from ..products.plugin import setup_products from ._events import create_and_cache_statics_json, create_cached_indexes @@ -20,7 +21,7 @@ _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_STATICWEB", logger=_logger ) def setup_statics(app: web.Application) -> None: diff --git a/services/web/server/src/simcore_service_webserver/storage/_rest.py b/services/web/server/src/simcore_service_webserver/storage/_rest.py index 9efd512aee88..e5cffbc94884 100644 --- a/services/web/server/src/simcore_service_webserver/storage/_rest.py +++ b/services/web/server/src/simcore_service_webserver/storage/_rest.py @@ -12,7 +12,9 @@ from models_library.api_schemas_long_running_tasks.tasks import ( TaskGet, ) -from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobGet +from models_library.api_schemas_rpc_async_jobs.async_jobs import ( + AsyncJobGet, +) from models_library.api_schemas_storage.storage_schemas import ( FileUploadCompleteResponse, FileUploadCompletionBody, @@ -31,12 +33,14 @@ from pydantic import AnyUrl, BaseModel, ByteSize, TypeAdapter, field_validator from servicelib.aiohttp import status from servicelib.aiohttp.client_session import get_client_session +from servicelib.aiohttp.request_keys import RQT_USERID_KEY from servicelib.aiohttp.requests_validation import ( parse_request_body_as, parse_request_path_parameters_as, parse_request_query_parameters_as, ) from servicelib.aiohttp.rest_responses import create_data_response +from servicelib.celery.models import 
OwnerMetadata from servicelib.common_headers import X_FORWARDED_PROTO from servicelib.rabbitmq.rpc_interfaces.storage.paths import ( compute_path_size as remote_compute_path_size, @@ -45,13 +49,12 @@ delete_paths as remote_delete_paths, ) from servicelib.rabbitmq.rpc_interfaces.storage.simcore_s3 import start_export_data -from servicelib.request_keys import RQT_USERID_KEY from servicelib.rest_responses import unwrap_envelope from yarl import URL from .._meta import API_VTAG from ..login.decorators import login_required -from ..models import RequestContext +from ..models import AuthenticatedRequestContext, WebServerOwnerMetadata from ..rabbitmq import get_rabbitmq_rpc_client from ..security.decorators import permission_required from ..tasks._exception_handlers import handle_export_data_exceptions @@ -139,7 +142,7 @@ async def _forward_request_to_storage( match resp.status: case status.HTTP_422_UNPROCESSABLE_ENTITY: raise web.HTTPUnprocessableEntity( - reason=await resp.text(), content_type=resp.content_type + text=await resp.text(), content_type=resp.content_type ) case status.HTTP_404_NOT_FOUND: raise web.HTTPNotFound(text=await resp.text()) @@ -183,7 +186,6 @@ def _create_data_response_from_async_job( return create_data_response( TaskGet( task_id=async_job_id, - task_name=async_job_id, status_href=f"{request.url.with_path(str(request.app.router['get_async_job_status'].url_for(task_id=async_job_id)))}", abort_href=f"{request.url.with_path(str(request.app.router['cancel_async_job'].url_for(task_id=async_job_id)))}", result_href=f"{request.url.with_path(str(request.app.router['get_async_job_result'].url_for(task_id=async_job_id)))}", @@ -199,7 +201,7 @@ def _create_data_response_from_async_job( @login_required @permission_required("storage.files.*") async def compute_path_size(request: web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as( StoragePathComputeSizeParams, request ) @@ -207,10 +209,15 @@ async def compute_path_size(request: web.Request) -> web.Response: rabbitmq_rpc_client = get_rabbitmq_rpc_client(request.app) async_job, _ = await remote_compute_path_size( rabbitmq_rpc_client, - user_id=req_ctx.user_id, - product_name=req_ctx.product_name, location_id=path_params.location_id, path=path_params.path, + owner_metadata=OwnerMetadata.model_validate( + WebServerOwnerMetadata( + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + ).model_dump() + ), + user_id=req_ctx.user_id, ) return _create_data_response_from_async_job(request, async_job) @@ -223,17 +230,22 @@ async def compute_path_size(request: web.Request) -> web.Response: @login_required @permission_required("storage.files.*") async def batch_delete_paths(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(StorageLocationPathParams, request) body = await parse_request_body_as(BatchDeletePathsBodyParams, request) rabbitmq_rpc_client = get_rabbitmq_rpc_client(request.app) async_job, _ = await remote_delete_paths( rabbitmq_rpc_client, - user_id=req_ctx.user_id, - product_name=req_ctx.product_name, location_id=path_params.location_id, paths=body.paths, + owner_metadata=OwnerMetadata.model_validate( + WebServerOwnerMetadata( + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + ).model_dump() + ), + user_id=req_ctx.user_id, ) return 
_create_data_response_from_async_job(request, async_job) @@ -488,22 +500,27 @@ def allow_only_simcore(cls, v: int) -> int: return v rabbitmq_rpc_client = get_rabbitmq_rpc_client(request.app) - _req_ctx = RequestContext.model_validate(request) + _req_ctx = AuthenticatedRequestContext.model_validate(request) _ = parse_request_path_parameters_as(_PathParams, request) export_data_post = await parse_request_body_as( model_schema_cls=DataExportPost, request=request ) async_job_rpc_get, _ = await start_export_data( rabbitmq_rpc_client=rabbitmq_rpc_client, - user_id=_req_ctx.user_id, - product_name=_req_ctx.product_name, paths_to_export=export_data_post.paths, + export_as="path", + owner_metadata=OwnerMetadata.model_validate( + WebServerOwnerMetadata( + user_id=_req_ctx.user_id, + product_name=_req_ctx.product_name, + ).model_dump() + ), + user_id=_req_ctx.user_id, ) _job_id = f"{async_job_rpc_get.job_id}" return create_data_response( TaskGet( task_id=_job_id, - task_name=_job_id, status_href=f"{request.url.with_path(str(request.app.router['get_async_job_status'].url_for(task_id=_job_id)))}", abort_href=f"{request.url.with_path(str(request.app.router['cancel_async_job'].url_for(task_id=_job_id)))}", result_href=f"{request.url.with_path(str(request.app.router['get_async_job_result'].url_for(task_id=_job_id)))}", diff --git a/services/web/server/src/simcore_service_webserver/storage/api.py b/services/web/server/src/simcore_service_webserver/storage/api.py index 868dd63ad935..34d86c1f4780 100644 --- a/services/web/server/src/simcore_service_webserver/storage/api.py +++ b/services/web/server/src/simcore_service_webserver/storage/api.py @@ -7,7 +7,7 @@ from typing import Any, Final from aiohttp import ClientError, ClientSession, ClientTimeout, web -from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobNameData +from common_library.logging.logging_base import get_log_record_extra from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE from models_library.api_schemas_storage.storage_schemas import ( FileLocation, @@ -23,13 +23,15 @@ from models_library.users import UserID from pydantic import ByteSize, HttpUrl, TypeAdapter from servicelib.aiohttp.client_session import get_client_session -from servicelib.logging_utils import get_log_record_extra, log_context +from servicelib.celery.models import OwnerMetadata +from servicelib.logging_utils import log_context from servicelib.rabbitmq.rpc_interfaces.async_jobs.async_jobs import ( AsyncJobComposedResult, submit_and_wait, ) from yarl import URL +from ..models import WebServerOwnerMetadata from ..projects.models import ProjectDict from ..projects.utils import NodesMap from ..rabbitmq import get_rabbitmq_rpc_client @@ -37,6 +39,7 @@ _logger = logging.getLogger(__name__) + _TOTAL_TIMEOUT_TO_COPY_DATA_SECS: Final[int] = 60 * 60 _SIMCORE_LOCATION: Final[LocationID] = 0 @@ -117,7 +120,12 @@ async def copy_data_folders_from_project( rabbitmq_client, method_name="copy_folders_from_project", rpc_namespace=STORAGE_RPC_NAMESPACE, - job_id_data=AsyncJobNameData(user_id=user_id, product_name=product_name), + owner_metadata=OwnerMetadata.model_validate( + WebServerOwnerMetadata( + user_id=user_id, + product_name=product_name, + ).model_dump() + ), body=TypeAdapter(FoldersBody).validate_python( { "source": source_project, @@ -126,6 +134,7 @@ async def copy_data_folders_from_project( }, ), client_timeout=datetime.timedelta(seconds=_TOTAL_TIMEOUT_TO_COPY_DATA_SECS), + user_id=user_id, ): yield job_composed_result diff --git 
a/services/web/server/src/simcore_service_webserver/storage/plugin.py b/services/web/server/src/simcore_service_webserver/storage/plugin.py index e0c17eb8b0f8..e27b6aa64006 100644 --- a/services/web/server/src/simcore_service_webserver/storage/plugin.py +++ b/services/web/server/src/simcore_service_webserver/storage/plugin.py @@ -1,20 +1,21 @@ -""" storage subsystem - manages the interaction with the storage service - -""" +"""storage subsystem - manages the interaction with the storage service""" import logging +from typing import Final from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func from ..constants import APP_SETTINGS_KEY from ..rest.plugin import setup_rest from . import _rest _logger = logging.getLogger(__name__) +APP_STORAGE_CLIENT_KEY: Final = web.AppKey("APP_STORAGE_CLIENT_KEY", object) + -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_STORAGE", logger=_logger ) def setup_storage(app: web.Application): diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_catalog.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_catalog.py index 752b1c3e2ee8..30a9fbd35ecb 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_catalog.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_catalog.py @@ -24,8 +24,8 @@ ) from simcore_postgres_database.utils_services import create_select_latest_services_query -from ..db.plugin import get_database_engine -from ._errors import ServiceNotFound +from ..db.plugin import get_database_engine_legacy +from ._errors import ServiceNotFoundError from .settings import StudiesDispatcherSettings, get_plugin_settings LARGEST_PAGE_SIZE = 1000 @@ -68,7 +68,7 @@ async def iter_latest_product_services( assert page_number >= 1 # nosec assert ((page_number - 1) * page_size) >= 0 # nosec - engine: Engine = get_database_engine(app) + engine: Engine = get_database_engine_legacy(app) settings: StudiesDispatcherSettings = get_plugin_settings(app) # Select query for latest version of the service @@ -140,7 +140,7 @@ async def validate_requested_service( service_key: ServiceKey, service_version: ServiceVersion, ) -> ValidService: - engine: Engine = get_database_engine(app) + engine: Engine = get_database_engine_legacy(app) async with engine.acquire() as conn: query = sa.select( @@ -156,7 +156,7 @@ async def validate_requested_service( row = await result.fetchone() if row is None: - raise ServiceNotFound( + raise ServiceNotFoundError( service_key=service_key, service_version=service_version ) diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_constants.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_constants.py index b36820434b00..d92c75618eb1 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_constants.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_constants.py @@ -1,34 +1,39 @@ from typing import Final +from common_library.user_messages import user_message + # # NOTE: MSG_$(ERROR_CODE_NAME) strings MUST be human readable messages # Please keep alphabetical order # -MSG_PROJECT_NOT_FOUND: Final[str] = "Cannot find any study with ID '{project_id}'" - -# This error happens when the linked study ID does not exists OR is not shared with everyone -MSG_PROJECT_NOT_PUBLISHED: Final[str] = "Cannot 
find any study with ID '{project_id}'" +MSG_PROJECT_NOT_FOUND: Final[str] = user_message( + "The project with ID '{project_id}' could not be found.", _version=1 +) -# This error happens when the linked study ID does not exists OR is not shared with everyone OR is NOT public -MSG_PUBLIC_PROJECT_NOT_PUBLISHED: Final[str] = ( - "Only available for registered users.
" - "Please login and try again.

" - "If you don't have an account, please request one at {support_email}

" +# This error happens when the linked project ID does not exists OR is not shared with everyone +MSG_PROJECT_NOT_PUBLISHED: Final[str] = user_message( + "The project with ID '{project_id}' is not available or not shared.", _version=1 ) -MSG_GUESTS_NOT_ALLOWED: Final[str] = ( - "Access restricted to registered users.

" - "If you don't have an account, please email to support and request one

" +# This error happens when the linked project ID does not exists OR is not shared with everyone OR is NOT public +MSG_PUBLIC_PROJECT_NOT_PUBLISHED: Final[str] = user_message( + "This project is only available for registered users.

" + "Please log in and try again.

" + "If you don't have an account, please request one at {support_email}.

", + _version=1, ) -MSG_TOO_MANY_GUESTS: Final[str] = ( - "We have reached the maximum of anonymous users allowed the platform. " - "Please try later or login with a registered account." + +MSG_TOO_MANY_GUESTS: Final[str] = user_message( + "We have reached the maximum number of anonymous users allowed on the platform. " + "Please try again later or log in with a registered account.", + _version=1, ) -MSG_UNEXPECTED_DISPATCH_ERROR: Final[str] = ( - "Sorry, but looks like something unexpected went wrong!" - "We track these errors automatically, but if the problem persists feel free to contact us." - "In the meantime, try refreshing." +MSG_UNEXPECTED_DISPATCH_ERROR: Final[str] = user_message( + "Sorry, something unexpected went wrong! " + "We track these errors automatically, but if the problem persists please contact us. " + "In the meantime, try refreshing the page.", + _version=1, ) diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/__init__.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/__init__.py new file mode 100644 index 000000000000..a051fdcd7af5 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/__init__.py @@ -0,0 +1,29 @@ +import logging + +from aiohttp import web + +from ...login.decorators import login_required +from .._controller.rest.redirects import get_redirection_to_viewer +from ..settings import StudiesDispatcherSettings +from .rest.nih import routes as nih_routes +from .rest.redirects import get_redirection_to_viewer + +_logger = logging.getLogger(__name__) + + +def setup_controller(app: web.Application, settings: StudiesDispatcherSettings): + # routes + redirect_handler = get_redirection_to_viewer + if settings.is_login_required(): + redirect_handler = login_required(get_redirection_to_viewer) + + _logger.info( + "'%s' config explicitly disables anonymous users from this feature", + __name__, + ) + + app.router.add_routes( + [web.get("/view", redirect_handler, name="get_redirection_to_viewer")] + ) + + app.router.add_routes(nih_routes) diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/rest/__init__.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/rest/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/rest/nih.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/rest/nih.py new file mode 100644 index 000000000000..74174840fb38 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/rest/nih.py @@ -0,0 +1,65 @@ +"""Handles requests to the Rest API""" + +import logging + +from aiohttp import web +from aiohttp.web import Request +from pydantic import ( + ValidationError, +) + +from ...._meta import API_VTAG +from ....products import products_web +from ....utils_aiohttp import envelope_json_response +from ... 
import _service +from ..._catalog import iter_latest_product_services +from .nih_schemas import ServiceGet, Viewer + +_logger = logging.getLogger(__name__) + + +routes = web.RouteTableDef() + + +@routes.get(f"/{API_VTAG}/services", name="list_latest_services") +async def list_latest_services(request: Request): + """Returns a list latest version of services""" + product_name = products_web.get_product_name(request) + + services = [] + async for service_data in iter_latest_product_services( + request.app, product_name=product_name + ): + try: + service = ServiceGet.create(service_data, request) + services.append(service) + except ValidationError as err: + _logger.debug("Invalid %s: %s", f"{service_data=}", err) + + return envelope_json_response(services) + + +@routes.get(f"/{API_VTAG}/viewers", name="list_viewers") +async def list_viewers(request: Request): + # filter: file_type=* + file_type: str | None = request.query.get("file_type", None) + + viewers = [ + Viewer.create(request, viewer).model_dump() + for viewer in await _service.list_viewers_info(request.app, file_type=file_type) + ] + return envelope_json_response(viewers) + + +@routes.get(f"/{API_VTAG}/viewers/default", name="list_default_viewers") +async def list_default_viewers(request: Request): + # filter: file_type=* + file_type: str | None = request.query.get("file_type", None) + + viewers = [ + Viewer.create(request, viewer).model_dump() + for viewer in await _service.list_viewers_info( + request.app, file_type=file_type, only_default=True + ) + ] + return envelope_json_response(viewers) diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_rest_handlers.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/rest/nih_schemas.py similarity index 67% rename from services/web/server/src/simcore_service_webserver/studies_dispatcher/_rest_handlers.py rename to services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/rest/nih_schemas.py index 943893972fe6..b9579f0ff9c1 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_rest_handlers.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/rest/nih_schemas.py @@ -1,6 +1,3 @@ -""" Handles requests to the Rest API - -""" import logging from dataclasses import asdict @@ -13,18 +10,13 @@ ConfigDict, Field, TypeAdapter, - ValidationError, field_validator, ) from pydantic.networks import HttpUrl -from .._meta import API_VTAG -from ..products import products_web -from ..utils_aiohttp import envelope_json_response -from ._catalog import ServiceMetaData, iter_latest_product_services -from ._core import list_viewers_info -from ._models import ViewerInfo -from ._redirects_handlers import ViewerQueryParams +from ..._catalog import ServiceMetaData +from ..._models import ViewerInfo +from .redirects_schemas import ViewerQueryParams _logger = logging.getLogger(__name__) @@ -50,8 +42,8 @@ def _compose_service_only_dispatcher_prefix_url( request: web.Request, service_key: str, service_version: str ) -> HttpUrl: params = ViewerQueryParams( - viewer_key=ServiceKey(service_key), - viewer_version=ServiceVersion(service_version), + viewer_key=TypeAdapter(ServiceKey).validate_python(service_key), + viewer_version=TypeAdapter(ServiceVersion).validate_python(service_version), ).model_dump(exclude_none=True, exclude_unset=True) absolute_url = request.url.join( request.app.router["get_redirection_to_viewer"].url_for().with_query(**params) @@ -150,60 +142,3 @@ def 
remove_dot_prefix_from_extension(cls, v): } } ) - - -# -# API Handlers -# - - -routes = web.RouteTableDef() - - -@routes.get(f"/{API_VTAG}/services", name="list_latest_services") -async def list_latest_services(request: Request): - """Returns a list latest version of services""" - product_name = products_web.get_product_name(request) - - services = [] - async for service_data in iter_latest_product_services( - request.app, product_name=product_name - ): - try: - service = ServiceGet.create(service_data, request) - services.append(service) - except ValidationError as err: - _logger.debug("Invalid %s: %s", f"{service_data=}", err) - - return envelope_json_response(services) - - -@routes.get(f"/{API_VTAG}/viewers", name="list_viewers") -async def list_viewers(request: Request): - # filter: file_type=* - file_type: str | None = request.query.get("file_type", None) - - viewers = [ - Viewer.create(request, viewer).model_dump() - for viewer in await list_viewers_info(request.app, file_type=file_type) - ] - return envelope_json_response(viewers) - - -@routes.get(f"/{API_VTAG}/viewers/default", name="list_default_viewers") -async def list_default_viewers(request: Request): - # filter: file_type=* - file_type: str | None = request.query.get("file_type", None) - - viewers = [ - Viewer.create(request, viewer).model_dump() - for viewer in await list_viewers_info( - request.app, file_type=file_type, only_default=True - ) - ] - return envelope_json_response(viewers) - - -rest_handler_functions = { - fun.__name__: fun for fun in [list_default_viewers, list_viewers] -} diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/rest/redirects.py similarity index 54% rename from services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py rename to services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/rest/redirects.py index 605b00f623d1..332d628bd196 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_redirects_handlers.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/rest/redirects.py @@ -1,40 +1,39 @@ """Handles request to the viewers redirection entrypoints""" -import functools import logging -import urllib.parse -from typing import TypeAlias from aiohttp import web -from common_library.error_codes import create_error_code from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID -from models_library.services import ServiceKey, ServiceVersion -from pydantic import BaseModel, ConfigDict, ValidationError, field_validator -from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import parse_request_query_parameters_as -from servicelib.aiohttp.typing_extension import Handler -from servicelib.logging_errors import create_troubleshotting_log_kwargs - -from ..dynamic_scheduler import api as dynamic_scheduler_service -from ..products import products_web -from ..utils import compose_support_error_msg -from ..utils_aiohttp import create_redirect_to_page_response, get_api_base_url -from ._catalog import ValidService, validate_requested_service -from ._constants import MSG_UNEXPECTED_DISPATCH_ERROR -from ._core import validate_requested_file, validate_requested_viewer -from ._errors import InvalidRedirectionParams, StudyDispatcherError -from ._models import FileParams, ServiceInfo, ServiceParams, 
ViewerInfo -from ._projects import ( + +from ....dynamic_scheduler import api as dynamic_scheduler_service +from ....products import products_web +from ....utils_aiohttp import create_redirect_to_page_response, get_api_base_url +from ... import _service +from ..._catalog import ValidService, validate_requested_service +from ..._errors import ( + InvalidRedirectionParamsError, +) +from ..._models import ServiceInfo, ViewerInfo +from ..._projects import ( get_or_create_project_with_file, get_or_create_project_with_file_and_service, get_or_create_project_with_service, ) -from ._users import UserInfo, ensure_authentication, get_or_create_guest_user -from .settings import get_plugin_settings +from ..._users import UserInfo, ensure_authentication, get_or_create_guest_user +from ...settings import get_plugin_settings +from .redirects_exceptions import handle_errors_with_error_page +from .redirects_schemas import ( + FileQueryParams, + RedirectionQueryParams, + ServiceAndFileParams, + ServiceQueryParams, +) _logger = logging.getLogger(__name__) + # # HELPERS # @@ -58,18 +57,6 @@ def _create_redirect_response_to_view_page( ) -def _create_redirect_response_to_error_page( - app: web.Application, message: str, status_code: int -) -> web.HTTPFound: - # NOTE: these are 'error' page params and need to be interpreted by front-end correctly! - return create_redirect_to_page_response( - app, - page="error", - message=message, - status_code=status_code, - ) - - def _create_service_info_from(service: ValidService) -> ServiceInfo: values_map = { "key": service.key, @@ -82,133 +69,12 @@ def _create_service_info_from(service: ValidService) -> ServiceInfo: return ServiceInfo.model_construct(_fields_set=set(values_map.keys()), **values_map) -def _handle_errors_with_error_page(handler: Handler): - @functools.wraps(handler) - async def wrapper(request: web.Request) -> web.StreamResponse: - try: - return await handler(request) - - except (web.HTTPRedirection, web.HTTPSuccessful): - # NOTE: that response is a redirection that is reraised and not returned - raise - - except StudyDispatcherError as err: - raise _create_redirect_response_to_error_page( - request.app, - message=f"Sorry, we cannot dispatch your study: {err}", - status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, # 422 - ) from err - - except web.HTTPUnauthorized as err: - raise _create_redirect_response_to_error_page( - request.app, - message=f"{err.reason}. 
Please reload this page to login/register.", - status_code=err.status_code, - ) from err - - except web.HTTPUnprocessableEntity as err: - raise _create_redirect_response_to_error_page( - request.app, - message=f"Invalid parameters in link: {err.reason}", - status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, # 422 - ) from err - - except web.HTTPClientError as err: - _logger.exception("Client error with status code %d", err.status_code) - raise _create_redirect_response_to_error_page( - request.app, - message=err.reason, - status_code=err.status_code, - ) from err - - except (ValidationError, web.HTTPServerError, Exception) as err: - error_code = create_error_code(err) - - user_error_msg = compose_support_error_msg( - msg=MSG_UNEXPECTED_DISPATCH_ERROR, error_code=error_code - ) - _logger.exception( - **create_troubleshotting_log_kwargs( - user_error_msg, - error=err, - error_code=error_code, - error_context={"request": request}, - tip="Unexpected failure while dispatching study", - ) - ) - raise _create_redirect_response_to_error_page( - request.app, - message=user_error_msg, - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - ) from err - - return wrapper - - -# -# API Schemas -# - - -class ServiceQueryParams(ServiceParams): - model_config = ConfigDict(extra="forbid") - - -class FileQueryParams(FileParams): - model_config = ConfigDict(extra="forbid") - - @field_validator("file_type") - @classmethod - def ensure_extension_upper_and_dotless(cls, v): - # NOTE: see filetype constraint-check - if v and isinstance(v, str): - w = urllib.parse.unquote(v) - return w.upper().lstrip(".") - return v - - -class ServiceAndFileParams(FileQueryParams, ServiceParams): ... - - -class ViewerQueryParams(BaseModel): - file_type: str | None = None - viewer_key: ServiceKey - viewer_version: ServiceVersion - - @staticmethod - def from_viewer(viewer: ViewerInfo) -> "ViewerQueryParams": - # can safely construct w/o validation from a viewer - return ViewerQueryParams.model_construct( - file_type=viewer.filetype, - viewer_key=viewer.key, - viewer_version=viewer.version, - ) - - @field_validator("file_type") - @classmethod - def ensure_extension_upper_and_dotless(cls, v): - # NOTE: see filetype constraint-check - if v and isinstance(v, str): - w = urllib.parse.unquote(v) - return w.upper().lstrip(".") - return v - - -RedirectionQueryParams: TypeAlias = ( - # NOTE: Extra.forbid in FileQueryParams, ServiceQueryParams avoids bad casting when - # errors in ServiceAndFileParams - ServiceAndFileParams - | FileQueryParams - | ServiceQueryParams -) - - # -# API HANDLERS +# ROUTES # -@_handle_errors_with_error_page +@handle_errors_with_error_page async def get_redirection_to_viewer(request: web.Request): """ - validate request @@ -229,7 +95,7 @@ async def get_redirection_to_viewer(request: web.Request): file_params = service_params = query_params # NOTE: Cannot check file_size in from HEAD in a AWS download link so file_size is just infomative - viewer: ViewerInfo = await validate_requested_viewer( + viewer: ViewerInfo = await _service.validate_requested_viewer( request.app, file_type=file_params.file_type, file_size=file_params.file_size, @@ -298,7 +164,7 @@ async def get_redirection_to_viewer(request: web.Request): elif isinstance(query_params, FileQueryParams): file_params_ = query_params - validate_requested_file( + _service.validate_requested_file( app=request.app, file_type=file_params_.file_type, file_size=file_params_.file_size, @@ -336,7 +202,7 @@ async def get_redirection_to_viewer(request: web.Request): else: # 
NOTE: if query is done right, this should never happen - raise InvalidRedirectionParams + raise InvalidRedirectionParamsError(query_params=query_params) # Adds auth cookies (login) await ensure_authentication(user, request, response) diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/rest/redirects_exceptions.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/rest/redirects_exceptions.py new file mode 100644 index 000000000000..094a7cebac99 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/rest/redirects_exceptions.py @@ -0,0 +1,167 @@ +import functools +import logging + +from aiohttp import web +from common_library.error_codes import create_error_code +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs +from common_library.user_messages import user_message +from models_library.function_services_catalog._utils import ServiceNotFound +from servicelib.aiohttp import status +from servicelib.aiohttp.typing_extension import Handler + +from ....exception_handling import create_error_context_from_request +from ....utils import compose_support_error_msg +from ....utils_aiohttp import create_redirect_to_page_response +from ..._constants import MSG_UNEXPECTED_DISPATCH_ERROR +from ..._errors import ( + FileToLargeError, + GuestUserNotAllowedError, + GuestUsersLimitError, + IncompatibleServiceError, + InvalidRedirectionParamsError, + ProjectWorkbenchMismatchError, +) + +_logger = logging.getLogger(__name__) + +# +# HELPERS +# + + +def _create_redirect_response_to_error_page( + app: web.Application, message: str, status_code: int +) -> web.HTTPFound: + # NOTE: these are 'error' page params and need to be interpreted by front-end correctly! 
+ return create_redirect_to_page_response( + app, + page="error", + message=message, + status_code=status_code, + ) + + +def _create_error_redirect_with_logging( + request: web.Request, + err: Exception, + *, + message: str, + status_code: int, + tip: str | None = None, +) -> web.HTTPFound: + """Helper to create error redirect with consistent logging""" + error_code = create_error_code(err) + user_error_msg = compose_support_error_msg(msg=message, error_code=error_code) + + _logger.exception( + **create_troubleshooting_log_kwargs( + user_error_msg, + error=err, + error_code=error_code, + error_context=create_error_context_from_request(request), + tip=tip, + ) + ) + + return _create_redirect_response_to_error_page( + request.app, + message=user_error_msg, + status_code=status_code, + ) + + +def _create_simple_error_redirect( + request: web.Request, + public_error: Exception, + *, + status_code: int, +) -> web.HTTPFound: + """Helper to create simple error redirect without logging + + WARNING: note that the `public_error` is exposed as-is in the user-message + """ + user_error_msg = user_message( + f"Unable to open your project: {public_error}", _version=1 + ) + return _create_redirect_response_to_error_page( + request.app, + message=user_error_msg, + status_code=status_code, + ) + + +def handle_errors_with_error_page(handler: Handler): + @functools.wraps(handler) + async def _wrapper(request: web.Request) -> web.StreamResponse: + try: + return await handler(request) + + except (web.HTTPRedirection, web.HTTPSuccessful): + # NOTE: that response is a redirection that is reraised and not returned + raise + + except GuestUserNotAllowedError as err: + raise _create_redirect_response_to_error_page( + request.app, + message=user_message( + "Access is restricted to registered users.
" + "If you don't have an account, please contact support to request one.

", + _version=2, + ), + status_code=status.HTTP_401_UNAUTHORIZED, + ) from err + + except ProjectWorkbenchMismatchError as err: + raise _create_error_redirect_with_logging( + request, + err, + message=MSG_UNEXPECTED_DISPATCH_ERROR, + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + tip="project might be corrupted", + ) from err + + except ( + ServiceNotFound, + FileToLargeError, + IncompatibleServiceError, + GuestUsersLimitError, + ) as err: + raise _create_simple_error_redirect( + request, + err, + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, + ) from err + + except (InvalidRedirectionParamsError, web.HTTPUnprocessableEntity) as err: + # Validation error in query parameters + raise _create_error_redirect_with_logging( + request, + err, + message=user_message( + "The link you provided is invalid because it doesn't contain valid information related to data or a service. " + "Please check the link and make sure it is correct.", + _version=1, + ), + status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, + tip="The link might be corrupted", + ) from err + + except web.HTTPClientError as err: + raise _create_error_redirect_with_logging( + request, + err, + message="Fatal error while redirecting request", + status_code=err.status_code, + tip="The link might be corrupted", + ) from err + + except Exception as err: + raise _create_error_redirect_with_logging( + request, + err, + message=MSG_UNEXPECTED_DISPATCH_ERROR, + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + tip="Unexpected failure while dispatching study", + ) from err + + return _wrapper diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/rest/redirects_schemas.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/rest/redirects_schemas.py new file mode 100644 index 000000000000..3cbbd931d148 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_controller/rest/redirects_schemas.py @@ -0,0 +1,60 @@ +import urllib.parse +from typing import TypeAlias + +from models_library.services import ServiceKey, ServiceVersion +from pydantic import BaseModel, ConfigDict, field_validator + +from ..._models import FileParams, ServiceParams, ViewerInfo + + +class ServiceQueryParams(ServiceParams): + model_config = ConfigDict(extra="forbid") + + +class FileQueryParams(FileParams): + model_config = ConfigDict(extra="forbid") + + @field_validator("file_type") + @classmethod + def _ensure_extension_upper_and_dotless(cls, v): + # NOTE: see filetype constraint-check + if v and isinstance(v, str): + w = urllib.parse.unquote(v) + return w.upper().lstrip(".") + return v + + +class ServiceAndFileParams(FileQueryParams, ServiceParams): ... 
+ + +class ViewerQueryParams(BaseModel): + file_type: str | None = None + viewer_key: ServiceKey + viewer_version: ServiceVersion + + @staticmethod + def from_viewer(viewer: ViewerInfo) -> "ViewerQueryParams": + # can safely construct w/o validation from a viewer + return ViewerQueryParams.model_construct( + file_type=viewer.filetype, + viewer_key=viewer.key, + viewer_version=viewer.version, + ) + + @field_validator("file_type") + @classmethod + def _ensure_extension_upper_and_dotless(cls, v): + # NOTE: see filetype constraint-check + if v and isinstance(v, str): + w = urllib.parse.unquote(v) + return w.upper().lstrip(".") + return v + + +RedirectionQueryParams: TypeAlias = ( + # NOTE: Extra.forbid in FileQueryParams, ServiceQueryParams avoids bad casting when + # errors in ServiceAndFileParams + ServiceAndFileParams + | FileQueryParams + | ServiceQueryParams +) diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_core.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_core.py deleted file mode 100644 index f0fa876bf0d2..000000000000 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_core.py +++ /dev/null @@ -1,157 +0,0 @@ -import logging -import uuid -from collections import deque -from functools import lru_cache - -import sqlalchemy as sa -from aiohttp import web -from models_library.services import ServiceVersion -from models_library.utils.pydantic_tools_extension import parse_obj_or_none -from pydantic import ByteSize, TypeAdapter, ValidationError -from servicelib.logging_utils import log_decorator -from simcore_postgres_database.models.services_consume_filetypes import ( - services_consume_filetypes, -) -from sqlalchemy.dialects.postgresql import ARRAY, INTEGER - -from ..db.plugin import get_database_engine -from ._errors import FileToLarge, IncompatibleService -from ._models import ViewerInfo -from .settings import get_plugin_settings - -_BASE_UUID = uuid.UUID("ca2144da-eabb-4daf-a1df-a3682050e25f") - - -_logger = logging.getLogger(__name__) - - -@lru_cache -def compose_uuid_from(*values) -> uuid.UUID: - composition: str = "/".join(map(str, values)) - return uuid.uuid5(_BASE_UUID, composition) - - -async def list_viewers_info( - app: web.Application, file_type: str | None = None, *, only_default: bool = False -) -> list[ViewerInfo]: - # - # TODO: These services MUST be shared with EVERYBODY! Setup check on startup and fill - # with !? - # - consumers: deque = deque() - - async with get_database_engine(app).acquire() as conn: - # FIXME: ADD CONDITION: service MUST be shared with EVERYBODY! 
- query = services_consume_filetypes.select() - if file_type: - query = query.where(services_consume_filetypes.c.filetype == file_type) - - query = query.order_by("filetype", "preference_order") - - if file_type and only_default: - query = query.limit(1) - - _logger.debug("Listing viewers:\n%s", query) - - listed_filetype = set() - async for row in await conn.execute(query): - try: - # TODO: filter in database (see test_list_default_compatible_services ) - if only_default and row["filetype"] in listed_filetype: - continue - listed_filetype.add(row["filetype"]) - consumer = ViewerInfo.create_from_db(row) - consumers.append(consumer) - - except ValidationError as err: - _logger.warning("Review invalid service metadata %s: %s", row, err) - - return list(consumers) - - -async def get_default_viewer( - app: web.Application, - file_type: str, - file_size: int | None = None, -) -> ViewerInfo: - """ - - Raises: - IncompatibleService - FileToLarge - """ - try: - viewers = await list_viewers_info(app, file_type, only_default=True) - viewer = viewers[0] - except IndexError as err: - raise IncompatibleService(file_type=file_type) from err - - if current_size := parse_obj_or_none(ByteSize, file_size): - max_size: ByteSize = get_plugin_settings(app).STUDIES_MAX_FILE_SIZE_ALLOWED - if current_size > max_size: - raise FileToLarge(file_size_in_mb=current_size.to("MiB")) - - return viewer - - -@log_decorator(_logger, level=logging.DEBUG) -async def validate_requested_viewer( - app: web.Application, - file_type: str, - file_size: int | None = None, - service_key: str | None = None, - service_version: str | None = None, -) -> ViewerInfo: - """ - - Raises: - IncompatibleService: When there is no match - - """ - - def _version(column_or_value): - # converts version value string to array[integer] that can be compared - return sa.func.string_to_array(column_or_value, ".").cast(ARRAY(INTEGER)) - - if not service_key and not service_version: - return await get_default_viewer(app, file_type, file_size) - - if service_key and service_version: - async with get_database_engine(app).acquire() as conn: - query = ( - services_consume_filetypes.select() - .where( - (services_consume_filetypes.c.filetype == file_type) - & (services_consume_filetypes.c.service_key == service_key) - & ( - _version(services_consume_filetypes.c.service_version) - <= _version(service_version) - ) - ) - .order_by(_version(services_consume_filetypes.c.service_version).desc()) - .limit(1) - ) - - result = await conn.execute(query) - row = await result.first() - if row: - view = ViewerInfo.create_from_db(row) - view.version = TypeAdapter(ServiceVersion).validate_python( - service_version - ) - return view - - raise IncompatibleService(file_type=file_type) - - -@log_decorator(_logger, level=logging.DEBUG) -def validate_requested_file( - app: web.Application, file_type: str, file_size: int | None = None -): - # NOTE in the future we might want to prevent some types to be pulled - assert file_type # nosec - - if current_size := parse_obj_or_none(ByteSize, file_size): - max_size: ByteSize = get_plugin_settings(app).STUDIES_MAX_FILE_SIZE_ALLOWED - if current_size > max_size: - raise FileToLarge(file_size_in_mb=current_size.to("MiB")) diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_errors.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_errors.py index 4c7c0bbce731..f52b2c1f71af 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_errors.py +++ 
b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_errors.py @@ -1,28 +1,50 @@ +from common_library.user_messages import user_message + from ..errors import WebServerBaseError -class StudyDispatcherError(WebServerBaseError, ValueError): - ... +class StudyDispatcherError(WebServerBaseError, ValueError): ... -class IncompatibleService(StudyDispatcherError): - msg_template = "None of the registered services can handle '{file_type}'" +class IncompatibleServiceError(StudyDispatcherError): + msg_template = user_message( + "None of the registered services can handle '{file_type}' files.", _version=1 + ) -class FileToLarge(StudyDispatcherError): - msg_template = "File size {file_size_in_mb} MB is over allowed limit" +class FileToLargeError(StudyDispatcherError): + msg_template = user_message( + "File size {file_size_in_mb} MB exceeds the allowed limit.", _version=1 + ) -class ServiceNotFound(StudyDispatcherError): - msg_template = "Service {service_key}:{service_version} not found" +class ServiceNotFoundError(StudyDispatcherError): + msg_template = user_message( + "Service {service_key}:{service_version} could not be found.", _version=1 + ) -class InvalidRedirectionParams(StudyDispatcherError): - msg_template = ( - "The link you provided is invalid because it doesn't contain any information related to data or a service." - " Please check the link and make sure it is correct." +class InvalidRedirectionParamsError(StudyDispatcherError): + msg_template = user_message( + "The provided link is invalid or incomplete.", _version=1 ) class GuestUsersLimitError(StudyDispatcherError): - msg_template = "Maximum number of guests was reached. Please login with a registered user or try again later" + msg_template = user_message( + "Maximum number of guest users has been reached. 
Please log in with a registered account or try again later.", + _version=1, + ) + + +class GuestUserNotAllowedError(StudyDispatcherError): + msg_template = user_message( + "Guest users are not allowed to access this resource.", _version=1 + ) + + +class ProjectWorkbenchMismatchError(StudyDispatcherError): + msg_template = user_message( + "Project {project_uuid} appears to be corrupted and cannot be accessed properly.", + _version=1, + ) diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_models.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_models.py index a9a1cc23661f..a98a3eac3052 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_models.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_models.py @@ -1,21 +1,16 @@ from typing import Annotated -from aiopg.sa.result import RowProxy from models_library.services import ServiceKey, ServiceVersion -from pydantic import BaseModel, Field, HttpUrl, PositiveInt, TypeAdapter +from pydantic import BaseModel, Field, HttpUrl, PositiveInt class ServiceInfo(BaseModel): key: ServiceKey version: ServiceVersion - label: Annotated[str, Field(..., description="Display name")] + label: Annotated[str, Field(description="Display name")] - thumbnail: HttpUrl = Field( - default=TypeAdapter(HttpUrl).validate_python( - "https://via.placeholder.com/170x120.png" - ) - ) + thumbnail: HttpUrl = HttpUrl("https://via.placeholder.com/170x120.png") is_guest_allowed: bool = True @@ -38,23 +33,12 @@ class ViewerInfo(ServiceInfo): to visualize a file of that type """ - filetype: str = Field(..., description="Filetype associated to this viewer") - - input_port_key: str = Field( - ..., - description="Name of the connection port, since it is service-dependent", - ) - - @classmethod - def create_from_db(cls, row: RowProxy) -> "ViewerInfo": - return cls( - key=row["service_key"], - version=row["service_version"], - filetype=row["filetype"], - label=row["service_display_name"] or row["service_key"].split("/")[-1], - input_port_key=row["service_input_port"], - is_guest_allowed=row["is_guest_allowed"], - ) + filetype: Annotated[str, Field(description="Filetype associated to this viewer")] + + input_port_key: Annotated[ + str, + Field(description="Name of the connection port, since it is service-dependent"), + ] class ServiceParams(BaseModel): diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects.py index 8c952fb4d7d4..669ae2233142 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects.py @@ -25,7 +25,8 @@ from ..projects._projects_service import get_project_for_user from ..projects.exceptions import ProjectInvalidRightsError, ProjectNotFoundError from ..utils import now_str -from ._core import compose_uuid_from +from . 
import _service +from ._errors import ProjectWorkbenchMismatchError from ._models import FileParams, ServiceInfo, ViewerInfo from ._users import UserInfo @@ -41,11 +42,11 @@ def _generate_nodeids(project_id: ProjectID) -> tuple[NodeID, NodeID]: - file_picker_id = compose_uuid_from( + file_picker_id = _service.compose_uuid_from( project_id, "4c69c0ce-00e4-4bd5-9cf0-59b67b3a9343", ) - viewer_id = compose_uuid_from( + viewer_id = _service.compose_uuid_from( project_id, "fc718e5a-bf07-4abe-b526-d9cafd34830c", ) @@ -264,7 +265,9 @@ async def get_or_create_project_with_file_and_service( # - if user requests several times, the same project is reused # - if user is not a guest, the project will be saved in it's account (desired?) # - project_uid: ProjectID = compose_uuid_from(user.id, viewer.footprint, download_link) + project_uid: ProjectID = _service.compose_uuid_from( + user.id, viewer.footprint, download_link + ) # Ids are linked to produce a footprint (see viewer_project_exists) file_picker_id, service_id = _generate_nodeids(project_uid) @@ -281,16 +284,14 @@ async def get_or_create_project_with_file_and_service( if is_valid: exists = True else: - _logger.error( - "Project %s exists but does not seem to be a viewer generated by this module." - " user: %s, viewer:%s, download_link:%s", - project_uid, - user, - viewer, - download_link, + raise ProjectWorkbenchMismatchError( + project_uuid=project_uid, + user=user, + viewer=viewer, + download_link=download_link, + project_db_workbench_keys=list(project_db.get("workbench", {}).keys()), + expected_keys=[file_picker_id, service_id], ) - # FIXME: CANNOT GUARANTEE!!, DELETE?? ERROR?? and cannot be viewed until verified? - raise web.HTTPInternalServerError except (ProjectNotFoundError, ProjectInvalidRightsError): exists = False @@ -325,7 +326,7 @@ async def get_or_create_project_with_service( product_name: str, product_api_base_url: str, ) -> ProjectNodePair: - project_uid: ProjectID = compose_uuid_from(user.id, service_info.footprint) + project_uid: ProjectID = _service.compose_uuid_from(user.id, service_info.footprint) _, service_id = _generate_nodeids(project_uid) try: @@ -363,7 +364,7 @@ async def get_or_create_project_with_file( product_name: str, product_api_base_url: str, ) -> ProjectNodePair: - project_uid: ProjectID = compose_uuid_from(user.id, file_params.footprint) + project_uid: ProjectID = _service.compose_uuid_from(user.id, file_params.footprint) file_picker_id, _ = _generate_nodeids(project_uid) if not await _project_exists( diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects_permalinks.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects_permalinks.py index 362bb7509b80..f63fc3cf1c0f 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects_permalinks.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_projects_permalinks.py @@ -9,8 +9,9 @@ from typing_extensions import ( # https://docs.pydantic.dev/latest/api/standard_library_types/#typeddict TypedDict, ) +from yarl import URL -from ..db.plugin import get_database_engine +from ..db.plugin import get_database_engine_legacy from ..projects.exceptions import PermalinkNotAllowedError, ProjectNotFoundError from ..projects.projects_permalink_service import ( ProjectPermalink, @@ -33,8 +34,10 @@ class _GroupAccessRightsDict(TypedDict): def create_permalink_for_study( - request: web.Request, + app: web.Application, *, + request_url: URL, + request_headers: 
dict[str, str], project_uuid: ProjectID | ProjectIDStr, project_type: ProjectType, project_access_rights: dict[_GroupID, _GroupAccessRightsDict], @@ -65,7 +68,7 @@ def create_permalink_for_study( raise PermalinkNotAllowedError(msg) # create - url_for = create_url_for_function(request) + url_for = create_url_for_function(app, request_url, request_headers) permalink = TypeAdapter(HttpUrl).validate_python( url_for(route_name="get_redirection_to_study_page", id=f"{project_uuid}"), ) @@ -77,14 +80,17 @@ def create_permalink_for_study( async def permalink_factory( - request: web.Request, project_uuid: ProjectID + app: web.Application, + request_url: URL, + request_headers: dict[str, str], + project_uuid: ProjectID, ) -> ProjectPermalink: """ - Assumes project_id is up-to-date in the database """ # NOTE: next iterations will mobe this as part of the project repository pattern - engine = get_database_engine(request.app) + engine = get_database_engine_legacy(app) async with engine.acquire() as conn: access_rights_subquery = ( sa.select( @@ -121,7 +127,9 @@ async def permalink_factory( raise ProjectNotFoundError(project_uuid=project_uuid) return create_permalink_for_study( - request, + app, + request_url=request_url, + request_headers=request_headers, project_uuid=row.uuid, project_type=row.type, project_access_rights=row.access_rights, diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_repository.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_repository.py new file mode 100644 index 000000000000..46ee8d110aef --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_repository.py @@ -0,0 +1,127 @@ +import logging +from collections.abc import AsyncIterator + +import sqlalchemy as sa +from models_library.services import ServiceVersion +from pydantic import TypeAdapter, ValidationError +from simcore_postgres_database.models.services_consume_filetypes import ( + services_consume_filetypes, +) +from simcore_postgres_database.utils_repos import pass_or_acquire_connection +from sqlalchemy.dialects.postgresql import ARRAY, INTEGER +from sqlalchemy.engine import Row +from sqlalchemy.ext.asyncio import AsyncConnection + +from ..db.base_repository import BaseRepository +from ._models import ViewerInfo + +_logger = logging.getLogger(__name__) + + +def _version(column_or_value): + """Converts version value string to array[integer] that can be compared.""" + return sa.func.string_to_array(column_or_value, ".").cast(ARRAY(INTEGER)) + + +def create_viewer_info_from_db(row: Row) -> ViewerInfo: + """Create ViewerInfo instance from database row.""" + return ViewerInfo( + key=row.service_key, + version=row.service_version, + filetype=row.filetype, + label=row.service_display_name or row.service_key.split("/")[-1], + input_port_key=row.service_input_port, + is_guest_allowed=row.is_guest_allowed, + ) + + +class StudiesDispatcherRepository(BaseRepository): + + async def list_viewers_info( + self, + connection: AsyncConnection | None = None, + *, + file_type: str | None = None, + only_default: bool = False, + ) -> list[ViewerInfo]: + """List viewer services that can consume the given file type.""" + + async def _iter_viewers() -> AsyncIterator[ViewerInfo]: + query = services_consume_filetypes.select() + if file_type: + query = query.where(services_consume_filetypes.c.filetype == file_type) + + query = query.order_by("filetype", "preference_order") + + if file_type and only_default: + query = query.limit(1) + + 
_logger.debug("Listing viewers:\n%s", query) + + async with pass_or_acquire_connection(self.engine, connection) as conn: + result = await conn.stream(query) + + listed_filetype = set() + async for row in result: + try: + # TODO: filter in database (see test_list_default_compatible_services ) + if only_default and row.filetype in listed_filetype: + continue + listed_filetype.add(row.filetype) + consumer = create_viewer_info_from_db(row) + yield consumer + + except ValidationError as err: + _logger.warning( + "Review invalid service metadata %s: %s", row, err + ) + + return [viewer async for viewer in _iter_viewers()] + + async def get_default_viewer_for_filetype( + self, + connection: AsyncConnection | None = None, + *, + file_type: str, + ) -> ViewerInfo | None: + """Get the default viewer for a specific file type.""" + viewers = await self.list_viewers_info( + connection=connection, file_type=file_type, only_default=True + ) + return viewers[0] if viewers else None + + async def find_compatible_viewer( + self, + connection: AsyncConnection | None = None, + *, + file_type: str, + service_key: str, + service_version: str, + ) -> ViewerInfo | None: + """Find a compatible viewer service for the given file type, service key, and version.""" + + query = ( + services_consume_filetypes.select() + .where( + (services_consume_filetypes.c.filetype == file_type) + & (services_consume_filetypes.c.service_key == service_key) + & ( + _version(services_consume_filetypes.c.service_version) + <= _version(service_version) + ) + ) + .order_by(_version(services_consume_filetypes.c.service_version).desc()) + .limit(1) + ) + + async with pass_or_acquire_connection(self.engine, connection) as conn: + result = await conn.execute(query) + row = result.one_or_none() + if row: + view = create_viewer_info_from_db(row) + view.version = TypeAdapter(ServiceVersion).validate_python( + service_version + ) + return view + + return None diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_service.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_service.py new file mode 100644 index 000000000000..beb83985b4d7 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_service.py @@ -0,0 +1,99 @@ +import logging +import uuid +from functools import lru_cache + +from aiohttp import web +from models_library.utils.pydantic_tools_extension import parse_obj_or_none +from pydantic import ByteSize +from servicelib.logging_utils import log_decorator + +from ._errors import FileToLargeError, IncompatibleServiceError +from ._models import ViewerInfo +from ._repository import StudiesDispatcherRepository +from .settings import get_plugin_settings + +_BASE_UUID = uuid.UUID("ca2144da-eabb-4daf-a1df-a3682050e25f") + + +_logger = logging.getLogger(__name__) + + +@lru_cache +def compose_uuid_from(*values) -> uuid.UUID: + composition: str = "/".join(map(str, values)) + return uuid.uuid5(_BASE_UUID, composition) + + +async def list_viewers_info( + app: web.Application, file_type: str | None = None, *, only_default: bool = False +) -> list[ViewerInfo]: + repo = StudiesDispatcherRepository.create_from_app(app) + return await repo.list_viewers_info(file_type=file_type, only_default=only_default) + + +async def get_default_viewer( + app: web.Application, + file_type: str, + file_size: int | None = None, +) -> ViewerInfo: + """ + + Raises: + IncompatibleService + FileToLarge + """ + repo = StudiesDispatcherRepository.create_from_app(app) + viewer = await 
repo.get_default_viewer_for_filetype(file_type=file_type) + + if viewer is None: + raise IncompatibleServiceError(file_type=file_type) + + if current_size := parse_obj_or_none(ByteSize, file_size): + max_size: ByteSize = get_plugin_settings(app).STUDIES_MAX_FILE_SIZE_ALLOWED + if current_size > max_size: + raise FileToLargeError(file_size_in_mb=current_size.to("MiB")) + + return viewer + + +@log_decorator(_logger, level=logging.DEBUG) +async def validate_requested_viewer( + app: web.Application, + file_type: str, + file_size: int | None = None, + service_key: str | None = None, + service_version: str | None = None, +) -> ViewerInfo: + """ + + Raises: + IncompatibleService: When there is no match + + """ + if not service_key and not service_version: + return await get_default_viewer(app, file_type, file_size) + + if service_key and service_version: + repo = StudiesDispatcherRepository.create_from_app(app) + viewer = await repo.find_compatible_viewer( + file_type=file_type, + service_key=service_key, + service_version=service_version, + ) + if viewer: + return viewer + + raise IncompatibleServiceError(file_type=file_type) + + +@log_decorator(_logger, level=logging.DEBUG) +def validate_requested_file( + app: web.Application, file_type: str, file_size: int | None = None +): + # NOTE in the future we might want to prevent some types to be pulled + assert file_type # nosec + + if current_size := parse_obj_or_none(ByteSize, file_size): + max_size: ByteSize = get_plugin_settings(app).STUDIES_MAX_FILE_SIZE_ALLOWED + if current_size > max_size: + raise FileToLargeError(file_size_in_mb=current_size.to("MiB")) diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_studies_access.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_studies_access.py index f9012c67d4f8..77a83efe4071 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_studies_access.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_studies_access.py @@ -21,10 +21,10 @@ from aiohttp import web from aiohttp_session import get_session from common_library.error_codes import create_error_code +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from models_library.projects import ProjectID from servicelib.aiohttp import status from servicelib.aiohttp.typing_extension import Handler -from servicelib.logging_errors import create_troubleshotting_log_kwargs from ..constants import INDEX_RESOURCE_NAME from ..director_v2 import director_v2_service @@ -39,7 +39,7 @@ ProjectNotFoundError, ) from ..projects.models import ProjectDict -from ..security.api import is_anonymous, remember_identity +from ..security import security_web from ..storage.api import copy_data_folders_from_project from ..utils import compose_support_error_msg from ..utils_aiohttp import create_redirect_to_page_response, get_api_base_url @@ -96,7 +96,7 @@ async def _get_published_template_project( ) if project_group_get.read is False: raise ProjectGroupNotFoundError( - reason=f"Project {project_uuid} group 1 not read access" + details=f"Project {project_uuid} group 1 not read access" ) if not prj: @@ -268,7 +268,7 @@ async def wrapper(request: web.Request) -> web.StreamResponse: msg=MSG_UNEXPECTED_DISPATCH_ERROR, error_code=error_code ) _logger.exception( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( user_error_msg, error=err, error_code=error_code, @@ -300,7 +300,7 @@ async def 
get_redirection_to_study_page(request: web.Request) -> web.Response: # Checks USER user = None - is_anonymous_user = await is_anonymous(request) + is_anonymous_user = await security_web.is_anonymous(request) if not is_anonymous_user: # NOTE: covers valid cookie with unauthorized user (e.g. expired guest/banned) user = await get_authorized_user(request) @@ -341,7 +341,7 @@ async def get_redirection_to_study_page(request: web.Request) -> web.Response: user_error_msg = MSG_TOO_MANY_GUESTS _logger.exception( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( user_error_msg, error=exc, error_code=error_code, @@ -373,7 +373,7 @@ async def get_redirection_to_study_page(request: web.Request) -> web.Response: user_error_msg = MSG_UNEXPECTED_DISPATCH_ERROR _logger.exception( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( user_error_msg, error=exc, error_code=error_code, @@ -405,7 +405,7 @@ async def get_redirection_to_study_page(request: web.Request) -> web.Response: if is_anonymous_user: _logger.debug("Auto login for anonymous user %s", user["name"]) - await remember_identity( + await security_web.remember_identity( request, response, user_email=user["email"], diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_users.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_users.py index 0ca360d5aeb9..c40e213e7b21 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/_users.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/_users.py @@ -17,30 +17,27 @@ import redis.asyncio as aioredis from aiohttp import web +from common_library.users_enums import UserRole, UserStatus from models_library.emails import LowerCaseEmailStr +from models_library.users import UserID from pydantic import BaseModel, TypeAdapter from redis.exceptions import LockNotOwnedError -from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY from servicelib.logging_utils import log_decorator from servicelib.utils import fire_and_forget_task from servicelib.utils_secrets import generate_password +from simcore_postgres_database.utils_users import UsersRepo +from ..constants import APP_FIRE_AND_FORGET_TASKS_KEY +from ..db.plugin import get_asyncpg_engine from ..garbage_collector.settings import GUEST_USER_RC_LOCK_FORMAT -from ..groups.api import auto_add_user_to_product_group -from ..login._login_service import ACTIVE, GUEST -from ..login.login_repository_legacy import AsyncpgStorage, get_plugin_storage +from ..groups import api as groups_service +from ..login._login_service import GUEST from ..products import products_web from ..redis import get_redis_lock_manager_client -from ..security.api import ( - check_user_authorized, - encrypt_password, - is_anonymous, - remember_identity, -) -from ..users.api import get_user +from ..security import security_service, security_web +from ..users import users_service from ..users.exceptions import UserNotFoundError -from ._constants import MSG_GUESTS_NOT_ALLOWED -from ._errors import GuestUsersLimitError +from ._errors import GuestUserNotAllowedError, GuestUsersLimitError from .settings import StudiesDispatcherSettings, get_plugin_settings _logger = logging.getLogger(__name__) @@ -60,8 +57,8 @@ async def get_authorized_user(request: web.Request) -> dict: and logged in (valid cookie)? 
""" with suppress(web.HTTPUnauthorized, UserNotFoundError): - user_id = await check_user_authorized(request) - user: dict = await get_user(request.app, user_id) + user_id = await security_web.check_user_authorized(request) + user: dict = await users_service.get_user(request.app, user_id) return user return {} @@ -100,7 +97,6 @@ async def create_temporary_guest_user(request: web.Request): MaxGuestUsersError: No more guest users allowed """ - db: AsyncpgStorage = get_plugin_storage(request.app) redis_locks_client: aioredis.Redis = get_redis_lock_manager_client(request.app) settings: StudiesDispatcherSettings = get_plugin_settings(app=request.app) product_name = products_web.get_product_name(request) @@ -114,31 +110,33 @@ async def create_temporary_guest_user(request: web.Request): password = generate_password(length=12) expires_at = datetime.utcnow() + settings.STUDIES_GUEST_ACCOUNT_LIFETIME - usr = None + user_id: UserID | None = None + + repo = UsersRepo(get_asyncpg_engine(request.app)) + try: async with redis_locks_client.lock( GUEST_USER_RC_LOCK_FORMAT.format(user_id=random_user_name), timeout=MAX_DELAY_TO_CREATE_USER, ): # NOTE: usr Dict is incomplete, e.g. does not contain primary_gid - usr = await db.create_user( - { - "name": random_user_name, - "email": email, - "password_hash": encrypt_password(password), - "status": ACTIVE, - "role": GUEST, - "expires_at": expires_at, - } + user_row = await repo.new_user( + email=email, + password_hash=security_service.encrypt_password(password), + status=UserStatus.ACTIVE, + role=UserRole.GUEST, + expires_at=expires_at, ) - user = await get_user(request.app, usr["id"]) - await auto_add_user_to_product_group( - request.app, user_id=user["id"], product_name=product_name + user_id = user_row.id + + user = await users_service.get_user(request.app, user_id) + await groups_service.auto_add_user_to_product_group( + request.app, user_id=user_id, product_name=product_name ) # (2) read details above await redis_locks_client.lock( - GUEST_USER_RC_LOCK_FORMAT.format(user_id=user["id"]), + GUEST_USER_RC_LOCK_FORMAT.format(user_id=user_id), timeout=MAX_DELAY_TO_GUEST_FIRST_CONNECTION, ).acquire() @@ -151,14 +149,16 @@ async def create_temporary_guest_user(request: web.Request): # stop creating GUEST users. # NOTE: here we cleanup but if any trace is left it will be deleted by gc - if usr is not None and usr.get("id"): + if user_id: - async def _cleanup(draft_user): + async def _cleanup(): with suppress(Exception): - await db.delete_user(draft_user) + await users_service.delete_user_without_projects( + request.app, user_id=user_id, clean_cache=False + ) fire_and_forget_task( - _cleanup(usr), + _cleanup(), task_suffix_name="cleanup_temporary_guest_user", fire_and_forget_tasks_collection=request.app[ APP_FIRE_AND_FORGET_TASKS_KEY @@ -186,13 +186,13 @@ async def get_or_create_guest_user( allow_anonymous_or_guest_users -- if True, it will create a temporary GUEST account Raises: - web.HTTPUnauthorized if ANONYMOUS users are not allowed (either w/o auth or as GUEST) + GuestUserNotAllowedError if ANONYMOUS users are not allowed (either w/o auth or as GUEST) """ user = None # anonymous = no identity in request - is_anonymous_user = await is_anonymous(request) + is_anonymous_user = await security_web.is_anonymous(request) if not is_anonymous_user: # NOTE: covers valid cookie with unauthorized user (e.g. 
expired guest/banned) user = await get_authorized_user(request) @@ -204,7 +204,11 @@ async def get_or_create_guest_user( if not allow_anonymous_or_guest_users and (not user or user.get("role") == GUEST): # NOTE: if allow_anonymous_users=False then GUEST users are NOT allowed! - raise web.HTTPUnauthorized(text=MSG_GUESTS_NOT_ALLOWED) + raise GuestUserNotAllowedError( + allow_anonymous_or_guest_users=allow_anonymous_or_guest_users, + user=user, + is_anonymous_user=is_anonymous_user, + ) assert isinstance(user, dict) # nosec @@ -223,7 +227,7 @@ async def ensure_authentication( ): if user.needs_login: _logger.debug("Auto login for anonymous user %s", user.name) - await remember_identity( + await security_web.remember_identity( request, response, user_email=user.email, diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/plugin.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/plugin.py index ef420205b734..3c5d19a4ba05 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/plugin.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/plugin.py @@ -1,13 +1,12 @@ import logging from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func from ..login.decorators import login_required from ..products.plugin import setup_products -from . import _rest_handlers +from ._controller import setup_controller from ._projects_permalinks import setup_projects_permalinks -from ._redirects_handlers import get_redirection_to_viewer from ._studies_access import get_redirection_to_study_page from .settings import StudiesDispatcherSettings, get_plugin_settings @@ -32,7 +31,7 @@ def _setup_studies_access(app: web.Application, settings: StudiesDispatcherSetti ) -@app_module_setup( +@app_setup_func( "simcore_service_webserver.studies_dispatcher", ModuleCategory.ADDON, settings_name="WEBSERVER_STUDIES_DISPATCHER", @@ -48,20 +47,7 @@ def setup_studies_dispatcher(app: web.Application) -> bool: _setup_studies_access(app, settings) setup_projects_permalinks(app, settings) - # routes - redirect_handler = get_redirection_to_viewer - if settings.is_login_required(): - redirect_handler = login_required(get_redirection_to_viewer) - - _logger.info( - "'%s' config explicitly disables anonymous users from this feature", - __name__, - ) - - app.router.add_routes( - [web.get("/view", redirect_handler, name="get_redirection_to_viewer")] - ) - - app.router.add_routes(_rest_handlers.routes) + # rest controllers + setup_controller(app, settings) return True diff --git a/services/web/server/src/simcore_service_webserver/studies_dispatcher/settings.py b/services/web/server/src/simcore_service_webserver/studies_dispatcher/settings.py index 4d61119d0b72..d90c254d6faa 100644 --- a/services/web/server/src/simcore_service_webserver/studies_dispatcher/settings.py +++ b/services/web/server/src/simcore_service_webserver/studies_dispatcher/settings.py @@ -1,60 +1,64 @@ from datetime import timedelta +from typing import Annotated, Final from aiohttp import web from common_library.pydantic_validators import validate_numeric_string_as_timedelta -from pydantic import ByteSize, HttpUrl, TypeAdapter, field_validator -from pydantic.fields import Field +from pydantic import ByteSize, Field, HttpUrl, TypeAdapter, field_validator from pydantic_settings import SettingsConfigDict -from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY from 
settings_library.base import BaseCustomSettings +from ..constants import APP_SETTINGS_KEY + +_DEFAULT_THUMBNAIL: Final[HttpUrl] = TypeAdapter(HttpUrl).validate_python( + "https://via.placeholder.com/170x120.png" +) + class StudiesDispatcherSettings(BaseCustomSettings): - STUDIES_ACCESS_ANONYMOUS_ALLOWED: bool = Field( - default=False, - description="If enabled, the study links are accessible to anonymous users", - ) + STUDIES_ACCESS_ANONYMOUS_ALLOWED: Annotated[ + bool, + Field( + description="If enabled, the study links are accessible to anonymous users" + ), + ] = False - STUDIES_GUEST_ACCOUNT_LIFETIME: timedelta = Field( - default=timedelta(minutes=15), - description="Sets lifetime of a guest user until it is logged out " - " and removed by the GC", - ) + STUDIES_GUEST_ACCOUNT_LIFETIME: Annotated[ + timedelta, + Field( + description="Sets lifetime of a guest user until it is logged out and removed by the GC" + ), + ] = timedelta(minutes=15) - STUDIES_DEFAULT_SERVICE_THUMBNAIL: HttpUrl = Field( - default=TypeAdapter(HttpUrl).validate_python( - "https://via.placeholder.com/170x120.png" + STUDIES_DEFAULT_SERVICE_THUMBNAIL: Annotated[ + HttpUrl, + Field( + description="Default thumbnail for services or dispatch project with a service" ), - description="Default thumbnail for services or dispatch project with a service", - ) + ] = _DEFAULT_THUMBNAIL - STUDIES_DEFAULT_FILE_THUMBNAIL: HttpUrl = Field( - default=TypeAdapter(HttpUrl).validate_python( - "https://via.placeholder.com/170x120.png" + STUDIES_DEFAULT_FILE_THUMBNAIL: Annotated[ + HttpUrl, + Field( + description="Default thumbnail for dispatch projects with only data (i.e. file-picker)" ), - description="Default thumbnail for dispatch projects with only data (i.e. file-picker)", - ) + ] = _DEFAULT_THUMBNAIL - STUDIES_MAX_FILE_SIZE_ALLOWED: ByteSize = Field( - default=TypeAdapter(ByteSize).validate_python("50Mib"), - description="Limits the size of the files that can be dispatched" - "Note that the accuracy of the file size is not guaranteed and this limit might be surpassed", - ) + STUDIES_MAX_FILE_SIZE_ALLOWED: Annotated[ + ByteSize, + Field( + description="Limits the size of the files that can be dispatched. 
" + "Note that the accuracy of the file size is not guaranteed and this limit might be surpassed" + ), + ] = TypeAdapter(ByteSize).validate_python("50Mib") @field_validator("STUDIES_GUEST_ACCOUNT_LIFETIME") @classmethod def _is_positive_lifetime(cls, v): if v and isinstance(v, timedelta) and v.total_seconds() <= 0: - msg = f"Must be a positive number, got {v.total_seconds()=}" + msg = f"Must be a positive lifetime, got {v.total_seconds()=}" raise ValueError(msg) return v - def is_login_required(self): - """Used just to allow protecting the dispatcher redirect entrypoint programatically - Normally dispatcher entrypoints are openened - """ - return not self.STUDIES_ACCESS_ANONYMOUS_ALLOWED - _validate_studies_guest_account_lifetime = validate_numeric_string_as_timedelta( "STUDIES_GUEST_ACCOUNT_LIFETIME" ) @@ -68,6 +72,12 @@ def is_login_required(self): } ) + def is_login_required(self): + """Used just to allow protecting the dispatcher redirect entrypoint programatically + Normally dispatcher entrypoints are openened + """ + return not self.STUDIES_ACCESS_ANONYMOUS_ALLOWED + def get_plugin_settings(app: web.Application) -> StudiesDispatcherSettings: settings = app[APP_SETTINGS_KEY].WEBSERVER_STUDIES_DISPATCHER diff --git a/services/web/server/src/simcore_service_webserver/tags/_rest.py b/services/web/server/src/simcore_service_webserver/tags/_rest.py index ea39edd6c2ad..21dc5e97c6e1 100644 --- a/services/web/server/src/simcore_service_webserver/tags/_rest.py +++ b/services/web/server/src/simcore_service_webserver/tags/_rest.py @@ -1,4 +1,5 @@ from aiohttp import web +from common_library.user_messages import user_message from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import ( parse_request_body_as, @@ -38,23 +39,35 @@ _TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { TagNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Tag {tag_id} not found: either no access or does not exists", + user_message( + "The tag '{tag_id}' could not be found or you don't have access to it.", + _version=1, + ), ), TagOperationNotAllowedError: HttpErrorInfo( status.HTTP_403_FORBIDDEN, - "Could not {operation} tag {tag_id}. Not found or insuficient access.", + user_message( + "Unable to {operation} tag '{tag_id}'. The tag was not found or you don't have sufficient access.", + _version=1, + ), ), ShareTagWithEveryoneNotAllowedError: HttpErrorInfo( status.HTTP_403_FORBIDDEN, - "Sharing with everyone is not permitted.", + user_message("Sharing with everyone is not allowed.", _version=1), ), ShareTagWithProductGroupNotAllowedError: HttpErrorInfo( status.HTTP_403_FORBIDDEN, - "Sharing with all users is only permitted to admin users (e.g. testers, POs, ...).", + user_message( + "Sharing with all users is only allowed for admin users (e.g. 
testers, POs, ...).", + _version=1, + ), ), InsufficientTagShareAccessError: HttpErrorInfo( status.HTTP_403_FORBIDDEN, - "Insufficient access rightst to share (or unshare) tag {tag_id}.", + user_message( + "You don't have sufficient access rights to share (or unshare) tag '{tag_id}'.", + _version=1, + ), ), } diff --git a/services/web/server/src/simcore_service_webserver/tags/_service.py b/services/web/server/src/simcore_service_webserver/tags/_service.py index 0c28c2a462fe..b12dcc313540 100644 --- a/services/web/server/src/simcore_service_webserver/tags/_service.py +++ b/services/web/server/src/simcore_service_webserver/tags/_service.py @@ -1,5 +1,4 @@ -""" Implements `tags` plugin **service layer** -""" +"""Implements `tags` plugin **service layer**""" from aiohttp import web from common_library.groups_dicts import AccessRightsDict @@ -12,7 +11,7 @@ from sqlalchemy.ext.asyncio import AsyncEngine from ..products import products_service -from ..users.api import get_user_role +from ..users import users_service from .errors import ( InsufficientTagShareAccessError, ShareTagWithEveryoneNotAllowedError, @@ -94,7 +93,9 @@ async def _validate_tag_sharing_permissions( ) if _is_product_group(app, group_id=group_id): - user_role: UserRole = await get_user_role(app, user_id=caller_user_id) + user_role: UserRole = await users_service.get_user_role( + app, user_id=caller_user_id + ) if user_role < UserRole.TESTER: raise ShareTagWithProductGroupNotAllowedError( user_id=caller_user_id, diff --git a/services/web/server/src/simcore_service_webserver/tags/plugin.py b/services/web/server/src/simcore_service_webserver/tags/plugin.py index 650f8ba32976..c4b984add2eb 100644 --- a/services/web/server/src/simcore_service_webserver/tags/plugin.py +++ b/services/web/server/src/simcore_service_webserver/tags/plugin.py @@ -1,18 +1,17 @@ -""" tags management subsystem +"""tags management subsystem""" -""" import logging from aiohttp import web -from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func +from ..constants import APP_SETTINGS_KEY from . 
import _rest _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_TAGS", diff --git a/services/web/server/src/simcore_service_webserver/tags/schemas.py b/services/web/server/src/simcore_service_webserver/tags/schemas.py index 5f83e3032ecd..7ff06e1ae358 100644 --- a/services/web/server/src/simcore_service_webserver/tags/schemas.py +++ b/services/web/server/src/simcore_service_webserver/tags/schemas.py @@ -7,7 +7,7 @@ from models_library.rest_base import RequestParameters, StrictRequestParameters from models_library.users import UserID from pydantic import Field, PositiveInt, StringConstraints -from servicelib.request_keys import RQT_USERID_KEY +from servicelib.aiohttp.request_keys import RQT_USERID_KEY from simcore_postgres_database.utils_tags import TagAccessRightsDict, TagDict diff --git a/services/web/server/src/simcore_service_webserver/tasks/_exception_handlers.py b/services/web/server/src/simcore_service_webserver/tasks/_exception_handlers.py index b0b9d8754c51..81b9806c57a9 100644 --- a/services/web/server/src/simcore_service_webserver/tasks/_exception_handlers.py +++ b/services/web/server/src/simcore_service_webserver/tasks/_exception_handlers.py @@ -1,3 +1,4 @@ +from common_library.user_messages import user_message from models_library.api_schemas_rpc_async_jobs.exceptions import ( JobAbortedError, JobError, @@ -22,35 +23,48 @@ _TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { InvalidFileIdentifierError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Could not find file {file_id}", + user_message( + "The file with identifier {file_id} could not be found", _version=2 + ), ), AccessRightError: HttpErrorInfo( status.HTTP_403_FORBIDDEN, - "Accessright error: user {user_id} does not have access to file {file_id} with location {location_id}", + user_message( + "Permission denied: You (user {user_id}) don't have the necessary rights to access file {file_id} in location {location_id}", + _version=2, + ), ), JobAbortedError: HttpErrorInfo( status.HTTP_410_GONE, - "Task {job_id} is aborted", + user_message("Task {job_id} was terminated before completion", _version=2), ), JobError: HttpErrorInfo( status.HTTP_500_INTERNAL_SERVER_ERROR, - "Task '{job_id}' failed with exception type '{exc_type}' and message: {exc_msg}", + user_message( + "Task '{job_id}' encountered an error: {exc_msg} (error type: '{exc_type}')", + _version=2, + ), ), JobNotDoneError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "task {job_id} is not done yet", + user_message( + "Task {job_id} is still running and has not completed yet", _version=2 + ), ), JobMissingError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "No task with id: {job_id}", + user_message("The requested task with ID {job_id} does not exist", _version=2), ), JobSchedulerError: HttpErrorInfo( status.HTTP_500_INTERNAL_SERVER_ERROR, - "Encountered an error with the task scheduling system", + user_message( + "The task scheduling system encountered an error. 
Please try again later", + _version=2, + ), ), JobStatusError: HttpErrorInfo( status.HTTP_500_INTERNAL_SERVER_ERROR, - "Encountered an error while getting the status of task {job_id}", + user_message("Unable to get the current status for task {job_id}", _version=2), ), } diff --git a/services/web/server/src/simcore_service_webserver/tasks/_rest.py b/services/web/server/src/simcore_service_webserver/tasks/_rest.py index 039b746e4892..3ab13730940e 100644 --- a/services/web/server/src/simcore_service_webserver/tasks/_rest.py +++ b/services/web/server/src/simcore_service_webserver/tasks/_rest.py @@ -16,25 +16,25 @@ ) from models_library.api_schemas_rpc_async_jobs.async_jobs import ( AsyncJobId, - AsyncJobNameData, ) from models_library.api_schemas_storage import STORAGE_RPC_NAMESPACE from pydantic import BaseModel from servicelib.aiohttp import status from servicelib.aiohttp.long_running_tasks.server import ( - get_task_context, - get_tasks_manager, + get_long_running_manager, ) from servicelib.aiohttp.requests_validation import ( parse_request_path_parameters_as, ) from servicelib.aiohttp.rest_responses import create_data_response +from servicelib.celery.models import OwnerMetadata +from servicelib.long_running_tasks import lrt_api from servicelib.rabbitmq.rpc_interfaces.async_jobs import async_jobs from .._meta import API_VTAG from ..login.decorators import login_required -from ..long_running_tasks import webserver_request_context_decorator -from ..models import RequestContext +from ..long_running_tasks.plugin import webserver_request_context_decorator +from ..models import AuthenticatedRequestContext, WebServerOwnerMetadata from ..rabbitmq import get_rabbitmq_rpc_client from ..security.decorators import permission_required from ._exception_handlers import handle_export_data_exceptions @@ -56,21 +56,26 @@ @handle_export_data_exceptions @webserver_request_context_decorator async def get_async_jobs(request: web.Request) -> web.Response: - inprocess_task_manager = get_tasks_manager(request.app) - inprocess_task_context = get_task_context(request) - inprocess_tracked_tasks = inprocess_task_manager.list_tasks(inprocess_task_context) + inprocess_long_running_manager = get_long_running_manager(request.app) + inprocess_tracked_tasks = await lrt_api.list_tasks( + inprocess_long_running_manager.rpc_client, + inprocess_long_running_manager.lrt_namespace, + inprocess_long_running_manager.get_task_context(request), + ) - _req_ctx = RequestContext.model_validate(request) + _req_ctx = AuthenticatedRequestContext.model_validate(request) rabbitmq_rpc_client = get_rabbitmq_rpc_client(request.app) user_async_jobs = await async_jobs.list_jobs( rabbitmq_rpc_client=rabbitmq_rpc_client, rpc_namespace=STORAGE_RPC_NAMESPACE, - job_id_data=AsyncJobNameData( - user_id=_req_ctx.user_id, product_name=_req_ctx.product_name + owner_metadata=OwnerMetadata.model_validate( + WebServerOwnerMetadata( + user_id=_req_ctx.user_id, + product_name=_req_ctx.product_name, + ).model_dump() ), - filter_="", ) return create_data_response( [ @@ -86,9 +91,8 @@ async def get_async_jobs(request: web.Request) -> web.Response: + [ TaskGet( task_id=f"{task.task_id}", - task_name=task.task_name, status_href=f"{request.app.router['get_task_status'].url_for(task_id=task.task_id)}", - abort_href=f"{request.app.router['cancel_and_delete_task'].url_for(task_id=task.task_id)}", + abort_href=f"{request.app.router['remove_task'].url_for(task_id=task.task_id)}", result_href=f"{request.app.router['get_task_result'].url_for(task_id=task.task_id)}", ) for 
task in inprocess_tracked_tasks @@ -109,7 +113,7 @@ class _StorageAsyncJobId(BaseModel): @handle_export_data_exceptions async def get_async_job_status(request: web.Request) -> web.Response: - _req_ctx = RequestContext.model_validate(request) + _req_ctx = AuthenticatedRequestContext.model_validate(request) rabbitmq_rpc_client = get_rabbitmq_rpc_client(request.app) async_job_get = parse_request_path_parameters_as(_StorageAsyncJobId, request) @@ -117,8 +121,11 @@ async def get_async_job_status(request: web.Request) -> web.Response: rabbitmq_rpc_client=rabbitmq_rpc_client, rpc_namespace=STORAGE_RPC_NAMESPACE, job_id=async_job_get.task_id, - job_id_data=AsyncJobNameData( - user_id=_req_ctx.user_id, product_name=_req_ctx.product_name + owner_metadata=OwnerMetadata.model_validate( + WebServerOwnerMetadata( + user_id=_req_ctx.user_id, + product_name=_req_ctx.product_name, + ).model_dump() ), ) _task_id = f"{async_job_rpc_status.job_id}" @@ -143,7 +150,7 @@ async def get_async_job_status(request: web.Request) -> web.Response: @handle_export_data_exceptions async def cancel_async_job(request: web.Request) -> web.Response: - _req_ctx = RequestContext.model_validate(request) + _req_ctx = AuthenticatedRequestContext.model_validate(request) rabbitmq_rpc_client = get_rabbitmq_rpc_client(request.app) async_job_get = parse_request_path_parameters_as(_StorageAsyncJobId, request) @@ -152,8 +159,11 @@ async def cancel_async_job(request: web.Request) -> web.Response: rabbitmq_rpc_client=rabbitmq_rpc_client, rpc_namespace=STORAGE_RPC_NAMESPACE, job_id=async_job_get.task_id, - job_id_data=AsyncJobNameData( - user_id=_req_ctx.user_id, product_name=_req_ctx.product_name + owner_metadata=OwnerMetadata.model_validate( + WebServerOwnerMetadata( + user_id=_req_ctx.user_id, + product_name=_req_ctx.product_name, + ).model_dump() ), ) @@ -171,7 +181,7 @@ async def get_async_job_result(request: web.Request) -> web.Response: class _PathParams(BaseModel): task_id: UUID - _req_ctx = RequestContext.model_validate(request) + _req_ctx = AuthenticatedRequestContext.model_validate(request) rabbitmq_rpc_client = get_rabbitmq_rpc_client(request.app) async_job_get = parse_request_path_parameters_as(_PathParams, request) @@ -179,8 +189,11 @@ class _PathParams(BaseModel): rabbitmq_rpc_client=rabbitmq_rpc_client, rpc_namespace=STORAGE_RPC_NAMESPACE, job_id=async_job_get.task_id, - job_id_data=AsyncJobNameData( - user_id=_req_ctx.user_id, product_name=_req_ctx.product_name + owner_metadata=OwnerMetadata.model_validate( + WebServerOwnerMetadata( + user_id=_req_ctx.user_id, + product_name=_req_ctx.product_name, + ).model_dump() ), ) diff --git a/services/web/server/src/simcore_service_webserver/templates/common/request_account.jinja2 b/services/web/server/src/simcore_service_webserver/templates/common/request_account.jinja2 index e273a387be60..bc85afb0559a 100644 --- a/services/web/server/src/simcore_service_webserver/templates/common/request_account.jinja2 +++ b/services/web/server/src/simcore_service_webserver/templates/common/request_account.jinja2 @@ -11,7 +11,11 @@ Dear {{ name }},

- We have received the following request form for an account in {{ product.display_name }} from {{ host }}
+ We have received the following request form for an account in :
+
+   Product: {{ product.display_name }}
+   Host: {{ host }}
+
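
Aside (not part of the patch): the tags and tasks handlers earlier in this diff, and the trash handlers below, all migrate their error maps to user_message(). A minimal sketch of that registration pattern follows; ExceptionToHttpErrorMap, HttpErrorInfo, user_message and status are the in-repo helpers imported in those diffs, while SomeDomainError and the message text are hypothetical placeholders for illustration only.

from common_library.user_messages import user_message
from servicelib.aiohttp import status

from ..exception_handling import ExceptionToHttpErrorMap, HttpErrorInfo
from .errors import SomeDomainError  # hypothetical exception, for illustration only

_TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = {
    # Each entry maps a domain exception to an HTTP status plus a versioned,
    # user-facing message; '{item_id}' stands in for whatever formatting
    # fields the handler supports (cf. '{tag_id}', '{job_id}' above).
    SomeDomainError: HttpErrorInfo(
        status.HTTP_404_NOT_FOUND,
        user_message("The requested item '{item_id}' could not be found.", _version=1),
    ),
}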

diff --git a/services/web/server/src/simcore_service_webserver/templates/common/request_support.jinja2 b/services/web/server/src/simcore_service_webserver/templates/common/request_support.jinja2
new file mode 100644
index 000000000000..fe2ebaa25238
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/templates/common/request_support.jinja2
@@ -0,0 +1,36 @@
+Request for Support on {{ host }}
+
+

+ Dear Support Team,
+
+
+ We have received a support request from {{ first_name }} {{ last_name }} ({{ user_email }}) on {{ host }}.
+
+
+ All communication should take place in the Platform Support Center at the following link:
+ {{ conversation_url }}
+
+
+ First message content: {{ message_content }}
+
+
+ Extra Context:
+
+
+{{ dumps(extra_context) }}
+
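
Aside (not part of the patch): a minimal sketch, using stock Jinja2, of how the new request_support.jinja2 template could be rendered. The context keys mirror the placeholders in the template above and the loader path is the template directory touched by this diff; all values are made up for illustration, and the webserver's own templating helpers may differ.

import json

from jinja2 import Environment, FileSystemLoader

env = Environment(
    loader=FileSystemLoader(
        "services/web/server/src/simcore_service_webserver/templates/common"
    ),
    autoescape=True,
)
template = env.get_template("request_support.jinja2")

body = template.render(
    host="osparc.example.com",  # illustrative values only
    first_name="Jane",
    last_name="Doe",
    user_email="jane.doe@example.com",
    conversation_url="https://osparc.example.com/#/conversation/123",
    message_content="I cannot open my study.",
    extra_context={"product": "osparc"},
    # the template calls {{ dumps(extra_context) }}, so a dumps() callable must be injected
    dumps=json.dumps,
)
print(body)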
diff --git a/services/web/server/src/simcore_service_webserver/tracing.py b/services/web/server/src/simcore_service_webserver/tracing.py index ffbb8f404a03..417830208c26 100644 --- a/services/web/server/src/simcore_service_webserver/tracing.py +++ b/services/web/server/src/simcore_service_webserver/tracing.py @@ -1,11 +1,11 @@ import logging from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup from servicelib.aiohttp.tracing import get_tracing_lifespan from settings_library.tracing import TracingSettings -from ._meta import APP_NAME +from .application_settings import get_application_settings +from .application_setup import ModuleCategory, app_setup_func from .constants import APP_SETTINGS_KEY log = logging.getLogger(__name__) @@ -18,15 +18,28 @@ def get_plugin_settings(app: web.Application) -> TracingSettings: return settings -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_TRACING", logger=log ) def setup_app_tracing(app: web.Application): + """ + Sets up OpenTelemetry tracing for the application. + + NOTE: uses app[APP_SETTINGS_KEY].APP_NAME to set the service name advertised to the + tracing backend. This is used to identify the service in the tracing UI. + Note that this defaults in _meta.APP_NAME to "simcore-service-webserver" if not set otherwise + in setup_settings(app, app_name="...") in the application factory. + + """ + + app_settings = get_application_settings(app) tracing_settings: TracingSettings = get_plugin_settings(app) + app.cleanup_ctx.append( get_tracing_lifespan( - app, + app=app, tracing_settings=tracing_settings, - service_name=APP_NAME, + service_name=app_settings.APP_NAME, + add_response_trace_id_header=True, ) ) diff --git a/services/web/server/src/simcore_service_webserver/trash/_rest.py b/services/web/server/src/simcore_service_webserver/trash/_rest.py index d69719840869..f55faca75d31 100644 --- a/services/web/server/src/simcore_service_webserver/trash/_rest.py +++ b/services/web/server/src/simcore_service_webserver/trash/_rest.py @@ -2,11 +2,12 @@ import logging from aiohttp import web +from common_library.user_messages import user_message from servicelib.aiohttp import status -from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY from servicelib.utils import fire_and_forget_task from .._meta import API_VTAG as VTAG +from ..constants import APP_FIRE_AND_FORGET_TASKS_KEY from ..exception_handling import ( ExceptionToHttpErrorMap, HttpErrorInfo, @@ -25,11 +26,17 @@ _TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { ProjectRunningConflictError: HttpErrorInfo( status.HTTP_409_CONFLICT, - "Current study is in use and cannot be trashed [project_id={project_uuid}]. Please stop all services first and try again", + user_message( + "The project is currently in use and cannot be moved to trash. Please stop all running services first and try again.", + _version=1, + ), ), ProjectStoppingError: HttpErrorInfo( status.HTTP_503_SERVICE_UNAVAILABLE, - "Something went wrong while stopping services before trashing. Aborting trash.", + user_message( + "An error occurred while stopping services before moving to trash. 
The operation has been cancelled.", + _version=1, + ), ), } diff --git a/services/web/server/src/simcore_service_webserver/trash/_service.py b/services/web/server/src/simcore_service_webserver/trash/_service.py index 502d4d2c1373..055598866790 100644 --- a/services/web/server/src/simcore_service_webserver/trash/_service.py +++ b/services/web/server/src/simcore_service_webserver/trash/_service.py @@ -5,9 +5,9 @@ import arrow from aiohttp import web +from common_library.logging.logging_errors import create_troubleshooting_log_kwargs from models_library.products import ProductName from models_library.users import UserID -from servicelib.logging_errors import create_troubleshotting_log_kwargs from servicelib.logging_utils import log_context from ..folders import folders_trash_service @@ -42,7 +42,6 @@ async def _empty_explicitly_trashed_projects( ): for project_id in trashed_projects_ids: try: - await projects_trash_service.delete_explicitly_trashed_project( app, user_id=user_id, @@ -51,7 +50,7 @@ async def _empty_explicitly_trashed_projects( except Exception as exc: # pylint: disable=broad-exception-caught _logger.warning( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( "Error deleting a trashed project while emptying trash.", error=exc, error_context={ @@ -88,7 +87,7 @@ async def _empty_explicitly_trashed_folders_and_content( except Exception as exc: # pylint: disable=broad-exception-caught _logger.warning( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( "Error deleting a trashed folders (and content) while emptying trash.", error=exc, error_context={ @@ -125,7 +124,7 @@ async def _empty_explicitely_trashed_workspaces_and_content( except Exception as exc: # pylint: disable=broad-exception-caught _logger.warning( - **create_troubleshotting_log_kwargs( + **create_troubleshooting_log_kwargs( "Error deleting a trashed workspace (and content) while emptying trash.", error=exc, error_context={ @@ -143,7 +142,7 @@ async def safe_empty_trash( *, product_name: ProductName, user_id: UserID, - on_explicitly_trashed_projects_deleted: asyncio.Event | None = None + on_explicitly_trashed_projects_deleted: asyncio.Event | None = None, ): # Delete explicitly trashed projects & notify await _empty_explicitly_trashed_projects(app, product_name, user_id) @@ -171,7 +170,6 @@ async def safe_delete_expired_trash_as_admin(app: web.Application) -> None: retention, delete_until, ): - ctx = { "delete_until": delete_until, "retention": retention, @@ -188,9 +186,9 @@ async def safe_delete_expired_trash_as_admin(app: web.Application) -> None: _logger.info("Deleted %d trashed workspaces", len(deleted_workspace_ids)) except Exception as exc: # pylint: disable=broad-exception-caught - _logger.warning( - **create_troubleshotting_log_kwargs( - "Error batch deleting expired workspaces as admin.", + _logger.exception( + **create_troubleshooting_log_kwargs( + "Unexpected error while batch deleting expired workspaces as admin:", error=exc, error_context=ctx, ) @@ -207,9 +205,9 @@ async def safe_delete_expired_trash_as_admin(app: web.Application) -> None: except Exception as exc: # pylint: disable=broad-exception-caught ctx_with_product = {**ctx, "product_name": product_name} - _logger.warning( - **create_troubleshotting_log_kwargs( - "Error batch deleting expired trashed folders as admin.", + _logger.exception( + **create_troubleshooting_log_kwargs( + "Unexpected error while batch deleting expired trashed folders as admin:", error=exc, error_context=ctx_with_product, ) 
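
Aside (not part of the patch): the trash service above switches from the misspelled create_troubleshotting_log_kwargs to create_troubleshooting_log_kwargs and from warning- to exception-level logging. Below is a minimal sketch of the call pattern; the helper name and keyword arguments are taken from this diff, while the surrounding function is purely illustrative.

import logging

from common_library.logging.logging_errors import create_troubleshooting_log_kwargs

_logger = logging.getLogger(__name__)


async def _delete_expired_items(ctx: dict) -> None:
    try:
        ...  # batch deletion, as in safe_delete_expired_trash_as_admin above
    except Exception as exc:  # pylint: disable=broad-exception-caught
        # logs the traceback plus a structured error_context for troubleshooting
        _logger.exception(
            **create_troubleshooting_log_kwargs(
                "Unexpected error while batch deleting expired items:",
                error=exc,
                error_context=ctx,
            )
        )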
@@ -227,9 +225,9 @@ async def safe_delete_expired_trash_as_admin(app: web.Application) -> None: _logger.info("Deleted %d trashed projects", len(deleted_project_ids)) except Exception as exc: # pylint: disable=broad-exception-caught - _logger.warning( - **create_troubleshotting_log_kwargs( - "Error batch deleting expired projects as admin.", + _logger.exception( + **create_troubleshooting_log_kwargs( + "Unexpected error while batch deleting expired projects as admin:", error=exc, error_context=ctx, ) diff --git a/services/web/server/src/simcore_service_webserver/trash/plugin.py b/services/web/server/src/simcore_service_webserver/trash/plugin.py index 977a1c748843..8796eab49537 100644 --- a/services/web/server/src/simcore_service_webserver/trash/plugin.py +++ b/services/web/server/src/simcore_service_webserver/trash/plugin.py @@ -1,13 +1,14 @@ -""" projects management subsystem +"""projects management subsystem - A project is a document defining a osparc study - It contains metadata about the study (e.g. name, description, owner, etc) and a workbench section that describes the study pipeline +A project is a document defining a osparc study +It contains metadata about the study (e.g. name, description, owner, etc) and a workbench section that describes the study pipeline """ + import logging from aiohttp import web -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func from ..constants import APP_SETTINGS_KEY from ..folders.plugin import setup_folders from ..projects.plugin import setup_projects @@ -17,7 +18,7 @@ _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_TRASH", diff --git a/services/web/server/src/simcore_service_webserver/user_notifications/__init__.py b/services/web/server/src/simcore_service_webserver/user_notifications/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/web/server/src/simcore_service_webserver/user_notifications/_controller/__init__.py b/services/web/server/src/simcore_service_webserver/user_notifications/_controller/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/web/server/src/simcore_service_webserver/user_notifications/_controller/rest/__init__.py b/services/web/server/src/simcore_service_webserver/user_notifications/_controller/rest/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/web/server/src/simcore_service_webserver/user_notifications/_controller/rest/user_notification_rest.py b/services/web/server/src/simcore_service_webserver/user_notifications/_controller/rest/user_notification_rest.py new file mode 100644 index 000000000000..679179857025 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/user_notifications/_controller/rest/user_notification_rest.py @@ -0,0 +1,86 @@ +import logging + +from aiohttp import web +from models_library.api_schemas_webserver.users import MyPermissionGet +from models_library.users import UserPermission +from pydantic import BaseModel +from servicelib.aiohttp import status +from servicelib.aiohttp.requests_validation import ( + parse_request_body_as, + parse_request_path_parameters_as, +) +from servicelib.tracing import with_profiled_span + +from ...._meta import API_VTAG +from ....login.decorators import login_required +from ....products import products_web +from ....security.decorators import permission_required +from 
....users import _users_service +from ....users.schemas import UsersRequestContext +from ....utils_aiohttp import envelope_json_response +from ... import _service +from ..._models import UserNotificationCreate, UserNotificationPatch + +_logger = logging.getLogger(__name__) + + +class NotificationPathParams(BaseModel): + notification_id: str + + +routes = web.RouteTableDef() + + +@routes.get(f"/{API_VTAG}/me/notifications", name="list_user_notifications") +@login_required +@permission_required("user.notifications.read") +async def list_user_notifications(request: web.Request) -> web.Response: + req_ctx = UsersRequestContext.model_validate(request) + product_name = products_web.get_product_name(request) + notifications = await _service.list_user_notifications( + request.app, req_ctx.user_id, product_name + ) + return envelope_json_response(notifications) + + +@routes.post(f"/{API_VTAG}/me/notifications", name="create_user_notification") +@login_required +@permission_required("user.notifications.write") +async def create_user_notification(request: web.Request) -> web.Response: + body = await parse_request_body_as(UserNotificationCreate, request) + await _service.create_user_notification(request.app, body) + return web.json_response(status=status.HTTP_204_NO_CONTENT) + + +@routes.patch( + f"/{API_VTAG}/me/notifications/{{notification_id}}", + name="mark_notification_as_read", +) +@login_required +@permission_required("user.notifications.update") +async def mark_notification_as_read(request: web.Request) -> web.Response: + req_ctx = UsersRequestContext.model_validate(request) + req_path_params = parse_request_path_parameters_as(NotificationPathParams, request) + body = await parse_request_body_as(UserNotificationPatch, request) + + await _service.update_user_notification( + request.app, + req_ctx.user_id, + req_path_params.notification_id, + body.model_dump(exclude_unset=True), + ) + return web.json_response(status=status.HTTP_204_NO_CONTENT) + + +@routes.get(f"/{API_VTAG}/me/permissions", name="list_user_permissions") +@login_required +@with_profiled_span +@permission_required("user.permissions.read") +async def list_user_permissions(request: web.Request) -> web.Response: + req_ctx = UsersRequestContext.model_validate(request) + list_permissions: list[UserPermission] = await _users_service.list_user_permissions( + request.app, user_id=req_ctx.user_id, product_name=req_ctx.product_name + ) + return envelope_json_response( + [MyPermissionGet.from_domain_model(p) for p in list_permissions] + ) diff --git a/services/web/server/src/simcore_service_webserver/user_notifications/_models.py b/services/web/server/src/simcore_service_webserver/user_notifications/_models.py new file mode 100644 index 000000000000..633ce62c4c9a --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/user_notifications/_models.py @@ -0,0 +1,149 @@ +from datetime import datetime +from enum import auto +from typing import Final, Literal +from uuid import uuid4 + +from models_library.products import ProductName +from models_library.users import UserID +from models_library.utils.enums import StrAutoEnum +from pydantic import BaseModel, ConfigDict, NonNegativeInt, field_validator +from pydantic.config import JsonDict + +MAX_NOTIFICATIONS_FOR_USER_TO_SHOW: Final[NonNegativeInt] = 10 +MAX_NOTIFICATIONS_FOR_USER_TO_KEEP: Final[NonNegativeInt] = 100 + + +def get_notification_key(user_id: UserID) -> str: + return f"user_id={user_id}" + + +class NotificationCategory(StrAutoEnum): + NEW_ORGANIZATION = auto() + 
STUDY_SHARED = auto() + TEMPLATE_SHARED = auto() + CONVERSATION_NOTIFICATION = auto() + ANNOTATION_NOTE = auto() + WALLET_SHARED = auto() + + +class BaseUserNotification(BaseModel): + user_id: UserID + category: NotificationCategory + actionable_path: str + title: str + text: str + date: datetime + product: Literal["UNDEFINED"] | ProductName = "UNDEFINED" + resource_id: Literal[""] | str = "" + user_from_id: Literal[None] | UserID = None + + @field_validator("category", mode="before") + @classmethod + def category_to_upper(cls, value: str) -> str: + return value.upper() + + +class UserNotificationCreate(BaseUserNotification): ... + + +class UserNotificationPatch(BaseModel): + read: bool + + +class UserNotification(BaseUserNotification): + # Ideally the `id` field, will be a UUID type in the future. + # Since there is no Redis data migration service, data type + # will not change to UUID nor Union[str, UUID] + id: str + read: bool + + @classmethod + def create_from_request_data( + cls, request_data: UserNotificationCreate + ) -> "UserNotification": + return cls.model_construct( + id=f"{uuid4()}", read=False, **request_data.model_dump() + ) + + @staticmethod + def _update_json_schema_extra(schema: JsonDict) -> None: + schema.update( + { + "examples": [ # NOSONAR + { + "id": "3fb96d89-ff5d-4d27-b5aa-d20d46e20eb8", + "user_id": "1", + "category": "NEW_ORGANIZATION", + "actionable_path": "organization/40", + "title": "New organization", + "text": "You're now member of a new Organization", + "date": "2023-02-23T16:23:13.122Z", + "product": "osparc", + "read": True, + }, + { + "id": "ba64ffce-c58c-4382-aad6-96a7787251d6", + "user_id": "1", + "category": "STUDY_SHARED", + "actionable_path": "study/27edd65c-b360-11ed-93d7-02420a000014", # NOSONAR + "title": "Study shared", + "text": "A study was shared with you", + "date": "2023-02-23T16:25:13.122Z", + "product": "osparc", + "read": False, + }, + { + "id": "390053c9-3931-40e1-839f-585268f6fd3c", + "user_id": "1", + "category": "TEMPLATE_SHARED", + "actionable_path": "template/f60477b6-a07e-11ed-8d29-02420a00002d", + "title": "Template shared", + "text": "A template was shared with you", + "date": "2023-02-23T16:28:13.122Z", + "product": "osparc", + "read": False, + }, + { + "id": "390053c9-3931-40e1-839f-585268f6fd3d", + "user_id": "1", + "category": "CONVERSATION_NOTIFICATION", + "actionable_path": "study/27edd65c-b360-11ed-93d7-02420a000014", # NOSONAR + "title": "New notification", + "text": "You were notified in a conversation", + "date": "2023-02-23T16:28:13.122Z", + "product": "s4l", + "read": False, + "resource_id": "3fb96d89-ff5d-4d27-b5aa-d20d46e20e12", + "user_from_id": "2", + }, + { + "id": "390053c9-3931-40e1-839f-585268f6fd3d", + "user_id": "1", + "category": "ANNOTATION_NOTE", + "actionable_path": "study/27edd65c-b360-11ed-93d7-02420a000014", # NOSONAR + "title": "Note added", + "text": "A Note was added for you", + "date": "2023-02-23T16:28:13.122Z", + "product": "s4l", + "read": False, + "resource_id": "3fb96d89-ff5d-4d27-b5aa-d20d46e20e12", + "user_from_id": "2", + }, + { + "id": "390053c9-3931-40e1-839f-585268f6fd3e", + "user_id": "1", + "category": "WALLET_SHARED", + "actionable_path": "wallet/21", + "title": "Credits shared", + "text": "A Credit account was shared with you", + "date": "2023-09-29T16:28:13.122Z", + "product": "tis", + "read": False, + "resource_id": "3fb96d89-ff5d-4d27-b5aa-d20d46e20e13", + "user_from_id": "2", + }, + ] + } + ) + + model_config = ConfigDict(json_schema_extra=_update_json_schema_extra) diff --git 
a/services/web/server/src/simcore_service_webserver/user_notifications/_repository.py b/services/web/server/src/simcore_service_webserver/user_notifications/_repository.py new file mode 100644 index 000000000000..cfff603fd21e --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/user_notifications/_repository.py @@ -0,0 +1,80 @@ +from typing import Any + +import redis.asyncio as aioredis +from aiohttp import web +from common_library.json_serialization import json_loads +from models_library.users import UserID +from servicelib.redis import handle_redis_returns_union_types + +from ..redis import get_redis_user_notifications_client +from ._models import ( + MAX_NOTIFICATIONS_FOR_USER_TO_KEEP, + MAX_NOTIFICATIONS_FOR_USER_TO_SHOW, + UserNotification, + get_notification_key, +) + + +class UserNotificationsRepository: + def __init__(self, redis_client: aioredis.Redis) -> None: + self._redis_client = redis_client + + @classmethod + def create_from_app(cls, app: web.Application) -> "UserNotificationsRepository": + return cls(redis_client=get_redis_user_notifications_client(app)) + + async def list_notifications( + self, user_id: UserID, product_name: str + ) -> list[UserNotification]: + """Returns a list of notifications where the latest notification is at index 0""" + raw_notifications: list[str] = await handle_redis_returns_union_types( + self._redis_client.lrange( + get_notification_key(user_id), + -1 * MAX_NOTIFICATIONS_FOR_USER_TO_SHOW, + -1, + ) + ) + notifications = [json_loads(x) for x in raw_notifications] + + # Make it backwards compatible + for n in notifications: + if "product" not in n: + n["product"] = "UNDEFINED" + + # Filter by product + included = [product_name, "UNDEFINED"] + filtered_notifications = [n for n in notifications if n["product"] in included] + return [UserNotification.model_validate(x) for x in filtered_notifications] + + async def create_notification(self, user_notification: UserNotification) -> None: + """Insert at the head of the list and discard extra notifications""" + key = get_notification_key(user_notification.user_id) + async with self._redis_client.pipeline(transaction=True) as pipe: + pipe.lpush(key, user_notification.model_dump_json()) + pipe.ltrim(key, 0, MAX_NOTIFICATIONS_FOR_USER_TO_KEEP - 1) + await pipe.execute() + + async def update_notification( + self, user_id: UserID, notification_id: str, update_data: dict[str, Any] + ) -> bool: + """Update a specific notification. 
Returns True if found and updated.""" + key = get_notification_key(user_id) + all_user_notifications: list[UserNotification] = [ + UserNotification.model_validate_json(x) + for x in await handle_redis_returns_union_types( + self._redis_client.lrange(key, 0, -1) + ) + ] + + for k, user_notification in enumerate(all_user_notifications): + if notification_id == user_notification.id: + # Update the notification with new data + for field, value in update_data.items(): + if hasattr(user_notification, field): + setattr(user_notification, field, value) + + await handle_redis_returns_union_types( + self._redis_client.lset(key, k, user_notification.model_dump_json()) + ) + return True + return False diff --git a/services/web/server/src/simcore_service_webserver/user_notifications/_service.py b/services/web/server/src/simcore_service_webserver/user_notifications/_service.py new file mode 100644 index 000000000000..952163637504 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/user_notifications/_service.py @@ -0,0 +1,37 @@ +from typing import Any + +from aiohttp import web +from models_library.users import UserID + +from ._models import UserNotification, UserNotificationCreate +from ._repository import UserNotificationsRepository + + +async def list_user_notifications( + app: web.Application, user_id: UserID, product_name: str +) -> list[UserNotification]: + """List user notifications filtered by product""" + repo = UserNotificationsRepository.create_from_app(app) + return await repo.list_notifications(user_id=user_id, product_name=product_name) + + +async def create_user_notification( + app: web.Application, notification_data: UserNotificationCreate +) -> None: + """Create a new user notification""" + repo = UserNotificationsRepository.create_from_app(app) + user_notification = UserNotification.create_from_request_data(notification_data) + await repo.create_notification(user_notification) + + +async def update_user_notification( + app: web.Application, + user_id: UserID, + notification_id: str, + update_data: dict[str, Any], +) -> bool: + """Update a user notification. 
Returns True if found and updated.""" + repo = UserNotificationsRepository.create_from_app(app) + return await repo.update_notification( + user_id=user_id, notification_id=notification_id, update_data=update_data + ) diff --git a/services/web/server/src/simcore_service_webserver/user_notifications/bootstrap.py b/services/web/server/src/simcore_service_webserver/user_notifications/bootstrap.py new file mode 100644 index 000000000000..cedd7a9647e5 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/user_notifications/bootstrap.py @@ -0,0 +1,14 @@ +import logging + +from aiohttp import web + +from ..application_setup import ensure_single_setup +from ._controller.rest import user_notification_rest + +_logger = logging.getLogger(__name__) + + +@ensure_single_setup(__name__, logger=_logger) +def setup_user_notification_feature(app: web.Application): + + app.router.add_routes(user_notification_rest.routes) diff --git a/services/web/server/src/simcore_service_webserver/user_preferences/__init__.py b/services/web/server/src/simcore_service_webserver/user_preferences/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/web/server/src/simcore_service_webserver/user_preferences/_controller/__init__.py b/services/web/server/src/simcore_service_webserver/user_preferences/_controller/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/web/server/src/simcore_service_webserver/user_preferences/_controller/rest/__init__.py b/services/web/server/src/simcore_service_webserver/user_preferences/_controller/rest/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/web/server/src/simcore_service_webserver/user_preferences/_controller/rest/_rest_exceptions.py b/services/web/server/src/simcore_service_webserver/user_preferences/_controller/rest/_rest_exceptions.py new file mode 100644 index 000000000000..b1479e671eed --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/user_preferences/_controller/rest/_rest_exceptions.py @@ -0,0 +1,31 @@ +from common_library.user_messages import user_message +from servicelib.aiohttp import status +from simcore_postgres_database.utils_user_preferences import ( + CouldNotCreateOrUpdateUserPreferenceError, +) + +from ....exception_handling import ( + ExceptionToHttpErrorMap, + HttpErrorInfo, + exception_handling_decorator, + to_exceptions_handlers_map, +) +from ....users.exceptions import FrontendUserPreferenceIsNotDefinedError + +_TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { + CouldNotCreateOrUpdateUserPreferenceError: HttpErrorInfo( + status.HTTP_400_BAD_REQUEST, + user_message( + "Could not create or modify preferences", + ), + ), + FrontendUserPreferenceIsNotDefinedError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + user_message("Provided {frontend_preference_name} not found"), + ), +} + + +handle_rest_requests_exceptions = exception_handling_decorator( + to_exceptions_handlers_map(_TO_HTTP_ERROR_MAP) +) diff --git a/services/web/server/src/simcore_service_webserver/user_preferences/_controller/rest/user_preferences_rest.py b/services/web/server/src/simcore_service_webserver/user_preferences/_controller/rest/user_preferences_rest.py new file mode 100644 index 000000000000..69abf89d47be --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/user_preferences/_controller/rest/user_preferences_rest.py @@ -0,0 +1,39 @@ +from aiohttp import web +from models_library.api_schemas_webserver.users_preferences import ( + 
PatchPathParams, + PatchRequestBody, +) +from servicelib.aiohttp import status +from servicelib.aiohttp.requests_validation import ( + parse_request_body_as, + parse_request_path_parameters_as, +) + +from ...._meta import API_VTAG +from ....login.decorators import login_required +from ....models import AuthenticatedRequestContext +from ... import _service +from ._rest_exceptions import handle_rest_requests_exceptions + +routes = web.RouteTableDef() + + +@routes.patch( + f"/{API_VTAG}/me/preferences/{{preference_id}}", + name="set_frontend_preference", +) +@login_required +@handle_rest_requests_exceptions +async def set_frontend_preference(request: web.Request) -> web.Response: + req_ctx = AuthenticatedRequestContext.model_validate(request) + req_body = await parse_request_body_as(PatchRequestBody, request) + req_path_params = parse_request_path_parameters_as(PatchPathParams, request) + + await _service.set_frontend_user_preference( + request.app, + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + frontend_preference_identifier=req_path_params.preference_id, + value=req_body.value, + ) + return web.json_response(status=status.HTTP_204_NO_CONTENT) diff --git a/services/web/server/src/simcore_service_webserver/users/_preferences_models.py b/services/web/server/src/simcore_service_webserver/user_preferences/_models.py similarity index 98% rename from services/web/server/src/simcore_service_webserver/users/_preferences_models.py rename to services/web/server/src/simcore_service_webserver/user_preferences/_models.py index 6a871bcfafe2..053e366fc226 100644 --- a/services/web/server/src/simcore_service_webserver/users/_preferences_models.py +++ b/services/web/server/src/simcore_service_webserver/user_preferences/_models.py @@ -12,7 +12,7 @@ ) from pydantic import Field, NonNegativeInt -from .settings import UsersSettings, get_plugin_settings +from ..users.settings import UsersSettings, get_plugin_settings _MINUTE: Final[NonNegativeInt] = 60 diff --git a/services/web/server/src/simcore_service_webserver/user_preferences/_repository.py b/services/web/server/src/simcore_service_webserver/user_preferences/_repository.py new file mode 100644 index 000000000000..e979f26d0e8c --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/user_preferences/_repository.py @@ -0,0 +1,62 @@ +from models_library.products import ProductName +from models_library.user_preferences import FrontendUserPreference, PreferenceName +from models_library.users import UserID +from simcore_postgres_database.utils_repos import ( + pass_or_acquire_connection, + transaction_context, +) +from simcore_postgres_database.utils_user_preferences import FrontendUserPreferencesRepo +from sqlalchemy.ext.asyncio import AsyncConnection + +from ..db.base_repository import BaseRepository + + +class UserPreferencesRepository(BaseRepository): + @staticmethod + def _get_user_preference_name( + user_id: UserID, preference_name: PreferenceName + ) -> str: + return f"{user_id}/{preference_name}" + + async def get_user_preference( + self, + connection: AsyncConnection | None = None, + *, + user_id: UserID, + product_name: ProductName, + preference_class: type[FrontendUserPreference], + ) -> FrontendUserPreference | None: + async with pass_or_acquire_connection(self.engine, connection) as conn: + preference_payload: dict | None = await FrontendUserPreferencesRepo.load( + conn, + user_id=user_id, + preference_name=self._get_user_preference_name( + user_id, preference_class.get_preference_name() + ), + 
product_name=product_name, + ) + + return ( + None + if preference_payload is None + else preference_class.model_validate(preference_payload) + ) + + async def set_user_preference( + self, + connection: AsyncConnection | None = None, + *, + user_id: UserID, + product_name: ProductName, + preference: FrontendUserPreference, + ) -> None: + async with transaction_context(self.engine, connection) as conn: + await FrontendUserPreferencesRepo.save( + conn, + user_id=user_id, + preference_name=self._get_user_preference_name( + user_id, preference.get_preference_name() + ), + product_name=product_name, + payload=preference.to_db(), + ) diff --git a/services/web/server/src/simcore_service_webserver/users/_preferences_service.py b/services/web/server/src/simcore_service_webserver/user_preferences/_service.py similarity index 87% rename from services/web/server/src/simcore_service_webserver/users/_preferences_service.py rename to services/web/server/src/simcore_service_webserver/user_preferences/_service.py index 0a5893141e1e..a46db19ff212 100644 --- a/services/web/server/src/simcore_service_webserver/users/_preferences_service.py +++ b/services/web/server/src/simcore_service_webserver/user_preferences/_service.py @@ -19,15 +19,15 @@ GroupExtraPropertiesRepo, ) -from ..db.plugin import get_database_engine -from . import _preferences_repository -from ._preferences_models import ( +from ..db.plugin import get_database_engine_legacy +from ..users.exceptions import FrontendUserPreferenceIsNotDefinedError +from ._models import ( ALL_FRONTEND_PREFERENCES, TelemetryLowDiskSpaceWarningThresholdFrontendUserPreference, get_preference_identifier, get_preference_name, ) -from .exceptions import FrontendUserPreferenceIsNotDefinedError +from ._repository import UserPreferencesRepository _MAX_PARALLEL_DB_QUERIES: Final[NonNegativeInt] = 2 @@ -37,10 +37,11 @@ async def _get_frontend_user_preferences( user_id: UserID, product_name: ProductName, ) -> list[FrontendUserPreference]: + repo = UserPreferencesRepository.create_from_app(app) + saved_user_preferences: list[FrontendUserPreference | None] = await logged_gather( *( - _preferences_repository.get_user_preference( - app, + repo.get_user_preference( user_id=user_id, product_name=product_name, preference_class=preference_class, @@ -64,8 +65,8 @@ async def get_frontend_user_preference( product_name: ProductName, preference_class: type[FrontendUserPreference], ) -> AnyUserPreference | None: - return await _preferences_repository.get_user_preference( - app, + repo = UserPreferencesRepository.create_from_app(app) + return await repo.get_user_preference( user_id=user_id, product_name=product_name, preference_class=preference_class, @@ -75,7 +76,7 @@ async def get_frontend_user_preference( async def get_frontend_user_preferences_aggregation( app: web.Application, *, user_id: UserID, product_name: ProductName ) -> AggregatedPreferences: - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: group_extra_properties = ( await GroupExtraPropertiesRepo.get_aggregated_properties_for_user( conn, user_id=user_id, product_name=product_name @@ -127,8 +128,8 @@ async def set_frontend_user_preference( FrontendUserPreference.get_preference_class_from_name(preference_name), ) - await _preferences_repository.set_user_preference( - app, + repo = UserPreferencesRepository.create_from_app(app) + await repo.set_user_preference( user_id=user_id, preference=TypeAdapter(preference_class).validate_python({"value": value}), # type: 
ignore[arg-type] # GitHK this is suspicious product_name=product_name, diff --git a/services/web/server/src/simcore_service_webserver/user_preferences/bootstrap.py b/services/web/server/src/simcore_service_webserver/user_preferences/bootstrap.py new file mode 100644 index 000000000000..058389bd8f7b --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/user_preferences/bootstrap.py @@ -0,0 +1,16 @@ +import logging + +from aiohttp import web + +from ..application_setup import ensure_single_setup +from ._controller.rest import user_preferences_rest +from ._models import overwrite_user_preferences_defaults + +_logger = logging.getLogger(__name__) + + +@ensure_single_setup(__name__, logger=_logger) +def setup_user_preferences_feature(app: web.Application): + + overwrite_user_preferences_defaults(app) + app.router.add_routes(user_preferences_rest.routes) diff --git a/services/web/server/src/simcore_service_webserver/users/preferences_api.py b/services/web/server/src/simcore_service_webserver/user_preferences/user_preferences_service.py similarity index 62% rename from services/web/server/src/simcore_service_webserver/users/preferences_api.py rename to services/web/server/src/simcore_service_webserver/user_preferences/user_preferences_service.py index 9f51b52e8b31..6f593985fbfd 100644 --- a/services/web/server/src/simcore_service_webserver/users/preferences_api.py +++ b/services/web/server/src/simcore_service_webserver/user_preferences/user_preferences_service.py @@ -1,17 +1,18 @@ -from ._preferences_models import ( +from ._models import ( PreferredWalletIdFrontendUserPreference, TwoFAFrontendUserPreference, ) -from ._preferences_service import ( +from ._service import ( get_frontend_user_preference, + get_frontend_user_preferences_aggregation, set_frontend_user_preference, ) -from .exceptions import UserDefaultWalletNotFoundError -__all__ = ( - "get_frontend_user_preference", +__all__: tuple[str, ...] 
= ( "PreferredWalletIdFrontendUserPreference", "TwoFAFrontendUserPreference", + "get_frontend_user_preference", + "get_frontend_user_preferences_aggregation", "set_frontend_user_preference", - "UserDefaultWalletNotFoundError", ) +# nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/user_tokens/__init__.py b/services/web/server/src/simcore_service_webserver/user_tokens/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/web/server/src/simcore_service_webserver/user_tokens/_controller/__init__.py b/services/web/server/src/simcore_service_webserver/user_tokens/_controller/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/web/server/src/simcore_service_webserver/user_tokens/_controller/rest/__init__.py b/services/web/server/src/simcore_service_webserver/user_tokens/_controller/rest/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/web/server/src/simcore_service_webserver/user_tokens/_controller/rest/_rest_exceptions.py b/services/web/server/src/simcore_service_webserver/user_tokens/_controller/rest/_rest_exceptions.py new file mode 100644 index 000000000000..5f8717f3c833 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/user_tokens/_controller/rest/_rest_exceptions.py @@ -0,0 +1,25 @@ +from common_library.user_messages import user_message +from servicelib.aiohttp import status + +from ....exception_handling import ( + ExceptionToHttpErrorMap, + HttpErrorInfo, + exception_handling_decorator, + to_exceptions_handlers_map, +) +from ....users.exceptions import TokenNotFoundError + +_TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { + TokenNotFoundError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + user_message( + "The API token for '{service}' could not be found.", + _version=1, + ), + ), +} + + +handle_rest_requests_exceptions = exception_handling_decorator( + to_exceptions_handlers_map(_TO_HTTP_ERROR_MAP) +) diff --git a/services/web/server/src/simcore_service_webserver/users/_tokens_rest.py b/services/web/server/src/simcore_service_webserver/user_tokens/_controller/rest/user_tokens_rest.py similarity index 50% rename from services/web/server/src/simcore_service_webserver/users/_tokens_rest.py rename to services/web/server/src/simcore_service_webserver/user_tokens/_controller/rest/user_tokens_rest.py index ef38c61eb295..26bc0f6b9a73 100644 --- a/services/web/server/src/simcore_service_webserver/users/_tokens_rest.py +++ b/services/web/server/src/simcore_service_webserver/user_tokens/_controller/rest/user_tokens_rest.py @@ -1,23 +1,24 @@ -import functools import logging from aiohttp import web -from models_library.api_schemas_webserver.users import MyTokenCreate, MyTokenGet -from pydantic import BaseModel +from models_library.api_schemas_webserver.users import ( + MyTokenCreate, + MyTokenGet, + TokenPathParams, +) from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import ( parse_request_body_as, parse_request_path_parameters_as, ) -from servicelib.aiohttp.typing_extension import Handler -from .._meta import API_VTAG -from ..login.decorators import login_required -from ..security.decorators import permission_required -from ..utils_aiohttp import envelope_json_response -from . 
import _tokens_service -from ._common.schemas import UsersRequestContext -from .exceptions import TokenNotFoundError +from ...._meta import API_VTAG +from ....login.decorators import login_required +from ....security.decorators import permission_required +from ....users.schemas import UsersRequestContext +from ....utils_aiohttp import envelope_json_response +from ... import _service +from ._rest_exceptions import handle_rest_requests_exceptions _logger = logging.getLogger(__name__) @@ -25,59 +26,41 @@ routes = web.RouteTableDef() -def _handle_tokens_errors(handler: Handler): - @functools.wraps(handler) - async def _wrapper(request: web.Request) -> web.StreamResponse: - try: - return await handler(request) - - except TokenNotFoundError as exc: - raise web.HTTPNotFound( - reason=f"Token for {exc.service_id} not found" - ) from exc - - return _wrapper - - @routes.get(f"/{API_VTAG}/me/tokens", name="list_tokens") @login_required -@_handle_tokens_errors +@handle_rest_requests_exceptions @permission_required("user.tokens.*") async def list_tokens(request: web.Request) -> web.Response: req_ctx = UsersRequestContext.model_validate(request) - all_tokens = await _tokens_service.list_tokens(request.app, req_ctx.user_id) + all_tokens = await _service.list_tokens(request.app, req_ctx.user_id) return envelope_json_response([MyTokenGet.from_domain_model(t) for t in all_tokens]) @routes.post(f"/{API_VTAG}/me/tokens", name="create_token") @login_required -@_handle_tokens_errors +@handle_rest_requests_exceptions @permission_required("user.tokens.*") async def create_token(request: web.Request) -> web.Response: req_ctx = UsersRequestContext.model_validate(request) token_create = await parse_request_body_as(MyTokenCreate, request) - token = await _tokens_service.create_token( - request.app, req_ctx.user_id, token_create.to_domain_model() + token = await _service.create_token( + request.app, user_id=req_ctx.user_id, token=token_create.to_domain_model() ) return envelope_json_response(MyTokenGet.from_domain_model(token), web.HTTPCreated) -class _TokenPathParams(BaseModel): - service: str - - @routes.get(f"/{API_VTAG}/me/tokens/{{service}}", name="get_token") @login_required -@_handle_tokens_errors +@handle_rest_requests_exceptions @permission_required("user.tokens.*") async def get_token(request: web.Request) -> web.Response: req_ctx = UsersRequestContext.model_validate(request) - req_path_params = parse_request_path_parameters_as(_TokenPathParams, request) + req_path_params = parse_request_path_parameters_as(TokenPathParams, request) - token = await _tokens_service.get_token( - request.app, req_ctx.user_id, req_path_params.service + token = await _service.get_token( + request.app, user_id=req_ctx.user_id, service_id=req_path_params.service ) return envelope_json_response(MyTokenGet.from_domain_model(token)) @@ -85,14 +68,14 @@ async def get_token(request: web.Request) -> web.Response: @routes.delete(f"/{API_VTAG}/me/tokens/{{service}}", name="delete_token") @login_required -@_handle_tokens_errors +@handle_rest_requests_exceptions @permission_required("user.tokens.*") async def delete_token(request: web.Request) -> web.Response: req_ctx = UsersRequestContext.model_validate(request) - req_path_params = parse_request_path_parameters_as(_TokenPathParams, request) + req_path_params = parse_request_path_parameters_as(TokenPathParams, request) - await _tokens_service.delete_token( - request.app, req_ctx.user_id, req_path_params.service + await _service.delete_token( + request.app, user_id=req_ctx.user_id, 
service_id=req_path_params.service ) return web.json_response(status=status.HTTP_204_NO_CONTENT) diff --git a/services/web/server/src/simcore_service_webserver/user_tokens/_repository.py b/services/web/server/src/simcore_service_webserver/user_tokens/_repository.py new file mode 100644 index 000000000000..2478ace13f5a --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/user_tokens/_repository.py @@ -0,0 +1,121 @@ +"""Private user tokens from external services (e.g. dat-core) + +Implemented as a stand-alone API but currently only exposed to the handlers +""" + +import sqlalchemy as sa +from models_library.users import UserID, UserThirdPartyToken +from simcore_postgres_database.utils_repos import ( + pass_or_acquire_connection, + transaction_context, +) +from sqlalchemy import and_, literal_column +from sqlalchemy.ext.asyncio import AsyncConnection + +from ..db.base_repository import BaseRepository +from ..db.models import tokens +from ..users.exceptions import TokenNotFoundError + + +class UserTokensRepository(BaseRepository): + async def create_token( + self, + connection: AsyncConnection | None = None, + *, + user_id: UserID, + token: UserThirdPartyToken, + ) -> UserThirdPartyToken: + async with transaction_context(self.engine, connection) as conn: + await conn.execute( + tokens.insert().values( + user_id=user_id, + token_service=token.service, + token_data=token.model_dump(mode="json"), + ) + ) + return token + + async def list_tokens( + self, + connection: AsyncConnection | None = None, + *, + user_id: UserID, + ) -> list[UserThirdPartyToken]: + async with pass_or_acquire_connection(self.engine, connection) as conn: + result = await conn.execute( + sa.select(tokens.c.token_data).where(tokens.c.user_id == user_id) + ) + return [ + UserThirdPartyToken.model_construct(**row["token_data"]) + for row in result.fetchall() + ] + + async def get_token( + self, + connection: AsyncConnection | None = None, + *, + user_id: UserID, + service_id: str, + ) -> UserThirdPartyToken: + async with pass_or_acquire_connection(self.engine, connection) as conn: + result = await conn.execute( + sa.select(tokens.c.token_data).where( + and_( + tokens.c.user_id == user_id, + tokens.c.token_service == service_id, + ) + ) + ) + if row := result.one_or_none(): + return UserThirdPartyToken.model_construct(**row["token_data"]) + raise TokenNotFoundError(service_id=service_id) + + async def update_token( + self, + connection: AsyncConnection | None = None, + *, + user_id: UserID, + service_id: str, + token_data: dict[str, str], + ) -> UserThirdPartyToken: + async with transaction_context(self.engine, connection) as conn: + result = await conn.execute( + sa.select(tokens.c.token_data, tokens.c.token_id).where( + (tokens.c.user_id == user_id) + & (tokens.c.token_service == service_id) + ) + ) + row = result.one_or_none() + if not row: + raise TokenNotFoundError(service_id=service_id) + + data = dict(row["token_data"]) + tid = row["token_id"] + data.update(token_data) + + result = await conn.execute( + tokens.update() + .where(tokens.c.token_id == tid) + .values(token_data=data) + .returning(literal_column("*")) + ) + updated_token = result.one() + assert updated_token # nosec + return UserThirdPartyToken.model_construct(**updated_token["token_data"]) + + async def delete_token( + self, + connection: AsyncConnection | None = None, + *, + user_id: UserID, + service_id: str, + ) -> None: + async with transaction_context(self.engine, connection) as conn: + await conn.execute( + tokens.delete().where( + 
and_( + tokens.c.user_id == user_id, + tokens.c.token_service == service_id, + ) + ) + ) diff --git a/services/web/server/src/simcore_service_webserver/user_tokens/_service.py b/services/web/server/src/simcore_service_webserver/user_tokens/_service.py new file mode 100644 index 000000000000..eeb320e50fc7 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/user_tokens/_service.py @@ -0,0 +1,38 @@ +"""Service interface for user tokens operations""" + +from aiohttp import web +from models_library.users import UserID, UserThirdPartyToken + +from ._repository import UserTokensRepository + + +async def list_tokens( + app: web.Application, user_id: UserID +) -> list[UserThirdPartyToken]: + """List all tokens for a user""" + repo = UserTokensRepository.create_from_app(app) + return await repo.list_tokens(user_id=user_id) + + +async def create_token( + app: web.Application, *, user_id: UserID, token: UserThirdPartyToken +) -> UserThirdPartyToken: + """Create a new token for a user""" + repo = UserTokensRepository.create_from_app(app) + return await repo.create_token(user_id=user_id, token=token) + + +async def get_token( + app: web.Application, *, user_id: UserID, service_id: str +) -> UserThirdPartyToken: + """Get a specific token for a user and service""" + repo = UserTokensRepository.create_from_app(app) + return await repo.get_token(user_id=user_id, service_id=service_id) + + +async def delete_token( + app: web.Application, *, user_id: UserID, service_id: str +) -> None: + """Delete a token for a user and service""" + repo = UserTokensRepository.create_from_app(app) + await repo.delete_token(user_id=user_id, service_id=service_id) diff --git a/services/web/server/src/simcore_service_webserver/user_tokens/bootstrap.py b/services/web/server/src/simcore_service_webserver/user_tokens/bootstrap.py new file mode 100644 index 000000000000..864845bcee65 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/user_tokens/bootstrap.py @@ -0,0 +1,13 @@ +import logging + +from aiohttp import web + +from ..application_setup import ensure_single_setup +from ._controller.rest import user_tokens_rest + +_logger = logging.getLogger(__name__) + + +@ensure_single_setup(__name__, logger=_logger) +def setup_user_tokens_feature(app: web.Application): + app.add_routes(user_tokens_rest.routes) diff --git a/services/web/server/src/simcore_service_webserver/users/_accounts_repository.py b/services/web/server/src/simcore_service_webserver/users/_accounts_repository.py new file mode 100644 index 000000000000..5dfcb27f7833 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/users/_accounts_repository.py @@ -0,0 +1,596 @@ +import logging +from typing import Any, cast + +import sqlalchemy as sa +from common_library.exclude import Unset, is_unset +from common_library.users_enums import AccountRequestStatus +from models_library.products import ProductName +from models_library.users import ( + UserID, +) +from simcore_postgres_database.models.groups import groups, user_to_groups +from simcore_postgres_database.models.products import products +from simcore_postgres_database.models.users import UserStatus, users +from simcore_postgres_database.models.users_details import ( + users_pre_registration_details, +) +from simcore_postgres_database.utils_repos import ( + pass_or_acquire_connection, + transaction_context, +) +from sqlalchemy.engine.row import Row +from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine + +_logger = logging.getLogger(__name__) + + +# +# 
PRE-REGISTRATION +# + + +async def create_user_pre_registration( + engine: AsyncEngine, + connection: AsyncConnection | None = None, + *, + email: str, + created_by: UserID | None = None, + product_name: ProductName, + link_to_existing_user: bool = True, + **other_values, +) -> int: + """Creates a user pre-registration entry. + + Args: + engine: Database engine + connection: Optional existing connection + email: Email address for the pre-registration + created_by: ID of the user creating the pre-registration (None for anonymous) + product_name: Product name the user is requesting access to + link_to_existing_user: Whether to link the pre-registration to an existing user with the same email + **other_values: Additional values to insert in the pre-registration entry + + Returns: + ID of the created pre-registration + """ + async with transaction_context(engine, connection) as conn: + # If link_to_existing_user is True, try to find a matching user + user_id = None + if link_to_existing_user: + result = await conn.execute( + sa.select(users.c.id).where(users.c.email == email) + ) + user = result.one_or_none() + if user: + user_id = user.id + + # Insert the pre-registration record + values = { + "pre_email": email, + "product_name": product_name, + **other_values, + } + + # Only add created_by if not None + if created_by is not None: + values["created_by"] = created_by + + # Add user_id if found + if user_id is not None: + values["user_id"] = user_id + + result = await conn.execute( + sa.insert(users_pre_registration_details) + .values(**values) + .returning(users_pre_registration_details.c.id) + ) + pre_registration_id: int = result.scalar_one() + return pre_registration_id + + +async def list_user_pre_registrations( + engine: AsyncEngine, + connection: AsyncConnection | None = None, + *, + filter_by_pre_email: str | None = None, + filter_by_product_name: ProductName | Unset = Unset.VALUE, + filter_by_account_request_status: AccountRequestStatus | None = None, + pagination_limit: int = 50, + pagination_offset: int = 0, +) -> tuple[list[dict[str, Any]], int]: + """Lists user pre-registrations with optional filters. 
+ + Args: + engine: Database engine + connection: Optional existing connection + filter_by_pre_email: Filter by email pattern (SQL LIKE pattern) + filter_by_product_name: Filter by product name + filter_by_account_request_status: Filter by account request status + pagination_limit: Maximum number of results to return + pagination_offset: Number of results to skip (for pagination) + + Returns: + Tuple of (list of pre-registration records, total count) + """ + # Base query conditions + where_conditions = [] + + # Apply filters if provided + if filter_by_pre_email is not None: + where_conditions.append( + users_pre_registration_details.c.pre_email.ilike(f"%{filter_by_pre_email}%") + ) + + if not is_unset(filter_by_product_name): + where_conditions.append( + users_pre_registration_details.c.product_name == filter_by_product_name + ) + + if filter_by_account_request_status is not None: + where_conditions.append( + users_pre_registration_details.c.account_request_status + == filter_by_account_request_status + ) + + # Combine conditions + where_clause = sa.and_(*where_conditions) if where_conditions else sa.true() + + # Create an alias for the users table for the created_by join + creator_users_alias = sa.alias(users, name="creator") + reviewer_users_alias = sa.alias(users, name="reviewer") + + # Count query for pagination + count_query = ( + sa.select(sa.func.count().label("total")) + .select_from(users_pre_registration_details) + .where(where_clause) + ) + + # Main query to get pre-registration data + main_query = ( + sa.select( + users_pre_registration_details.c.id, + users_pre_registration_details.c.user_id, + users_pre_registration_details.c.pre_email, + users_pre_registration_details.c.pre_first_name, + users_pre_registration_details.c.pre_last_name, + users_pre_registration_details.c.pre_phone, + users_pre_registration_details.c.institution, + users_pre_registration_details.c.address, + users_pre_registration_details.c.city, + users_pre_registration_details.c.state, + users_pre_registration_details.c.postal_code, + users_pre_registration_details.c.country, + users_pre_registration_details.c.product_name, + users_pre_registration_details.c.account_request_status, + users_pre_registration_details.c.extras, + users_pre_registration_details.c.created, + users_pre_registration_details.c.modified, + users_pre_registration_details.c.created_by, + creator_users_alias.c.name.label("created_by_name"), + users_pre_registration_details.c.account_request_reviewed_by, + reviewer_users_alias.c.name.label("reviewed_by_name"), + users_pre_registration_details.c.account_request_reviewed_at, + ) + .select_from( + users_pre_registration_details.outerjoin( + creator_users_alias, + users_pre_registration_details.c.created_by == creator_users_alias.c.id, + ).outerjoin( + reviewer_users_alias, + users_pre_registration_details.c.account_request_reviewed_by + == reviewer_users_alias.c.id, + ) + ) + .where(where_clause) + .order_by( + users_pre_registration_details.c.created.desc(), + users_pre_registration_details.c.pre_email, + ) + .limit(pagination_limit) + .offset(pagination_offset) + ) + + async with pass_or_acquire_connection(engine, connection) as conn: + # Get total count + count_result = await conn.execute(count_query) + total_count = count_result.scalar_one() + + # Get pre-registration records + result = await conn.execute(main_query) + records = result.mappings().all() + + return cast(list[dict[str, Any]], list(records)), total_count + + +async def review_user_pre_registration( + engine: AsyncEngine, + 
connection: AsyncConnection | None = None, + *, + pre_registration_id: int, + reviewed_by: UserID, + new_status: AccountRequestStatus, + invitation_extras: dict[str, Any] | None = None, +) -> None: + """Updates the account request status of a pre-registered user. + + Args: + engine: The database engine + connection: Optional existing connection + pre_registration_id: ID of the pre-registration record + reviewed_by: ID of the user who reviewed the request + new_status: New status (APPROVED or REJECTED) + invitation_extras: Optional invitation data to store in extras field + """ + if new_status not in (AccountRequestStatus.APPROVED, AccountRequestStatus.REJECTED): + msg = f"Invalid status for review: {new_status}. Must be APPROVED or REJECTED." + raise ValueError(msg) + + async with transaction_context(engine, connection) as conn: + # Base update values + update_values = { + "account_request_status": new_status, + "account_request_reviewed_by": reviewed_by, + "account_request_reviewed_at": sa.func.now(), + } + + # Add invitation extras to the existing extras if provided + if invitation_extras is not None: + assert list(invitation_extras.keys()) == ["invitation"] # nosec + + # Get the current extras first + current_extras_result = await conn.execute( + sa.select(users_pre_registration_details.c.extras).where( + users_pre_registration_details.c.id == pre_registration_id + ) + ) + current_extras_row = current_extras_result.one_or_none() + current_extras = ( + current_extras_row.extras + if current_extras_row and current_extras_row.extras + else {} + ) + + # Merge with invitation extras + merged_extras = {**current_extras, **invitation_extras} + update_values["extras"] = merged_extras + + await conn.execute( + users_pre_registration_details.update() + .values(**update_values) + .where(users_pre_registration_details.c.id == pre_registration_id) + ) + + +# +# PRE AND REGISTERED USERS +# + + +def _create_account_request_reviewed_by_username_subquery() -> Any: + """Creates a reusable subquery for getting reviewer username by ID.""" + reviewer_alias = sa.alias(users, name="reviewer_alias") + return ( + sa.select( + reviewer_alias.c.name, + ) + .where( + users_pre_registration_details.c.account_request_reviewed_by + == reviewer_alias.c.id + ) + .label("account_request_reviewed_by_username") + ) + + +def _build_left_outer_join_query( + email_like: str | None, + product_name: ProductName | None, + columns: tuple, +) -> sa.sql.Select | None: + left_where_conditions = [] + if email_like is not None: + left_where_conditions.append( + users_pre_registration_details.c.pre_email.like(email_like) + ) + join_condition = users.c.id == users_pre_registration_details.c.user_id + if product_name: + join_condition = join_condition & ( + users_pre_registration_details.c.product_name == product_name + ) + left_outer_join = sa.select(*columns).select_from( + users_pre_registration_details.outerjoin(users, join_condition) + ) + + return ( + left_outer_join.where(sa.and_(*left_where_conditions)) + if left_where_conditions + else None + ) + + +def _build_right_outer_join_query( + email_like: str | None, + user_name_like: str | None, + primary_group_id: int | None, + product_name: ProductName | None, + columns: tuple, +) -> sa.sql.Select | None: + right_where_conditions = [] + if email_like is not None: + right_where_conditions.append(users.c.email.like(email_like)) + if user_name_like is not None: + right_where_conditions.append(users.c.name.like(user_name_like)) + if primary_group_id is not None: + 
right_where_conditions.append(users.c.primary_gid == primary_group_id) + join_condition = users.c.id == users_pre_registration_details.c.user_id + if product_name: + join_condition = join_condition & ( + users_pre_registration_details.c.product_name == product_name + ) + right_outer_join = sa.select(*columns).select_from( + users.outerjoin( + users_pre_registration_details, + join_condition, + ) + ) + + return ( + right_outer_join.where(sa.and_(*right_where_conditions)) + if right_where_conditions + else None + ) + + +async def search_merged_pre_and_registered_users( + engine: AsyncEngine, + connection: AsyncConnection | None = None, + *, + filter_by_email_like: str | None = None, + filter_by_user_name_like: str | None = None, + filter_by_primary_group_id: int | None = None, + product_name: ProductName | None = None, +) -> list[Row]: + """Searches and merges users from both users and pre-registration tables""" + users_alias = sa.alias(users, name="users_alias") + + invited_by = ( + sa.select( + users_alias.c.name, + ) + .where(users_pre_registration_details.c.created_by == users_alias.c.id) + .label("invited_by") + ) + + account_request_reviewed_by_username = ( + _create_account_request_reviewed_by_username_subquery() + ) + + columns = ( + users_pre_registration_details.c.id, + users.c.first_name, + users.c.last_name, + users.c.email, + users.c.phone, + users_pre_registration_details.c.pre_email, + users_pre_registration_details.c.pre_first_name, + users_pre_registration_details.c.pre_last_name, + users_pre_registration_details.c.institution, + users_pre_registration_details.c.pre_phone, + users_pre_registration_details.c.address, + users_pre_registration_details.c.city, + users_pre_registration_details.c.state, + users_pre_registration_details.c.postal_code, + users_pre_registration_details.c.country, + users_pre_registration_details.c.user_id.label("pre_reg_user_id"), + users_pre_registration_details.c.extras, + users_pre_registration_details.c.account_request_status, + users_pre_registration_details.c.account_request_reviewed_by, + users_pre_registration_details.c.account_request_reviewed_at, + invited_by, + account_request_reviewed_by_username, # account_request_reviewed_by converted to username + users_pre_registration_details.c.created, + # NOTE: some users have no pre-registration details (e.g. 
s4l-lite) + users.c.id.label("user_id"), # real user_id from users table + users.c.name.label("user_name"), + users.c.primary_gid.label("user_primary_group_id"), + users.c.status, + ) + + left_outer_join = _build_left_outer_join_query( + filter_by_email_like, + product_name, + columns, + ) + right_outer_join = _build_right_outer_join_query( + filter_by_email_like, + filter_by_user_name_like, + filter_by_primary_group_id, + product_name, + columns, + ) + + queries = [] + if left_outer_join is not None: + queries.append(left_outer_join) + if right_outer_join is not None: + queries.append(right_outer_join) + + if not queries: + # No search criteria provided, return empty result + return [] + + final_query = queries[0] if len(queries) == 1 else sa.union(*queries) + + async with pass_or_acquire_connection(engine, connection) as conn: + result = await conn.execute(final_query) + return result.fetchall() + + +async def list_merged_pre_and_registered_users( + engine: AsyncEngine, + connection: AsyncConnection | None = None, + *, + product_name: ProductName, + filter_any_account_request_status: list[AccountRequestStatus] | None = None, + filter_include_deleted: bool = False, + pagination_limit: int = 50, + pagination_offset: int = 0, +) -> tuple[list[dict[str, Any]], int]: + """Retrieves and merges users from both users and pre-registration tables. + + This returns: + 1. Users who are registered with the platform (in users table) + 2. Users who are pre-registered (in users_pre_registration_details table) + 3. Users who are both registered and pre-registered + + Args: + engine: Database engine + connection: Optional existing connection + product_name: Product name to filter by + filter_any_account_request_status: If provided, only returns users with account request status in this list + (only pre-registered users with any of these statuses will be included) + filter_include_deleted: Whether to include deleted users + pagination_limit: Maximum number of results to return + pagination_offset: Number of results to skip (for pagination) + + Returns: + Tuple of (list of merged user data, total count) + """ + # Base where conditions for both queries + pre_reg_where = [users_pre_registration_details.c.product_name == product_name] + users_where = [] + + # Add account request status filter if specified + if filter_any_account_request_status: + pre_reg_where.append( + users_pre_registration_details.c.account_request_status.in_( + filter_any_account_request_status + ) + ) + + # Add filter for deleted users + if not filter_include_deleted: + users_where.append(users.c.status != UserStatus.DELETED) + + # Create subquery for reviewer username + account_request_reviewed_by_username = ( + _create_account_request_reviewed_by_username_subquery() + ) + + # Query for pre-registered users + # We need to left join with users to identify if the pre-registered user is already in the system + pre_reg_query = ( + sa.select( + users_pre_registration_details.c.id, + users_pre_registration_details.c.pre_email.label("email"), + users_pre_registration_details.c.pre_first_name.label("first_name"), + users_pre_registration_details.c.pre_last_name.label("last_name"), + users_pre_registration_details.c.pre_phone.label("phone"), + users_pre_registration_details.c.institution, + users_pre_registration_details.c.address, + users_pre_registration_details.c.city, + users_pre_registration_details.c.state, + users_pre_registration_details.c.postal_code, + users_pre_registration_details.c.country, + 
users_pre_registration_details.c.user_id.label("pre_reg_user_id"), + users_pre_registration_details.c.extras, + users_pre_registration_details.c.created, + users_pre_registration_details.c.account_request_status, + users_pre_registration_details.c.account_request_reviewed_by, + users_pre_registration_details.c.account_request_reviewed_at, + users.c.id.label("user_id"), + users.c.name.label("user_name"), + users.c.primary_gid.label("user_primary_group_id"), + users.c.status, + # Use created_by directly instead of a subquery + users_pre_registration_details.c.created_by.label("created_by"), + account_request_reviewed_by_username, + sa.literal_column("true").label("is_pre_registered"), + ) + .select_from( + users_pre_registration_details.outerjoin( + users, users_pre_registration_details.c.user_id == users.c.id + ) + ) + .where(sa.and_(*pre_reg_where)) + ) + + # Query for users that are associated with the product through groups + users_query = ( + sa.select( + sa.literal(None).label("id"), + users.c.email, + users.c.first_name, + users.c.last_name, + users.c.phone, + sa.literal(None).label("institution"), + sa.literal(None).label("address"), + sa.literal(None).label("city"), + sa.literal(None).label("state"), + sa.literal(None).label("postal_code"), + sa.literal(None).label("country"), + sa.literal(None).label("pre_reg_user_id"), + sa.literal(None).label("extras"), + users.c.created_at.label("created"), + sa.literal(None).label("account_request_status"), + sa.literal(None).label("account_request_reviewed_by"), + sa.literal(None).label("account_request_reviewed_at"), + users.c.id.label("user_id"), + users.c.name.label("user_name"), + users.c.primary_gid.label("user_primary_group_id"), + users.c.status, + # Match the created_by field from the pre_reg query + sa.literal(None).label("created_by"), + sa.literal(None).label("account_request_reviewed_by_username"), + sa.literal_column("false").label("is_pre_registered"), + ) + .select_from( + users.join(user_to_groups, user_to_groups.c.uid == users.c.id) + .join(groups, groups.c.gid == user_to_groups.c.gid) + .join(products, products.c.group_id == groups.c.gid) + ) + .where(sa.and_(products.c.name == product_name, *users_where)) + ) + + # If filtering by account request status, we only want pre-registered users with any of those statuses + # No need to union with regular users as they don't have account_request_status + merged_query: sa.sql.Select | sa.sql.CompoundSelect + if filter_any_account_request_status: + merged_query = pre_reg_query + else: + merged_query = pre_reg_query.union_all(users_query) + + # Add distinct on email to eliminate duplicates + merged_query_subq = merged_query.subquery() + distinct_query = ( + sa.select(merged_query_subq) + .select_from(merged_query_subq) + .distinct(merged_query_subq.c.email) + .order_by( + merged_query_subq.c.email, + # Prioritize pre-registration records if duplicate emails exist + merged_query_subq.c.is_pre_registered.desc(), + merged_query_subq.c.created.desc(), + ) + .limit(pagination_limit) + .offset(pagination_offset) + ) + + # Count query (for pagination) + count_query = sa.select(sa.func.count().label("total")).select_from( + sa.select(merged_query_subq.c.email) + .select_from(merged_query_subq) + .distinct() + .subquery() + ) + + async with pass_or_acquire_connection(engine, connection) as conn: + # Get total count + count_result = await conn.execute(count_query) + total_count = count_result.scalar_one() + + # Get user records + result = await conn.execute(distinct_query) + records = 
result.mappings().all() + + return cast(list[dict[str, Any]], records), total_count diff --git a/services/web/server/src/simcore_service_webserver/users/_accounts_service.py b/services/web/server/src/simcore_service_webserver/users/_accounts_service.py new file mode 100644 index 000000000000..0baf07c9a1f2 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/users/_accounts_service.py @@ -0,0 +1,503 @@ +import logging +from typing import Any + +from aiohttp import web +from common_library.users_enums import AccountRequestStatus +from models_library.api_schemas_webserver.users import UserAccountGet +from models_library.emails import LowerCaseEmailStr +from models_library.products import ProductName +from models_library.users import UserID +from notifications_library._email import create_email_session +from pydantic import HttpUrl +from settings_library.email import SMTPSettings +from simcore_service_webserver.products._service import get_product + +from ..db.plugin import get_asyncpg_engine +from . import _accounts_repository, _users_repository +from .exceptions import ( + AlreadyPreRegisteredError, + PendingPreRegistrationNotFoundError, +) +from .schemas import UserAccountRestPreRegister + +_logger = logging.getLogger(__name__) + +# +# PRE-REGISTRATION +# + + +async def pre_register_user( + app: web.Application, + *, + profile: UserAccountRestPreRegister, + creator_user_id: UserID | None, + product_name: ProductName, +) -> UserAccountGet: + + found = await search_users_accounts( + app, + filter_by_email_glob=profile.email, + product_name=product_name, + include_products=False, + ) + if found: + raise AlreadyPreRegisteredError(num_found=len(found), email=profile.email) + + details = profile.model_dump( + include={ + "first_name", + "last_name", + "phone", + "institution", + "address", + "city", + "state", + "country", + "postal_code", + "extras", + }, + exclude_none=True, + ) + + for key in ("first_name", "last_name", "phone"): + if key in details: + details[f"pre_{key}"] = details.pop(key) + + await _accounts_repository.create_user_pre_registration( + get_asyncpg_engine(app), + email=profile.email, + created_by=creator_user_id, + product_name=product_name, + **details, + ) + + found = await search_users_accounts( + app, + filter_by_email_glob=profile.email, + product_name=product_name, + include_products=False, + ) + + assert len(found) == 1 # nosec + return found[0] + + +# +# USER ACCOUNTS +# + + +async def list_user_accounts( + app: web.Application, + *, + product_name: ProductName, + filter_any_account_request_status: list[AccountRequestStatus] | None = None, + pagination_limit: int = 50, + pagination_offset: int = 0, +) -> tuple[list[dict[str, Any]], int]: + """ + Get a paginated list of users for admin view with filtering options. 
+ + Args: + app: The web application instance + filter_any_account_request_status: List of *any* account request statuses to filter by + pagination_limit: Maximum number of users to return + pagination_offset: Number of users to skip for pagination + + Returns: + A tuple containing (list of user dictionaries, total count of users) + """ + engine = get_asyncpg_engine(app) + + # Get user data with pagination + users_data, total_count = ( + await _accounts_repository.list_merged_pre_and_registered_users( + engine, + product_name=product_name, + filter_any_account_request_status=filter_any_account_request_status, + pagination_limit=pagination_limit, + pagination_offset=pagination_offset, + ) + ) + + # For each user, append additional information if needed + result = [] + for user in users_data: + # Add any additional processing needed for admin view + user_dict = dict(user) + + # Add products information if needed + user_id = user.get("user_id") + if user_id: + products = await _users_repository.get_user_products( + engine, user_id=user_id + ) + user_dict["products"] = [p.product_name for p in products] + + user_dict["registered"] = ( + user_id is not None + if user.get("pre_email") + else user.get("status") is not None + ) + + result.append(user_dict) + + return result, total_count + + +async def search_users_accounts( + app: web.Application, + *, + filter_by_email_glob: str | None = None, + filter_by_primary_group_id: int | None = None, + filter_by_user_name_glob: str | None = None, + product_name: ProductName | None = None, + include_products: bool = False, +) -> list[UserAccountGet]: + """ + WARNING: this information is reserved for admin users. Note that the returned model is UserAccountGet + + NOTE: Functions in the service layer typically validate the caller's access rights + using parameters like product_name and user_id. However, this function skips + such checks as it is designed for scenarios (e.g., background tasks) where + no caller or context is available. 
+ """ + + if ( + filter_by_email_glob is None + and filter_by_user_name_glob is None + and filter_by_primary_group_id is None + ): + msg = "At least one filter (email glob, user name like, or primary group ID) must be provided" + raise ValueError(msg) + + def _glob_to_sql_like(glob_pattern: str) -> str: + # Escape SQL LIKE special characters in the glob pattern + sql_like_pattern = glob_pattern.replace("%", r"\%").replace("_", r"\_") + # Convert glob wildcards to SQL LIKE wildcards + return sql_like_pattern.replace("*", "%").replace("?", "_") + + rows = await _accounts_repository.search_merged_pre_and_registered_users( + get_asyncpg_engine(app), + filter_by_email_like=( + _glob_to_sql_like(filter_by_email_glob) if filter_by_email_glob else None + ), + filter_by_primary_group_id=filter_by_primary_group_id, + filter_by_user_name_like=( + _glob_to_sql_like(filter_by_user_name_glob) + if filter_by_user_name_glob + else None + ), + product_name=product_name, + ) + + async def _list_products_or_none(user_id): + if user_id is not None and include_products: + products = await _users_repository.get_user_products( + get_asyncpg_engine(app), user_id=user_id + ) + return [_.product_name for _ in products] + return None + + return [ + UserAccountGet( + first_name=r.first_name or r.pre_first_name, + last_name=r.last_name or r.pre_last_name, + email=r.email or r.pre_email, + institution=r.institution, + phone=r.phone or r.pre_phone, + address=r.address, + city=r.city, + state=r.state, + postal_code=r.postal_code, + country=r.country, + extras=r.extras or {}, + invited_by=r.invited_by, + pre_registration_id=r.id, + pre_registration_created=r.created, + account_request_status=r.account_request_status, + account_request_reviewed_by=r.account_request_reviewed_by_username, + account_request_reviewed_at=r.account_request_reviewed_at, + products=await _list_products_or_none(r.user_id), + # NOTE: old users will not have extra details + registered=r.user_id is not None if r.pre_email else r.status is not None, + status=r.status, + # user + user_id=r.user_id, + user_name=r.user_name, + user_primary_group_id=r.user_primary_group_id, + ) + for r in rows + ] + + +async def approve_user_account( + app: web.Application, + *, + pre_registration_email: LowerCaseEmailStr, + product_name: ProductName, + reviewer_id: UserID, + invitation_extras: dict[str, Any] | None = None, +) -> int: + """Approve a user account based on their pre-registration email. 
+ + Args: + app: The web application instance + pre_registration_email: Email of the pre-registered user to approve + product_name: Product name for which the user is being approved + reviewer_id: ID of the user approving the account + + Returns: + int: The ID of the approved pre-registration record + + Raises: + PendingPreRegistrationNotFoundError: If no pre-registration is found for the email/product + """ + engine = get_asyncpg_engine(app) + + # First, find the pre-registration entry matching the email and product + pre_registrations, _ = await _accounts_repository.list_user_pre_registrations( + engine, + filter_by_pre_email=pre_registration_email, + filter_by_product_name=product_name, + filter_by_account_request_status=AccountRequestStatus.PENDING, + ) + + if not pre_registrations: + raise PendingPreRegistrationNotFoundError( + email=pre_registration_email, product_name=product_name + ) + + # There should be only one registration matching these criteria + pre_registration = pre_registrations[0] + pre_registration_id: int = pre_registration["id"] + + # Update the pre-registration status to APPROVED using the reviewer's ID + await _accounts_repository.review_user_pre_registration( + engine, + pre_registration_id=pre_registration_id, + reviewed_by=reviewer_id, + new_status=AccountRequestStatus.APPROVED, + invitation_extras=invitation_extras, + ) + + return pre_registration_id + + +async def reject_user_account( + app: web.Application, + *, + pre_registration_email: LowerCaseEmailStr, + product_name: ProductName, + reviewer_id: UserID, +) -> int: + """Reject a user account based on their pre-registration email. + + Args: + app: The web application instance + pre_registration_email: Email of the pre-registered user to reject + product_name: Product name for which the user is being rejected + reviewer_id: ID of the user rejecting the account + + Returns: + int: The ID of the rejected pre-registration record + + Raises: + PendingPreRegistrationNotFoundError: If no pre-registration is found for the email/product + """ + engine = get_asyncpg_engine(app) + + # First, find the pre-registration entry matching the email and product + pre_registrations, _ = await _accounts_repository.list_user_pre_registrations( + engine, + filter_by_pre_email=pre_registration_email, + filter_by_product_name=product_name, + filter_by_account_request_status=AccountRequestStatus.PENDING, + ) + + if not pre_registrations: + raise PendingPreRegistrationNotFoundError( + email=pre_registration_email, product_name=product_name + ) + + # There should be only one registration matching these criteria + pre_registration = pre_registrations[0] + pre_registration_id: int = pre_registration["id"] + + # Update the pre-registration status to REJECTED using the reviewer's ID + await _accounts_repository.review_user_pre_registration( + engine, + pre_registration_id=pre_registration_id, + reviewed_by=reviewer_id, + new_status=AccountRequestStatus.REJECTED, + ) + + return pre_registration_id + + +def _create_product_and_user_data( + app: web.Application, + *, + product_name: ProductName, + user_email: LowerCaseEmailStr, + first_name: str, + last_name: str, +): + """Create ProductData and UserData objects for email rendering.""" + from notifications_library._models import ProductData, ProductUIData, UserData + + # Get product data from the app + product = get_product(app, product_name=product_name) + + # Extract vendor information + vendor_display_inline = ( + str(product.vendor.get("name")) + if product.vendor and 
product.vendor.get("name") is not None + else "IT'IS Foundation" + ) + + # Extract UI information from product.vendor.ui (optional) + ui_data = ProductUIData( + logo_url=( + product.vendor.get("ui", {}).get("logo_url") if product.vendor else None + ), + strong_color=( + product.vendor.get("ui", {}).get("strong_color") if product.vendor else None + ), + ) + + # Extract homepage URL + homepage_url = product.vendor.get("url") if product.vendor else None + + product_data = ProductData( + product_name=product_name, + display_name=product.display_name, + vendor_display_inline=vendor_display_inline, + support_email=product.support_email, + homepage_url=homepage_url, + ui=ui_data, + ) + + # Create user data + user_data = UserData( + user_name=f"{first_name} {last_name}".strip(), + email=user_email, + first_name=first_name, + last_name=last_name, + ) + + return product_data, user_data + + +async def send_approval_email_to_user( + app: web.Application, + *, + product_name: ProductName, + invitation_link: HttpUrl, + user_email: LowerCaseEmailStr, + first_name: str, + last_name: str, +) -> None: + from notifications_library._email import compose_email + from notifications_library._email_render import ( + get_support_address, + get_user_address, + render_email_parts, + ) + from notifications_library._render import ( + create_render_environment_from_notifications_library, + ) + + # Create product and user data + product_data, user_data = _create_product_and_user_data( + app, + product_name=product_name, + user_email=user_email, + first_name=first_name, + last_name=last_name, + ) + + # Prepare event data + event_extra_data = { + "host": str(invitation_link).split("?")[0], + "link": str(invitation_link), + } + + # Render email parts + parts = render_email_parts( + env=create_render_environment_from_notifications_library(), + event_name="on_account_approved", + user=user_data, + product=product_data, + **event_extra_data, + ) + + # Compose email + msg = compose_email( + from_=get_support_address(product_data), + to=get_user_address(user_data), + subject=parts.subject, + content_text=parts.text_content, + content_html=parts.html_content, + ) + + # Send email + async with create_email_session(settings=SMTPSettings.create_from_envs()) as smtp: + await smtp.send_message(msg) + + +async def send_rejection_email_to_user( + app: web.Application, + *, + product_name: ProductName, + user_email: LowerCaseEmailStr, + first_name: str, + last_name: str, + host: str, +) -> None: + from notifications_library._email import compose_email + from notifications_library._email_render import ( + get_support_address, + get_user_address, + render_email_parts, + ) + from notifications_library._render import ( + create_render_environment_from_notifications_library, + ) + + # Create product and user data + product_data, user_data = _create_product_and_user_data( + app, + product_name=product_name, + user_email=user_email, + first_name=first_name, + last_name=last_name, + ) + + # Prepare event data (based on test_email_events.py) + event_extra_data = { + "host": host, + } + + # Render email parts + parts = render_email_parts( + env=create_render_environment_from_notifications_library(), + event_name="on_account_rejected", + user=user_data, + product=product_data, + **event_extra_data, + ) + + # Compose email + msg = compose_email( + from_=get_support_address(product_data), + to=get_user_address(user_data), + subject=parts.subject, + content_text=parts.text_content, + content_html=parts.html_content, + ) + + # Send email + async with 
create_email_session(settings=SMTPSettings.create_from_envs()) as smtp: + await smtp.send_message(msg) diff --git a/services/web/server/src/simcore_service_webserver/users/_controller/__init__.py b/services/web/server/src/simcore_service_webserver/users/_controller/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/web/server/src/simcore_service_webserver/users/_controller/rest/__init__.py b/services/web/server/src/simcore_service_webserver/users/_controller/rest/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/web/server/src/simcore_service_webserver/users/_controller/rest/_rest_exceptions.py b/services/web/server/src/simcore_service_webserver/users/_controller/rest/_rest_exceptions.py new file mode 100644 index 000000000000..3fc12fca855f --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/users/_controller/rest/_rest_exceptions.py @@ -0,0 +1,85 @@ +from common_library.user_messages import user_message +from servicelib.aiohttp import status + +from ....exception_handling import ( + ExceptionToHttpErrorMap, + HttpErrorInfo, + exception_handling_decorator, + to_exceptions_handlers_map, +) +from ...exceptions import ( + AlreadyPreRegisteredError, + MissingGroupExtraPropertiesForProductError, + PendingPreRegistrationNotFoundError, + PhoneRegistrationCodeInvalidError, + PhoneRegistrationPendingNotFoundError, + PhoneRegistrationSessionInvalidError, + UserNameDuplicateError, + UserNotFoundError, +) + +_TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { + PendingPreRegistrationNotFoundError: HttpErrorInfo( + status.HTTP_400_BAD_REQUEST, + user_message( + "No pending registration request found for email {email} in {product_name}.", + _version=2, + ), + ), + UserNotFoundError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + user_message( + "The requested user could not be found. " + "This may be because the user is not registered or has privacy settings enabled.", + _version=1, + ), + ), + UserNameDuplicateError: HttpErrorInfo( + status.HTTP_409_CONFLICT, + user_message( + "The username '{user_name}' is already in use. " + "Please try '{alternative_user_name}' instead.", + _version=1, + ), + ), + AlreadyPreRegisteredError: HttpErrorInfo( + status.HTTP_409_CONFLICT, + user_message( + "Found {num_found} existing account(s) for '{email}'. Unable to pre-register an existing user.", + _version=1, + ), + ), + MissingGroupExtraPropertiesForProductError: HttpErrorInfo( + status.HTTP_503_SERVICE_UNAVAILABLE, + user_message( + "This product is currently being configured and is not yet ready for use. " + "Please try again later.", + _version=1, + ), + ), + PhoneRegistrationPendingNotFoundError: HttpErrorInfo( + status.HTTP_400_BAD_REQUEST, + user_message( + "No pending phone registration found. Please start the phone registration process first.", + _version=1, + ), + ), + PhoneRegistrationSessionInvalidError: HttpErrorInfo( + status.HTTP_400_BAD_REQUEST, + user_message( + "Your phone registration session is invalid or has expired. Please start the phone registration process again.", + _version=1, + ), + ), + PhoneRegistrationCodeInvalidError: HttpErrorInfo( + status.HTTP_400_BAD_REQUEST, + user_message( + "The confirmation code you entered is incorrect. 
Please check and try again.", + _version=1, + ), + ), +} + +handle_rest_requests_exceptions = exception_handling_decorator( + to_exceptions_handlers_map(_TO_HTTP_ERROR_MAP) +) diff --git a/services/web/server/src/simcore_service_webserver/users/_common/schemas.py b/services/web/server/src/simcore_service_webserver/users/_controller/rest/_rest_schemas.py similarity index 68% rename from services/web/server/src/simcore_service_webserver/users/_common/schemas.py rename to services/web/server/src/simcore_service_webserver/users/_controller/rest/_rest_schemas.py index cf30b9360b1e..ae46ed501d75 100644 --- a/services/web/server/src/simcore_service_webserver/users/_common/schemas.py +++ b/services/web/server/src/simcore_service_webserver/users/_controller/rest/_rest_schemas.py @@ -10,38 +10,70 @@ from typing import Annotated, Any, Final import pycountry +from common_library.basic_types import DEFAULT_FACTORY from models_library.api_schemas_webserver._base import InputSchema from models_library.api_schemas_webserver.users import UserAccountGet from models_library.emails import LowerCaseEmailStr -from models_library.users import UserID -from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator -from servicelib.request_keys import RQT_USERID_KEY +from models_library.utils.common_validators import empty_str_to_none_pre_validator +from pydantic import ( + BeforeValidator, + ConfigDict, + Field, + StringConstraints, + field_validator, + model_validator, +) + +from ....models import AuthenticatedRequestContext, PhoneNumberStr -from ...constants import RQ_PRODUCT_KEY +MAX_BYTES_SIZE_EXTRAS: Final[int] = 512 -class UsersRequestContext(BaseModel): - user_id: UserID = Field(..., alias=RQT_USERID_KEY) # type: ignore[literal-required] - product_name: str = Field(..., alias=RQ_PRODUCT_KEY) # type: ignore[literal-required] +class UsersRequestContext(AuthenticatedRequestContext): ... -MAX_BYTES_SIZE_EXTRAS: Final[int] = 512 +# +# PHONE REGISTRATION +# -class PreRegisteredUserGet(InputSchema): - # NOTE: validators need pycountry! +class MyPhoneRegister(InputSchema): + phone: Annotated[ + PhoneNumberStr, + Field(description="Phone number to register"), + ] + + +class MyPhoneConfirm(InputSchema): + code: Annotated[ + str, + StringConstraints(strip_whitespace=True, pattern=r"^[A-Za-z0-9]+$"), + Field(description="Alphanumeric confirmation code"), + ] + + +# +# USER-ACCCOUNT +# + + +class UserAccountRestPreRegister(InputSchema): + # NOTE: validators require installing `pycountry` first_name: str last_name: str email: LowerCaseEmailStr - institution: str | None = Field( - default=None, description="company, university, ..." 
- ) - phone: str | None + institution: Annotated[ + str | None, Field(description="company, university, ...") + ] = None + phone: Annotated[ + PhoneNumberStr | None, BeforeValidator(empty_str_to_none_pre_validator) + ] + # billing details address: str city: str - state: str | None = Field(default=None) + state: str | None = None postal_code: str country: str extras: Annotated[ @@ -50,7 +82,7 @@ class PreRegisteredUserGet(InputSchema): default_factory=dict, description="Keeps extra information provided in the request form.", ), - ] + ] = DEFAULT_FACTORY model_config = ConfigDict(str_strip_whitespace=True, str_max_length=200) @@ -107,7 +139,7 @@ def _pre_check_and_normalize_country(cls, v): return v -# asserts field names are in sync -assert set(PreRegisteredUserGet.model_fields).issubset( +assert set(UserAccountRestPreRegister.model_fields).issubset( # nosec + # asserts field names are in sync UserAccountGet.model_fields -) # nosec +) diff --git a/services/web/server/src/simcore_service_webserver/users/_controller/rest/accounts_rest.py b/services/web/server/src/simcore_service_webserver/users/_controller/rest/accounts_rest.py new file mode 100644 index 000000000000..c5eb281cfe22 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/users/_controller/rest/accounts_rest.py @@ -0,0 +1,299 @@ +import logging +from typing import Any + +from aiohttp import web +from common_library.users_enums import AccountRequestStatus +from models_library.api_schemas_invitations.invitations import ApiInvitationInputs +from models_library.api_schemas_webserver.users import ( + UserAccountApprove, + UserAccountGet, + UserAccountReject, + UserAccountSearchQueryParams, + UsersAccountListQueryParams, +) +from models_library.rest_pagination import Page +from models_library.rest_pagination_utils import paginate_data +from servicelib.aiohttp import status +from servicelib.aiohttp.requests_validation import ( + parse_request_body_as, + parse_request_query_parameters_as, +) +from servicelib.logging_utils import log_context +from servicelib.rest_constants import RESPONSE_MODEL_POLICY +from servicelib.utils import fire_and_forget_task + +from ...._meta import API_VTAG +from ....constants import APP_FIRE_AND_FORGET_TASKS_KEY +from ....invitations import api as invitations_service +from ....login.decorators import login_required +from ....security.decorators import ( + group_or_role_permission_required, + permission_required, +) +from ....utils_aiohttp import create_json_response_from_page, envelope_json_response +from ... 
import _accounts_service +from ._rest_exceptions import handle_rest_requests_exceptions +from ._rest_schemas import UserAccountRestPreRegister, UsersRequestContext + +_logger = logging.getLogger(__name__) + + +routes = web.RouteTableDef() + +_RESPONSE_MODEL_MINIMAL_POLICY = RESPONSE_MODEL_POLICY.copy() +_RESPONSE_MODEL_MINIMAL_POLICY["exclude_none"] = True + + +@routes.get(f"/{API_VTAG}/admin/user-accounts", name="list_users_accounts") +@login_required +@group_or_role_permission_required("admin.users.read") +@handle_rest_requests_exceptions +async def list_users_accounts(request: web.Request) -> web.Response: + req_ctx = UsersRequestContext.model_validate(request) + assert req_ctx.product_name # nosec + + query_params = parse_request_query_parameters_as( + UsersAccountListQueryParams, request + ) + + if query_params.review_status == "PENDING": + filter_any_account_request_status = [AccountRequestStatus.PENDING] + elif query_params.review_status == "REVIEWED": + filter_any_account_request_status = [ + AccountRequestStatus.APPROVED, + AccountRequestStatus.REJECTED, + ] + else: + # ALL + filter_any_account_request_status = None + + user_accounts, total_count = await _accounts_service.list_user_accounts( + request.app, + product_name=req_ctx.product_name, + filter_any_account_request_status=filter_any_account_request_status, + pagination_limit=query_params.limit, + pagination_offset=query_params.offset, + ) + + def _to_domain_model(account_details: dict[str, Any]) -> UserAccountGet: + account_details.pop("account_request_reviewed_by", None) + return UserAccountGet( + extras=account_details.pop("extras") or {}, + pre_registration_id=account_details.pop("id"), + pre_registration_created=account_details.pop("created"), + account_request_reviewed_by=account_details.pop( + "account_request_reviewed_by_username" + ), + **account_details, + ) + + page = Page[UserAccountGet].model_validate( + paginate_data( + chunk=[_to_domain_model(user) for user in user_accounts], + request_url=request.url, + total=total_count, + limit=query_params.limit, + offset=query_params.offset, + ) + ) + + return create_json_response_from_page(page) + + +@routes.get(f"/{API_VTAG}/admin/user-accounts:search", name="search_user_accounts") +@login_required +@group_or_role_permission_required("admin.users.read") +@handle_rest_requests_exceptions +async def search_user_accounts(request: web.Request) -> web.Response: + req_ctx = UsersRequestContext.model_validate(request) + assert req_ctx.product_name # nosec + + query_params: UserAccountSearchQueryParams = parse_request_query_parameters_as( + UserAccountSearchQueryParams, request + ) + + found = await _accounts_service.search_users_accounts( + request.app, + filter_by_email_glob=query_params.email, + filter_by_primary_group_id=query_params.primary_group_id, + filter_by_user_name_glob=query_params.user_name, + include_products=True, + ) + + return envelope_json_response( + [ + user_for_admin.model_dump(**_RESPONSE_MODEL_MINIMAL_POLICY) + for user_for_admin in found + ] + ) + + +@routes.post( + f"/{API_VTAG}/admin/user-accounts:pre-register", name="pre_register_user_account" +) +@login_required +@permission_required("admin.users.write") +@handle_rest_requests_exceptions +async def pre_register_user_account(request: web.Request) -> web.Response: + req_ctx = UsersRequestContext.model_validate(request) + pre_user_profile = await parse_request_body_as(UserAccountRestPreRegister, request) + + user_profile = await _accounts_service.pre_register_user( + request.app, + 
profile=pre_user_profile, + creator_user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + ) + + return envelope_json_response( + user_profile.model_dump(**_RESPONSE_MODEL_MINIMAL_POLICY) + ) + + +@routes.post(f"/{API_VTAG}/admin/user-accounts:approve", name="approve_user_account") +@login_required +@permission_required("admin.users.write") +@handle_rest_requests_exceptions +async def approve_user_account(request: web.Request) -> web.Response: + req_ctx = UsersRequestContext.model_validate(request) + assert req_ctx.product_name # nosec + + approval_data = await parse_request_body_as(UserAccountApprove, request) + + invitation_result = None + if approval_data.invitation: + with log_context( + _logger, + logging.DEBUG, + "User is being approved with invitation %s for user %s", + approval_data.invitation.model_dump_json(indent=1), + approval_data.email, + ): + # Generate invitation + invitation_params = ApiInvitationInputs( + issuer=str(req_ctx.user_id), + guest=approval_data.email, + trial_account_days=approval_data.invitation.trial_account_days, + extra_credits_in_usd=approval_data.invitation.extra_credits_in_usd, + product=req_ctx.product_name, + ) + + invitation_result = await invitations_service.generate_invitation( + request.app, + params=invitation_params, + product_origin_url=request.url.origin(), + ) + + assert ( # nosec + invitation_result.extra_credits_in_usd + == approval_data.invitation.extra_credits_in_usd + ) + assert ( # nosec + invitation_result.trial_account_days + == approval_data.invitation.trial_account_days + ) + assert invitation_result.guest == approval_data.email # nosec + + # Approve the user account, passing the current user's ID as the reviewer + pre_registration_id = await _accounts_service.approve_user_account( + request.app, + pre_registration_email=approval_data.email, + product_name=req_ctx.product_name, + reviewer_id=req_ctx.user_id, + invitation_extras=( + {"invitation": invitation_result.model_dump(mode="json")} + if invitation_result + else None + ), + ) + assert pre_registration_id # nosec + + if invitation_result: + with log_context( + _logger, + logging.INFO, + "Sending invitation email to %s ...", + approval_data.email, + ): + # get pre-registration data + found = await _accounts_service.search_users_accounts( + request.app, + filter_by_email_glob=approval_data.email, + product_name=req_ctx.product_name, + include_products=False, + ) + user_account = found[0] + assert user_account.pre_registration_id == pre_registration_id # nosec + assert user_account.email == approval_data.email # nosec + + # send email to user + fire_and_forget_task( + _accounts_service.send_approval_email_to_user( + request.app, + product_name=req_ctx.product_name, + invitation_link=invitation_result.invitation_url, + user_email=approval_data.email, + first_name=user_account.first_name or "User", + last_name=user_account.last_name or "", + ), + task_suffix_name=f"{__name__}.send_approval_email_to_user.{approval_data.email}", + fire_and_forget_tasks_collection=request.app[ + APP_FIRE_AND_FORGET_TASKS_KEY + ], + ) + + return web.json_response(status=status.HTTP_204_NO_CONTENT) + + +@routes.post(f"/{API_VTAG}/admin/user-accounts:reject", name="reject_user_account") +@login_required +@permission_required("admin.users.write") +@handle_rest_requests_exceptions +async def reject_user_account(request: web.Request) -> web.Response: + req_ctx = UsersRequestContext.model_validate(request) + assert req_ctx.product_name # nosec + + rejection_data = await 
parse_request_body_as(UserAccountReject, request) + + # Reject the user account, passing the current user's ID as the reviewer + pre_registration_id = await _accounts_service.reject_user_account( + request.app, + pre_registration_email=rejection_data.email, + product_name=req_ctx.product_name, + reviewer_id=req_ctx.user_id, + ) + assert pre_registration_id # nosec + + # Send rejection email to user + with log_context( + _logger, + logging.INFO, + "Sending rejection email to %s ...", + rejection_data.email, + ): + # get pre-registration data + found = await _accounts_service.search_users_accounts( + request.app, + filter_by_email_glob=rejection_data.email, + product_name=req_ctx.product_name, + include_products=False, + ) + user_account = found[0] + assert user_account.pre_registration_id == pre_registration_id # nosec + assert user_account.email == rejection_data.email # nosec + + # send email to user + fire_and_forget_task( + _accounts_service.send_rejection_email_to_user( + request.app, + product_name=req_ctx.product_name, + user_email=rejection_data.email, + first_name=user_account.first_name or "User", + last_name=user_account.last_name or "", + host=request.host, + ), + task_suffix_name=f"{__name__}.send_rejection_email_to_user.{rejection_data.email}", + fire_and_forget_tasks_collection=request.app[APP_FIRE_AND_FORGET_TASKS_KEY], + ) + + return web.json_response(status=status.HTTP_204_NO_CONTENT) diff --git a/services/web/server/src/simcore_service_webserver/users/_controller/rest/users_rest.py b/services/web/server/src/simcore_service_webserver/users/_controller/rest/users_rest.py new file mode 100644 index 000000000000..f793de780784 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/users/_controller/rest/users_rest.py @@ -0,0 +1,199 @@ +import logging + +from aiohttp import web +from models_library.api_schemas_webserver.users import ( + MyProfileAddressGet, + MyProfileRestGet, + MyProfileRestPatch, + UserGet, + UsersSearch, +) +from servicelib.aiohttp import status +from servicelib.aiohttp.requests_validation import ( + parse_request_body_as, +) +from simcore_service_webserver.application_settings_utils import ( + requires_dev_feature_enabled, +) +from simcore_service_webserver.users.exceptions import BillingDetailsNotFoundError + +from ...._meta import API_VTAG +from ....groups import api as groups_service +from ....login.decorators import login_required +from ....products import products_web +from ....products.models import Product +from ....security.decorators import permission_required +from ....session.api import get_session +from ....utils_aiohttp import envelope_json_response +from ... 
import _users_service +from ..._users_web import RegistrationSessionManager +from ._rest_exceptions import handle_rest_requests_exceptions +from ._rest_schemas import MyPhoneConfirm, MyPhoneRegister, UsersRequestContext + +_logger = logging.getLogger(__name__) + +_REGISTRATION_CODE_VALUE_FAKE = ( + "123456" # NOTE: temporary fake while developing registration feature +) + + +routes = web.RouteTableDef() + +# +# MY PROFILE: /me +# + + +@routes.get(f"/{API_VTAG}/me", name="get_my_profile") +@login_required +@handle_rest_requests_exceptions +async def get_my_profile(request: web.Request) -> web.Response: + product: Product = products_web.get_current_product(request) + req_ctx = UsersRequestContext.model_validate(request) + + # Get groups + ( + groups_by_type, + my_product_group, + product_support_group, + ) = await groups_service.get_user_profile_groups( + request.app, user_id=req_ctx.user_id, product=product + ) + + assert groups_by_type.primary # nosec + assert groups_by_type.everyone # nosec + + # Get profile and preferences + my_profile, preferences = await _users_service.get_my_profile( + request.app, user_id=req_ctx.user_id, product_name=req_ctx.product_name + ) + + # Get profile address + try: + user_billing_details = await _users_service.get_user_billing_details( + request.app, product_name=product.name, user_id=req_ctx.user_id + ) + my_address = MyProfileAddressGet.model_validate( + user_billing_details, from_attributes=True + ) + except BillingDetailsNotFoundError: + my_address = None + + profile = MyProfileRestGet.from_domain_model( + my_profile, + groups_by_type, + my_product_group, + preferences, + product_support_group, + my_address, + ) + + return envelope_json_response(profile) + + +@routes.patch(f"/{API_VTAG}/me", name="update_my_profile") +@login_required +@permission_required("user.profile.update") +@handle_rest_requests_exceptions +async def update_my_profile(request: web.Request) -> web.Response: + req_ctx = UsersRequestContext.model_validate(request) + profile_update = await parse_request_body_as(MyProfileRestPatch, request) + + await _users_service.update_my_profile( + request.app, user_id=req_ctx.user_id, update=profile_update + ) + return web.json_response(status=status.HTTP_204_NO_CONTENT) + + +# +# PHONE REGISTRATION: /me/phone:* +# + + +@routes.post(f"/{API_VTAG}/me/phone:register", name="my_phone_register") +@login_required +@permission_required("user.profile.update") +@requires_dev_feature_enabled +@handle_rest_requests_exceptions +async def my_phone_register(request: web.Request) -> web.Response: + req_ctx = UsersRequestContext.model_validate(request) + phone_register = await parse_request_body_as(MyPhoneRegister, request) + + session = await get_session(request) + registration_session_manager = RegistrationSessionManager( + session, req_ctx.user_id, req_ctx.product_name + ) + registration_session_manager.start_registration( + phone_register.phone, code=_REGISTRATION_CODE_VALUE_FAKE + ) + + return web.json_response(status=status.HTTP_202_ACCEPTED) + + +@routes.post(f"/{API_VTAG}/me/phone:resend", name="my_phone_resend") +@login_required +@permission_required("user.profile.update") +@requires_dev_feature_enabled +@handle_rest_requests_exceptions +async def my_phone_resend(request: web.Request) -> web.Response: + req_ctx = UsersRequestContext.model_validate(request) + + session = await get_session(request) + registration_session_manager = RegistrationSessionManager( + session, req_ctx.user_id, req_ctx.product_name + ) + 
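A client-side sketch of the new phone-registration flow introduced in users_rest.py (the confirm endpoint just below completes it). This is illustrative only and not part of the changeset: the host, the "/v0" API prefix and the session-cookie name are assumptions, the phone number is an arbitrary example, and the confirmation code is the temporary fake value wired into the handlers while the feature sits behind the dev-features flag.

import aiohttp

BASE_URL = "http://localhost:8080/v0"  # assumed host and API prefix


async def register_phone(session_cookie: str) -> None:
    # the /me/phone:* routes require an authenticated session and dev features enabled
    async with aiohttp.ClientSession(cookies={"SESSION": session_cookie}) as client:  # cookie name assumed
        # 1) start: the server stores the phone and a confirmation code in the session
        resp = await client.post(f"{BASE_URL}/me/phone:register", json={"phone": "+41761234567"})
        assert resp.status == 202
        # 2) optional: regenerate the confirmation code
        resp = await client.post(f"{BASE_URL}/me/phone:resend")
        assert resp.status == 202
        # 3) confirm: "123456" is the temporary fake code used while the feature is in development
        resp = await client.post(f"{BASE_URL}/me/phone:confirm", json={"code": "123456"})
        assert resp.status == 204  # phone is now persisted on the user profile


# run with: import asyncio; asyncio.run(register_phone("<session-cookie-value>"))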
registration_session_manager.regenerate_code(new_code=_REGISTRATION_CODE_VALUE_FAKE) + + return web.json_response(status=status.HTTP_202_ACCEPTED) + + +@routes.post(f"/{API_VTAG}/me/phone:confirm", name="my_phone_confirm") +@login_required +@permission_required("user.profile.update") +@requires_dev_feature_enabled +@handle_rest_requests_exceptions +async def my_phone_confirm(request: web.Request) -> web.Response: + req_ctx = UsersRequestContext.model_validate(request) + phone_confirm = await parse_request_body_as(MyPhoneConfirm, request) + + session = await get_session(request) + registration_session_manager = RegistrationSessionManager( + session, req_ctx.user_id, req_ctx.product_name + ) + + registration = registration_session_manager.validate_pending_registration() + registration_session_manager.validate_confirmation_code(phone_confirm.code) + + await _users_service.update_user_phone( + request.app, + user_id=req_ctx.user_id, + phone=registration["data"], + ) + + registration_session_manager.clear_session() + + return web.json_response(status=status.HTTP_204_NO_CONTENT) + + +# Public Users API endpoints + + +@routes.post(f"/{API_VTAG}/users:search", name="search_users") +@login_required +@permission_required("user.read") +@handle_rest_requests_exceptions +async def search_users(request: web.Request) -> web.Response: + req_ctx = UsersRequestContext.model_validate(request) + assert req_ctx.product_name # nosec + + # NOTE: Decided for body instead of query parameters because it is easier for the front-end + search_params = await parse_request_body_as(UsersSearch, request) + + found = await _users_service.search_public_users( + request.app, + caller_id=req_ctx.user_id, + match_=search_params.match_, + limit=search_params.limit, + ) + + return envelope_json_response([UserGet.from_domain_model(user) for user in found]) diff --git a/services/web/server/src/simcore_service_webserver/users/_common/models.py b/services/web/server/src/simcore_service_webserver/users/_models.py similarity index 86% rename from services/web/server/src/simcore_service_webserver/users/_common/models.py rename to services/web/server/src/simcore_service_webserver/users/_models.py index 967f010d0b06..b9f612d34c55 100644 --- a/services/web/server/src/simcore_service_webserver/users/_common/models.py +++ b/services/web/server/src/simcore_service_webserver/users/_models.py @@ -1,5 +1,6 @@ from typing import Annotated, Any, NamedTuple, Self, TypedDict +from models_library.api_schemas_webserver.users import MyProfileRestPatch from models_library.basic_types import IDStr from models_library.emails import LowerCaseEmailStr from pydantic import BaseModel, ConfigDict, EmailStr, Field @@ -43,9 +44,9 @@ def flatten_dict(d: dict, parent_key="", sep="_"): return dict(items) -class ToUserUpdateDB(BaseModel): +class UserModelAdapter(BaseModel): """ - Maps ProfileUpdate api-model into UserUpdate db-model + Maps ProfileUpdate api schema into UserUpdate db-model """ # NOTE: field names are UserDB columns @@ -62,13 +63,13 @@ class ToUserUpdateDB(BaseModel): model_config = ConfigDict(extra="forbid") @classmethod - def from_api(cls, profile_update) -> Self: + def from_rest_schema_model(cls, profile_update: MyProfileRestPatch) -> Self: # The mapping of embed fields to flatten keys is done here return cls.model_validate( flatten_dict(profile_update.model_dump(exclude_unset=True, by_alias=False)) ) - def to_db(self) -> dict[str, Any]: + def to_db_values(self) -> dict[str, Any]: return self.model_dump(exclude_unset=True, by_alias=False) diff 
--git a/services/web/server/src/simcore_service_webserver/users/_notifications.py b/services/web/server/src/simcore_service_webserver/users/_notifications.py deleted file mode 100644 index 68b322dc29c4..000000000000 --- a/services/web/server/src/simcore_service_webserver/users/_notifications.py +++ /dev/null @@ -1,131 +0,0 @@ -from datetime import datetime -from enum import auto -from typing import Final, Literal -from uuid import uuid4 - -from models_library.products import ProductName -from models_library.users import UserID -from models_library.utils.enums import StrAutoEnum -from pydantic import BaseModel, ConfigDict, NonNegativeInt, field_validator - -MAX_NOTIFICATIONS_FOR_USER_TO_SHOW: Final[NonNegativeInt] = 10 -MAX_NOTIFICATIONS_FOR_USER_TO_KEEP: Final[NonNegativeInt] = 100 - - -def get_notification_key(user_id: UserID) -> str: - return f"user_id={user_id}" - - -class NotificationCategory(StrAutoEnum): - NEW_ORGANIZATION = auto() - STUDY_SHARED = auto() - TEMPLATE_SHARED = auto() - ANNOTATION_NOTE = auto() - WALLET_SHARED = auto() - - -class BaseUserNotification(BaseModel): - user_id: UserID - category: NotificationCategory - actionable_path: str - title: str - text: str - date: datetime - product: Literal["UNDEFINED"] | ProductName = "UNDEFINED" - resource_id: Literal[""] | str = "" - user_from_id: Literal[None] | UserID = None - - @field_validator("category", mode="before") - @classmethod - def category_to_upper(cls, value: str) -> str: - return value.upper() - - -class UserNotificationCreate(BaseUserNotification): - ... - - -class UserNotificationPatch(BaseModel): - read: bool - - -class UserNotification(BaseUserNotification): - # Ideally the `id` field, will be a UUID type in the future. - # Since there is no Redis data migration service, data type - # will not change to UUID nor Union[str, UUID] - id: str - read: bool - - @classmethod - def create_from_request_data( - cls, request_data: UserNotificationCreate - ) -> "UserNotification": - return cls.model_construct( - id=f"{uuid4()}", read=False, **request_data.model_dump() - ) - - model_config = ConfigDict( - json_schema_extra={ - "examples": [ - { - "id": "3fb96d89-ff5d-4d27-b5aa-d20d46e20eb8", - "user_id": "1", - "category": "NEW_ORGANIZATION", - "actionable_path": "organization/40", - "title": "New organization", - "text": "You're now member of a new Organization", - "date": "2023-02-23T16:23:13.122Z", - "product": "osparc", - "read": True, - }, - { - "id": "ba64ffce-c58c-4382-aad6-96a7787251d6", - "user_id": "1", - "category": "STUDY_SHARED", - "actionable_path": "study/27edd65c-b360-11ed-93d7-02420a000014", - "title": "Study shared", - "text": "A study was shared with you", - "date": "2023-02-23T16:25:13.122Z", - "product": "osparc", - "read": False, - }, - { - "id": "390053c9-3931-40e1-839f-585268f6fd3c", - "user_id": "1", - "category": "TEMPLATE_SHARED", - "actionable_path": "template/f60477b6-a07e-11ed-8d29-02420a00002d", - "title": "Template shared", - "text": "A template was shared with you", - "date": "2023-02-23T16:28:13.122Z", - "product": "osparc", - "read": False, - }, - { - "id": "390053c9-3931-40e1-839f-585268f6fd3d", - "user_id": "1", - "category": "ANNOTATION_NOTE", - "actionable_path": "study/27edd65c-b360-11ed-93d7-02420a000014", - "title": "Note added", - "text": "A Note was added for you", - "date": "2023-02-23T16:28:13.122Z", - "product": "s4l", - "read": False, - "resource_id": "3fb96d89-ff5d-4d27-b5aa-d20d46e20e12", - "user_from_id": "2", - }, - { - "id": "390053c9-3931-40e1-839f-585268f6fd3e", - 
"user_id": "1", - "category": "WALLET_SHARED", - "actionable_path": "wallet/21", - "title": "Credits shared", - "text": "A Credit account was shared with you", - "date": "2023-09-29T16:28:13.122Z", - "product": "tis", - "read": False, - "resource_id": "3fb96d89-ff5d-4d27-b5aa-d20d46e20e13", - "user_from_id": "2", - }, - ] - } - ) diff --git a/services/web/server/src/simcore_service_webserver/users/_notifications_rest.py b/services/web/server/src/simcore_service_webserver/users/_notifications_rest.py deleted file mode 100644 index 8f4f920168e5..000000000000 --- a/services/web/server/src/simcore_service_webserver/users/_notifications_rest.py +++ /dev/null @@ -1,134 +0,0 @@ -import logging - -import redis.asyncio as aioredis -from aiohttp import web -from common_library.json_serialization import json_loads -from models_library.api_schemas_webserver.users import MyPermissionGet -from models_library.users import UserPermission -from pydantic import BaseModel -from servicelib.aiohttp import status -from servicelib.aiohttp.requests_validation import ( - parse_request_body_as, - parse_request_path_parameters_as, -) -from servicelib.redis import handle_redis_returns_union_types - -from .._meta import API_VTAG -from ..login.decorators import login_required -from ..products import products_web -from ..redis import get_redis_user_notifications_client -from ..security.decorators import permission_required -from ..utils_aiohttp import envelope_json_response -from . import _users_service -from ._common.schemas import UsersRequestContext -from ._notifications import ( - MAX_NOTIFICATIONS_FOR_USER_TO_KEEP, - MAX_NOTIFICATIONS_FOR_USER_TO_SHOW, - UserNotification, - UserNotificationCreate, - UserNotificationPatch, - get_notification_key, -) - -_logger = logging.getLogger(__name__) - - -routes = web.RouteTableDef() - - -async def _get_user_notifications( - redis_client: aioredis.Redis, user_id: int, product_name: str -) -> list[UserNotification]: - """returns a list of notifications where the latest notification is at index 0""" - raw_notifications: list[str] = await handle_redis_returns_union_types( - redis_client.lrange( - get_notification_key(user_id), -1 * MAX_NOTIFICATIONS_FOR_USER_TO_SHOW, -1 - ) - ) - notifications = [json_loads(x) for x in raw_notifications] - # Make it backwards compatible - for n in notifications: - if "product" not in n: - n["product"] = "UNDEFINED" - # Filter by product - included = [product_name, "UNDEFINED"] - filtered_notifications = [n for n in notifications if n["product"] in included] - return [UserNotification.model_validate(x) for x in filtered_notifications] - - -@routes.get(f"/{API_VTAG}/me/notifications", name="list_user_notifications") -@login_required -@permission_required("user.notifications.read") -async def list_user_notifications(request: web.Request) -> web.Response: - redis_client = get_redis_user_notifications_client(request.app) - req_ctx = UsersRequestContext.model_validate(request) - product_name = products_web.get_product_name(request) - notifications = await _get_user_notifications( - redis_client, req_ctx.user_id, product_name - ) - return envelope_json_response(notifications) - - -@routes.post(f"/{API_VTAG}/me/notifications", name="create_user_notification") -@login_required -@permission_required("user.notifications.write") -async def create_user_notification(request: web.Request) -> web.Response: - # body includes the updated notification - body = await parse_request_body_as(UserNotificationCreate, request) - user_notification = 
UserNotification.create_from_request_data(body) - key = get_notification_key(user_notification.user_id) - - # insert at the head of the list and discard extra notifications - redis_client = get_redis_user_notifications_client(request.app) - async with redis_client.pipeline(transaction=True) as pipe: - pipe.lpush(key, user_notification.model_dump_json()) - pipe.ltrim(key, 0, MAX_NOTIFICATIONS_FOR_USER_TO_KEEP - 1) - await pipe.execute() - - return web.json_response(status=status.HTTP_204_NO_CONTENT) - - -class _NotificationPathParams(BaseModel): - notification_id: str - - -@routes.patch( - f"/{API_VTAG}/me/notifications/{{notification_id}}", - name="mark_notification_as_read", -) -@login_required -@permission_required("user.notifications.update") -async def mark_notification_as_read(request: web.Request) -> web.Response: - redis_client = get_redis_user_notifications_client(request.app) - req_ctx = UsersRequestContext.model_validate(request) - req_path_params = parse_request_path_parameters_as(_NotificationPathParams, request) - body = await parse_request_body_as(UserNotificationPatch, request) - - # NOTE: only the user's notifications can be patched - key = get_notification_key(req_ctx.user_id) - all_user_notifications: list[UserNotification] = [ - UserNotification.model_validate_json(x) - for x in await handle_redis_returns_union_types(redis_client.lrange(key, 0, -1)) - ] - for k, user_notification in enumerate(all_user_notifications): - if req_path_params.notification_id == user_notification.id: - user_notification.read = body.read - await handle_redis_returns_union_types( - redis_client.lset(key, k, user_notification.model_dump_json()) - ) - return web.json_response(status=status.HTTP_204_NO_CONTENT) - - return web.json_response(status=status.HTTP_204_NO_CONTENT) - - -@routes.get(f"/{API_VTAG}/me/permissions", name="list_user_permissions") -@login_required -@permission_required("user.permissions.read") -async def list_user_permissions(request: web.Request) -> web.Response: - req_ctx = UsersRequestContext.model_validate(request) - list_permissions: list[UserPermission] = await _users_service.list_user_permissions( - request.app, user_id=req_ctx.user_id, product_name=req_ctx.product_name - ) - return envelope_json_response( - [MyPermissionGet.from_domain_model(p) for p in list_permissions] - ) diff --git a/services/web/server/src/simcore_service_webserver/users/_preferences_repository.py b/services/web/server/src/simcore_service_webserver/users/_preferences_repository.py deleted file mode 100644 index 316da7534bc6..000000000000 --- a/services/web/server/src/simcore_service_webserver/users/_preferences_repository.py +++ /dev/null @@ -1,54 +0,0 @@ -from aiohttp import web -from models_library.products import ProductName -from models_library.user_preferences import FrontendUserPreference, PreferenceName -from models_library.users import UserID -from simcore_postgres_database.utils_user_preferences import FrontendUserPreferencesRepo - -from ..db.plugin import get_asyncpg_engine - - -def _get_user_preference_name(user_id: UserID, preference_name: PreferenceName) -> str: - return f"{user_id}/{preference_name}" - - -async def get_user_preference( - app: web.Application, - *, - user_id: UserID, - product_name: ProductName, - preference_class: type[FrontendUserPreference], -) -> FrontendUserPreference | None: - async with get_asyncpg_engine(app).connect() as conn: - preference_payload: dict | None = await FrontendUserPreferencesRepo.load( - conn, - user_id=user_id, - 
preference_name=_get_user_preference_name( - user_id, preference_class.get_preference_name() - ), - product_name=product_name, - ) - - return ( - None - if preference_payload is None - else preference_class.model_validate(preference_payload) - ) - - -async def set_user_preference( - app: web.Application, - *, - user_id: UserID, - product_name: ProductName, - preference: FrontendUserPreference, -) -> None: - async with get_asyncpg_engine(app).begin() as conn: - await FrontendUserPreferencesRepo.save( - conn, - user_id=user_id, - preference_name=_get_user_preference_name( - user_id, preference.get_preference_name() - ), - product_name=product_name, - payload=preference.to_db(), - ) diff --git a/services/web/server/src/simcore_service_webserver/users/_preferences_rest.py b/services/web/server/src/simcore_service_webserver/users/_preferences_rest.py deleted file mode 100644 index ba7094227322..000000000000 --- a/services/web/server/src/simcore_service_webserver/users/_preferences_rest.py +++ /dev/null @@ -1,60 +0,0 @@ -import functools - -from aiohttp import web -from models_library.api_schemas_webserver.users_preferences import ( - PatchPathParams, - PatchRequestBody, -) -from servicelib.aiohttp import status -from servicelib.aiohttp.requests_validation import ( - parse_request_body_as, - parse_request_path_parameters_as, -) -from servicelib.aiohttp.typing_extension import Handler -from simcore_postgres_database.utils_user_preferences import ( - CouldNotCreateOrUpdateUserPreferenceError, -) - -from .._meta import API_VTAG -from ..login.decorators import login_required -from ..models import RequestContext -from . import _preferences_service -from .exceptions import FrontendUserPreferenceIsNotDefinedError - -routes = web.RouteTableDef() - - -def _handle_users_exceptions(handler: Handler): - @functools.wraps(handler) - async def wrapper(request: web.Request) -> web.StreamResponse: - try: - return await handler(request) - - except ( - CouldNotCreateOrUpdateUserPreferenceError, - FrontendUserPreferenceIsNotDefinedError, - ) as exc: - raise web.HTTPNotFound(text=f"{exc}") from exc - - return wrapper - - -@routes.patch( - f"/{API_VTAG}/me/preferences/{{preference_id}}", - name="set_frontend_preference", -) -@login_required -@_handle_users_exceptions -async def set_frontend_preference(request: web.Request) -> web.Response: - req_ctx = RequestContext.model_validate(request) - req_body = await parse_request_body_as(PatchRequestBody, request) - req_path_params = parse_request_path_parameters_as(PatchPathParams, request) - - await _preferences_service.set_frontend_user_preference( - request.app, - user_id=req_ctx.user_id, - product_name=req_ctx.product_name, - frontend_preference_identifier=req_path_params.preference_id, - value=req_body.value, - ) - return web.json_response(status=status.HTTP_204_NO_CONTENT) diff --git a/services/web/server/src/simcore_service_webserver/users/_tokens_service.py b/services/web/server/src/simcore_service_webserver/users/_tokens_service.py deleted file mode 100644 index 18e2f6323fd6..000000000000 --- a/services/web/server/src/simcore_service_webserver/users/_tokens_service.py +++ /dev/null @@ -1,90 +0,0 @@ -""" Private user tokens from external services (e.g. 
dat-core) - - Implemented as a stand-alone API but currently only exposed to the handlers -""" -import sqlalchemy as sa -from aiohttp import web -from models_library.users import UserID, UserThirdPartyToken -from sqlalchemy import and_, literal_column - -from ..db.models import tokens -from ..db.plugin import get_database_engine -from .exceptions import TokenNotFoundError - - -async def create_token( - app: web.Application, user_id: UserID, token: UserThirdPartyToken -) -> UserThirdPartyToken: - async with get_database_engine(app).acquire() as conn: - await conn.execute( - tokens.insert().values( - user_id=user_id, - token_service=token.service, - token_data=token.model_dump(mode="json"), - ) - ) - return token - - -async def list_tokens( - app: web.Application, user_id: UserID -) -> list[UserThirdPartyToken]: - user_tokens: list[UserThirdPartyToken] = [] - async with get_database_engine(app).acquire() as conn: - async for row in conn.execute( - sa.select(tokens.c.token_data).where(tokens.c.user_id == user_id) - ): - user_tokens.append(UserThirdPartyToken.model_construct(**row["token_data"])) - return user_tokens - - -async def get_token( - app: web.Application, user_id: UserID, service_id: str -) -> UserThirdPartyToken: - async with get_database_engine(app).acquire() as conn: - result = await conn.execute( - sa.select(tokens.c.token_data).where( - and_(tokens.c.user_id == user_id, tokens.c.token_service == service_id) - ) - ) - if row := await result.first(): - return UserThirdPartyToken.model_construct(**row["token_data"]) - raise TokenNotFoundError(service_id=service_id) - - -async def update_token( - app: web.Application, user_id: UserID, service_id: str, token_data: dict[str, str] -) -> UserThirdPartyToken: - async with get_database_engine(app).acquire() as conn: - result = await conn.execute( - sa.select(tokens.c.token_data, tokens.c.token_id).where( - (tokens.c.user_id == user_id) & (tokens.c.token_service == service_id) - ) - ) - row = await result.first() - if not row: - raise TokenNotFoundError(service_id=service_id) - - data = dict(row["token_data"]) - tid = row["token_id"] - data.update(token_data) - - resp = await conn.execute( - tokens.update() - .where(tokens.c.token_id == tid) - .values(token_data=data) - .returning(literal_column("*")) - ) - assert resp.rowcount == 1 # nosec - updated_token = await resp.fetchone() - assert updated_token # nosec - return UserThirdPartyToken.model_construct(**updated_token["token_data"]) - - -async def delete_token(app: web.Application, user_id: UserID, service_id: str) -> None: - async with get_database_engine(app).acquire() as conn: - await conn.execute( - tokens.delete().where( - and_(tokens.c.user_id == user_id, tokens.c.token_service == service_id) - ) - ) diff --git a/services/web/server/src/simcore_service_webserver/users/_users_repository.py b/services/web/server/src/simcore_service_webserver/users/_users_repository.py index 41c358cb520a..eeaebdd4599d 100644 --- a/services/web/server/src/simcore_service_webserver/users/_users_repository.py +++ b/services/web/server/src/simcore_service_webserver/users/_users_repository.py @@ -1,11 +1,10 @@ import contextlib import logging -from typing import Any, cast +from typing import Any import sqlalchemy as sa from aiohttp import web -from common_library.exclude import Unset, is_unset -from common_library.users_enums import AccountRequestStatus, UserRole +from common_library.users_enums import UserRole from models_library.groups import GroupID from models_library.products import ProductName from 
models_library.users import ( @@ -19,10 +18,6 @@ from simcore_postgres_database.models.groups import groups, user_to_groups from simcore_postgres_database.models.products import products from simcore_postgres_database.models.users import UserStatus, users -from simcore_postgres_database.models.users_details import ( - users_pre_registration_details, -) -from simcore_postgres_database.utils import as_postgres_sql_query_str from simcore_postgres_database.utils_groups_extra_properties import ( GroupExtraPropertiesNotFoundError, GroupExtraPropertiesRepo, @@ -43,7 +38,7 @@ from sqlalchemy.ext.asyncio import AsyncConnection, AsyncEngine from ..db.plugin import get_asyncpg_engine -from ._common.models import FullNameDict, ToUserUpdateDB +from ._models import FullNameDict from .exceptions import ( BillingDetailsNotFoundError, UserNameDuplicateError, @@ -199,6 +194,18 @@ async def get_user_id_from_pgid(app: web.Application, *, primary_gid: int) -> Us return user_id +async def get_user_email_legacy(engine: AsyncEngine, *, user_id: UserID | None) -> str: + if not user_id: + return "not_a_user@unknown.com" + async with pass_or_acquire_connection(engine=engine) as conn: + email: str | None = await conn.scalar( + sa.select( + users.c.email, + ).where(users.c.id == user_id) + ) + return email or "Unknown" + + async def get_user_fullname(app: web.Application, *, user_id: UserID) -> FullNameDict: """ :raises UserNotFoundError: @@ -359,19 +366,23 @@ async def get_user_products( async def get_user_billing_details( - engine: AsyncEngine, connection: AsyncConnection | None = None, *, user_id: UserID + engine: AsyncEngine, + connection: AsyncConnection | None = None, + *, + user_id: UserID, + product_name: ProductName, ) -> UserBillingDetails: """ + Returns UserBillingDetails for the given user when registered in a product or None Raises: BillingDetailsNotFoundError """ - async with pass_or_acquire_connection(engine, connection) as conn: - query = UsersRepo.get_billing_details_query(user_id=user_id) - result = await conn.execute(query) - row = result.first() - if not row: - raise BillingDetailsNotFoundError(user_id=user_id) - return UserBillingDetails.model_validate(row) + row = await UsersRepo(engine).get_billing_details( + connection, product_name=product_name, user_id=user_id + ) + if not row: + raise BillingDetailsNotFoundError(user_id=user_id) + return UserBillingDetails.model_validate(row) async def delete_user_by_id( @@ -433,6 +444,7 @@ async def get_my_profile(app: web.Application, *, user_id: UserID) -> MyProfile: users.c.last_name, users.c.email, users.c.role, + users.c.phone, sa.func.json_build_object( "hide_username", users.c.privacy_hide_username, @@ -450,7 +462,7 @@ async def get_my_profile(app: web.Application, *, user_id: UserID) -> MyProfile: ).label("expiration_date"), ).where(users.c.id == user_id) ) - row = await result.first() + row = await result.one_or_none() if not row: raise UserNotFoundError(user_id=user_id) @@ -464,16 +476,16 @@ async def update_user_profile( app: web.Application, *, user_id: UserID, - update: ToUserUpdateDB, + updated_values: dict[str, Any], ) -> None: """ Raises: UserNotFoundError UserNameAlreadyExistsError """ - user_id = _parse_as_user(user_id) + if updated_values: + user_id = _parse_as_user(user_id) - if updated_values := update.to_db(): try: async with transaction_context(engine=get_asyncpg_engine(app)) as conn: await conn.execute( @@ -494,460 +506,3 @@ async def update_user_profile( ) from err raise # not due to name duplication - - -# -# PRE-REGISTRATION -# - 
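The renamed UserModelAdapter (from_rest_schema_model / to_db_values) and the reworked update_user_profile signature, which now takes plain column values, fit together roughly as sketched below. The composing function is hypothetical; the actual wiring lives in _users_service and is not shown in this hunk.

from typing import Any

from aiohttp import web
from models_library.api_schemas_webserver.users import MyProfileRestPatch
from models_library.users import UserID
from simcore_service_webserver.users import _users_repository
from simcore_service_webserver.users._models import UserModelAdapter


async def _update_profile_sketch(
    app: web.Application, *, user_id: UserID, patch: MyProfileRestPatch
) -> None:
    # REST schema -> flattened UserDB column values (only fields that were set)
    updated_values: dict[str, Any] = UserModelAdapter.from_rest_schema_model(patch).to_db_values()
    # the repository now receives plain column values instead of an adapter object
    await _users_repository.update_user_profile(
        app, user_id=user_id, updated_values=updated_values
    )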
- -async def create_user_pre_registration( - engine: AsyncEngine, - connection: AsyncConnection | None = None, - *, - email: str, - created_by: UserID | None = None, - product_name: ProductName, - link_to_existing_user: bool = True, - **other_values, -) -> int: - """Creates a user pre-registration entry. - - Args: - engine: Database engine - connection: Optional existing connection - email: Email address for the pre-registration - created_by: ID of the user creating the pre-registration (None for anonymous) - product_name: Product name the user is requesting access to - link_to_existing_user: Whether to link the pre-registration to an existing user with the same email - **other_values: Additional values to insert in the pre-registration entry - - Returns: - ID of the created pre-registration - """ - async with transaction_context(engine, connection) as conn: - # If link_to_existing_user is True, try to find a matching user - user_id = None - if link_to_existing_user: - result = await conn.execute( - sa.select(users.c.id).where(users.c.email == email) - ) - user = result.one_or_none() - if user: - user_id = user.id - - # Insert the pre-registration record - values = { - "pre_email": email, - "product_name": product_name, - **other_values, - } - - # Only add created_by if not None - if created_by is not None: - values["created_by"] = created_by - - # Add user_id if found - if user_id is not None: - values["user_id"] = user_id - - result = await conn.execute( - sa.insert(users_pre_registration_details) - .values(**values) - .returning(users_pre_registration_details.c.id) - ) - pre_registration_id: int = result.scalar_one() - return pre_registration_id - - -async def list_user_pre_registrations( - engine: AsyncEngine, - connection: AsyncConnection | None = None, - *, - filter_by_pre_email: str | None = None, - filter_by_product_name: ProductName | Unset = Unset.VALUE, - filter_by_account_request_status: AccountRequestStatus | None = None, - pagination_limit: int = 50, - pagination_offset: int = 0, -) -> tuple[list[dict[str, Any]], int]: - """Lists user pre-registrations with optional filters. 
- - Args: - engine: Database engine - connection: Optional existing connection - filter_by_pre_email: Filter by email pattern (SQL LIKE pattern) - filter_by_product_name: Filter by product name - filter_by_account_request_status: Filter by account request status - pagination_limit: Maximum number of results to return - pagination_offset: Number of results to skip (for pagination) - - Returns: - Tuple of (list of pre-registration records, total count) - """ - # Base query conditions - where_conditions = [] - - # Apply filters if provided - if filter_by_pre_email is not None: - where_conditions.append( - users_pre_registration_details.c.pre_email.ilike(f"%{filter_by_pre_email}%") - ) - - if not is_unset(filter_by_product_name): - where_conditions.append( - users_pre_registration_details.c.product_name == filter_by_product_name - ) - - if filter_by_account_request_status is not None: - where_conditions.append( - users_pre_registration_details.c.account_request_status - == filter_by_account_request_status - ) - - # Combine conditions - where_clause = sa.and_(*where_conditions) if where_conditions else sa.true() - - # Create an alias for the users table for the created_by join - creator_users_alias = sa.alias(users, name="creator") - reviewer_users_alias = sa.alias(users, name="reviewer") - - # Count query for pagination - count_query = ( - sa.select(sa.func.count().label("total")) - .select_from(users_pre_registration_details) - .where(where_clause) - ) - - # Main query to get pre-registration data - main_query = ( - sa.select( - users_pre_registration_details.c.id, - users_pre_registration_details.c.user_id, - users_pre_registration_details.c.pre_email, - users_pre_registration_details.c.pre_first_name, - users_pre_registration_details.c.pre_last_name, - users_pre_registration_details.c.pre_phone, - users_pre_registration_details.c.institution, - users_pre_registration_details.c.address, - users_pre_registration_details.c.city, - users_pre_registration_details.c.state, - users_pre_registration_details.c.postal_code, - users_pre_registration_details.c.country, - users_pre_registration_details.c.product_name, - users_pre_registration_details.c.account_request_status, - users_pre_registration_details.c.extras, - users_pre_registration_details.c.created, - users_pre_registration_details.c.modified, - users_pre_registration_details.c.created_by, - creator_users_alias.c.name.label("created_by_name"), - users_pre_registration_details.c.account_request_reviewed_by, - reviewer_users_alias.c.name.label("reviewed_by_name"), - users_pre_registration_details.c.account_request_reviewed_at, - ) - .select_from( - users_pre_registration_details.outerjoin( - creator_users_alias, - users_pre_registration_details.c.created_by == creator_users_alias.c.id, - ).outerjoin( - reviewer_users_alias, - users_pre_registration_details.c.account_request_reviewed_by - == reviewer_users_alias.c.id, - ) - ) - .where(where_clause) - .order_by( - users_pre_registration_details.c.created.desc(), - users_pre_registration_details.c.pre_email, - ) - .limit(pagination_limit) - .offset(pagination_offset) - ) - - async with pass_or_acquire_connection(engine, connection) as conn: - # Get total count - count_result = await conn.execute(count_query) - total_count = count_result.scalar_one() - - # Get pre-registration records - result = await conn.execute(main_query) - records = result.mappings().all() - - return cast(list[dict[str, Any]], list(records)), total_count - - -async def review_user_pre_registration( - engine: AsyncEngine, - 
connection: AsyncConnection | None = None, - *, - pre_registration_id: int, - reviewed_by: UserID, - new_status: AccountRequestStatus, -) -> None: - """Updates the account request status of a pre-registered user. - - Args: - engine: The database engine - connection: Optional existing connection - pre_registration_id: ID of the pre-registration record - reviewed_by: ID of the user who reviewed the request - new_status: New status (APPROVED or REJECTED) - """ - if new_status not in (AccountRequestStatus.APPROVED, AccountRequestStatus.REJECTED): - msg = f"Invalid status for review: {new_status}. Must be APPROVED or REJECTED." - raise ValueError(msg) - - async with transaction_context(engine, connection) as conn: - await conn.execute( - users_pre_registration_details.update() - .values( - account_request_status=new_status, - account_request_reviewed_by=reviewed_by, - account_request_reviewed_at=sa.func.now(), - ) - .where(users_pre_registration_details.c.id == pre_registration_id) - ) - - -# -# PRE AND REGISTERED USERS -# - - -async def search_merged_pre_and_registered_users( - engine: AsyncEngine, - connection: AsyncConnection | None = None, - *, - email_like: str, - product_name: ProductName | None = None, -) -> list[Row]: - users_alias = sa.alias(users, name="users_alias") - - invited_by = ( - sa.select( - users_alias.c.name, - ) - .where(users_pre_registration_details.c.created_by == users_alias.c.id) - .label("invited_by") - ) - - async with pass_or_acquire_connection(engine, connection) as conn: - columns = ( - users_pre_registration_details.c.id, - users.c.first_name, - users.c.last_name, - users.c.email, - users.c.phone, - users_pre_registration_details.c.pre_email, - users_pre_registration_details.c.pre_first_name, - users_pre_registration_details.c.pre_last_name, - users_pre_registration_details.c.institution, - users_pre_registration_details.c.pre_phone, - users_pre_registration_details.c.address, - users_pre_registration_details.c.city, - users_pre_registration_details.c.state, - users_pre_registration_details.c.postal_code, - users_pre_registration_details.c.country, - users_pre_registration_details.c.user_id, - users_pre_registration_details.c.extras, - users_pre_registration_details.c.account_request_status, - users_pre_registration_details.c.account_request_reviewed_by, - users_pre_registration_details.c.account_request_reviewed_at, - users.c.status, - invited_by, - ) - - join_condition = users.c.id == users_pre_registration_details.c.user_id - if product_name: - join_condition = join_condition & ( - users_pre_registration_details.c.product_name == product_name - ) - - left_outer_join = ( - sa.select(*columns) - .select_from( - users_pre_registration_details.outerjoin(users, join_condition) - ) - .where(users_pre_registration_details.c.pre_email.like(email_like)) - ) - right_outer_join = ( - sa.select(*columns) - .select_from( - users.outerjoin( - users_pre_registration_details, - join_condition, - ) - ) - .where(users.c.email.like(email_like)) - ) - - result = await conn.stream(sa.union(left_outer_join, right_outer_join)) - return [row async for row in result] - - -async def list_merged_pre_and_registered_users( - engine: AsyncEngine, - connection: AsyncConnection | None = None, - *, - product_name: ProductName, - filter_any_account_request_status: list[AccountRequestStatus] | None = None, - filter_include_deleted: bool = False, - pagination_limit: int = 50, - pagination_offset: int = 0, -) -> tuple[list[dict[str, Any]], int]: - """Retrieves and merges users from both users 
and pre-registration tables. - - This returns: - 1. Users who are registered with the platform (in users table) - 2. Users who are pre-registered (in users_pre_registration_details table) - 3. Users who are both registered and pre-registered - - Args: - engine: Database engine - connection: Optional existing connection - product_name: Product name to filter by - filter_any_account_request_status: If provided, only returns users with account request status in this list - (only pre-registered users with any of these statuses will be included) - filter_include_deleted: Whether to include deleted users - pagination_limit: Maximum number of results to return - pagination_offset: Number of results to skip (for pagination) - - Returns: - Tuple of (list of merged user data, total count) - """ - # Base where conditions for both queries - pre_reg_where = [users_pre_registration_details.c.product_name == product_name] - users_where = [] - - # Add account request status filter if specified - if filter_any_account_request_status: - pre_reg_where.append( - users_pre_registration_details.c.account_request_status.in_( - filter_any_account_request_status - ) - ) - - # Add filter for deleted users - if not filter_include_deleted: - users_where.append(users.c.status != UserStatus.DELETED) - - # Query for pre-registered users that are not yet in the users table - # We need to left join with users to identify if the pre-registered user is already in the system - pre_reg_query = ( - sa.select( - users_pre_registration_details.c.id, - users_pre_registration_details.c.pre_email.label("email"), - users_pre_registration_details.c.pre_first_name.label("first_name"), - users_pre_registration_details.c.pre_last_name.label("last_name"), - users_pre_registration_details.c.pre_phone.label("phone"), - users_pre_registration_details.c.institution, - users_pre_registration_details.c.address, - users_pre_registration_details.c.city, - users_pre_registration_details.c.state, - users_pre_registration_details.c.postal_code, - users_pre_registration_details.c.country, - users_pre_registration_details.c.user_id.label("pre_reg_user_id"), - users_pre_registration_details.c.extras, - users_pre_registration_details.c.created, - users_pre_registration_details.c.account_request_status, - users_pre_registration_details.c.account_request_reviewed_by, - users_pre_registration_details.c.account_request_reviewed_at, - users.c.id.label("user_id"), - users.c.name.label("user_name"), - users.c.status, - # Use created_by directly instead of a subquery - users_pre_registration_details.c.created_by.label("created_by"), - sa.literal(True).label("is_pre_registered"), - ) - .select_from( - users_pre_registration_details.outerjoin( - users, users_pre_registration_details.c.user_id == users.c.id - ) - ) - .where(sa.and_(*pre_reg_where)) - ) - - # Query for users that are associated with the product through groups - users_query = ( - sa.select( - sa.literal(None).label("id"), - users.c.email, - users.c.first_name, - users.c.last_name, - users.c.phone, - sa.literal(None).label("institution"), - sa.literal(None).label("address"), - sa.literal(None).label("city"), - sa.literal(None).label("state"), - sa.literal(None).label("postal_code"), - sa.literal(None).label("country"), - sa.literal(None).label("pre_reg_user_id"), - sa.literal(None).label("extras"), - users.c.created_at.label("created"), - sa.literal(None).label("account_request_status"), - sa.literal(None).label("account_request_reviewed_by"), - sa.literal(None).label("account_request_reviewed_at"), 
- users.c.id.label("user_id"), - users.c.name.label("user_name"), - users.c.status, - # Match the created_by field from the pre_reg query - sa.literal(None).label("created_by"), - sa.literal(False).label("is_pre_registered"), - ) - .select_from( - users.join(user_to_groups, user_to_groups.c.uid == users.c.id) - .join(groups, groups.c.gid == user_to_groups.c.gid) - .join(products, products.c.group_id == groups.c.gid) - ) - .where(sa.and_(products.c.name == product_name, *users_where)) - ) - - # If filtering by account request status, we only want pre-registered users with any of those statuses - # No need to union with regular users as they don't have account_request_status - merged_query: sa.sql.Select | sa.sql.CompoundSelect - if filter_any_account_request_status: - merged_query = pre_reg_query - else: - merged_query = pre_reg_query.union_all(users_query) - - # Add distinct on email to eliminate duplicates - merged_query_subq = merged_query.subquery() - distinct_query = ( - sa.select(merged_query_subq) - .select_from(merged_query_subq) - .distinct(merged_query_subq.c.email) - .order_by( - merged_query_subq.c.email, - # Prioritize pre-registration records if duplicate emails exist - merged_query_subq.c.is_pre_registered.desc(), - merged_query_subq.c.created.desc(), - ) - .limit(pagination_limit) - .offset(pagination_offset) - ) - - # Count query (for pagination) - count_query = sa.select(sa.func.count().label("total")).select_from( - sa.select(merged_query_subq.c.email) - .select_from(merged_query_subq) - .distinct() - .subquery() - ) - - _logger.debug( - "%s\n%s\n%s\n%s", - "-" * 100, - as_postgres_sql_query_str(distinct_query), - "-" * 100, - as_postgres_sql_query_str(count_query), - ) - - async with pass_or_acquire_connection(engine, connection) as conn: - # Get total count - count_result = await conn.execute(count_query) - total_count = count_result.scalar_one() - - # Get user records - result = await conn.execute(distinct_query) - records = result.mappings().all() - - return cast(list[dict[str, Any]], records), total_count diff --git a/services/web/server/src/simcore_service_webserver/users/_users_rest.py b/services/web/server/src/simcore_service_webserver/users/_users_rest.py deleted file mode 100644 index 66bfa26e3fda..000000000000 --- a/services/web/server/src/simcore_service_webserver/users/_users_rest.py +++ /dev/null @@ -1,323 +0,0 @@ -import logging -from contextlib import suppress -from typing import Any - -from aiohttp import web -from common_library.users_enums import AccountRequestStatus -from models_library.api_schemas_webserver.users import ( - MyProfileGet, - MyProfilePatch, - UserAccountApprove, - UserAccountGet, - UserAccountReject, - UserAccountSearchQueryParams, - UserGet, - UsersAccountListQueryParams, - UsersSearch, -) -from models_library.rest_pagination import Page -from models_library.rest_pagination_utils import paginate_data -from servicelib.aiohttp import status -from servicelib.aiohttp.requests_validation import ( - parse_request_body_as, - parse_request_query_parameters_as, -) -from servicelib.rest_constants import RESPONSE_MODEL_POLICY - -from .._meta import API_VTAG -from ..exception_handling import ( - ExceptionToHttpErrorMap, - HttpErrorInfo, - exception_handling_decorator, - to_exceptions_handlers_map, -) -from ..groups import api as groups_api -from ..groups.exceptions import GroupNotFoundError -from ..login.decorators import login_required -from ..products import products_web -from ..products.models import Product -from ..security.decorators import 
permission_required -from ..utils_aiohttp import create_json_response_from_page, envelope_json_response -from . import _users_service -from ._common.schemas import PreRegisteredUserGet, UsersRequestContext -from .exceptions import ( - AlreadyPreRegisteredError, - MissingGroupExtraPropertiesForProductError, - PendingPreRegistrationNotFoundError, - UserNameDuplicateError, - UserNotFoundError, -) - -_logger = logging.getLogger(__name__) - - -_TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { - PendingPreRegistrationNotFoundError: HttpErrorInfo( - status.HTTP_400_BAD_REQUEST, - PendingPreRegistrationNotFoundError.msg_template, - ), - UserNotFoundError: HttpErrorInfo( - status.HTTP_404_NOT_FOUND, - "This user cannot be found. Either it is not registered or has enabled privacy settings.", - ), - UserNameDuplicateError: HttpErrorInfo( - status.HTTP_409_CONFLICT, - "Username '{user_name}' is already taken. " - "Consider '{alternative_user_name}' instead.", - ), - AlreadyPreRegisteredError: HttpErrorInfo( - status.HTTP_409_CONFLICT, - "Found {num_found} matches for '{email}'. Cannot pre-register existing user", - ), - MissingGroupExtraPropertiesForProductError: HttpErrorInfo( - status.HTTP_503_SERVICE_UNAVAILABLE, - "The product is not ready for use until the configuration is fully completed. " - "Please wait and try again. ", - ), -} - -_handle_users_exceptions = exception_handling_decorator( - # Transforms raised service exceptions into controller-errors (i.e. http 4XX,5XX responses) - to_exceptions_handlers_map(_TO_HTTP_ERROR_MAP) -) - - -routes = web.RouteTableDef() - -# -# MY PROFILE: /me -# - - -@routes.get(f"/{API_VTAG}/me", name="get_my_profile") -@login_required -@_handle_users_exceptions -async def get_my_profile(request: web.Request) -> web.Response: - product: Product = products_web.get_current_product(request) - req_ctx = UsersRequestContext.model_validate(request) - - groups_by_type = await groups_api.list_user_groups_with_read_access( - request.app, user_id=req_ctx.user_id - ) - - assert groups_by_type.primary - assert groups_by_type.everyone - - my_product_group = None - - if product.group_id: - with suppress(GroupNotFoundError): - # Product is optional - my_product_group = await groups_api.get_product_group_for_user( - app=request.app, - user_id=req_ctx.user_id, - product_gid=product.group_id, - ) - - my_profile, preferences = await _users_service.get_my_profile( - request.app, user_id=req_ctx.user_id, product_name=req_ctx.product_name - ) - - profile = MyProfileGet.from_domain_model( - my_profile, groups_by_type, my_product_group, preferences - ) - - return envelope_json_response(profile) - - -@routes.patch(f"/{API_VTAG}/me", name="update_my_profile") -@login_required -@permission_required("user.profile.update") -@_handle_users_exceptions -async def update_my_profile(request: web.Request) -> web.Response: - req_ctx = UsersRequestContext.model_validate(request) - profile_update = await parse_request_body_as(MyProfilePatch, request) - - await _users_service.update_my_profile( - request.app, user_id=req_ctx.user_id, update=profile_update - ) - return web.json_response(status=status.HTTP_204_NO_CONTENT) - - -# -# USERS (public) -# - - -@routes.post(f"/{API_VTAG}/users:search", name="search_users") -@login_required -@permission_required("user.read") -@_handle_users_exceptions -async def search_users(request: web.Request) -> web.Response: - req_ctx = UsersRequestContext.model_validate(request) - assert req_ctx.product_name # nosec - - # NOTE: Decided for body instead of query parameters 
because it is easier for the front-end - search_params = await parse_request_body_as(UsersSearch, request) - - found = await _users_service.search_public_users( - request.app, - caller_id=req_ctx.user_id, - match_=search_params.match_, - limit=search_params.limit, - ) - - return envelope_json_response([UserGet.from_domain_model(user) for user in found]) - - -# -# USERS (only POs) -# - -_RESPONSE_MODEL_MINIMAL_POLICY = RESPONSE_MODEL_POLICY.copy() -_RESPONSE_MODEL_MINIMAL_POLICY["exclude_none"] = True - - -@routes.get(f"/{API_VTAG}/admin/user-accounts", name="list_users_accounts") -@login_required -@permission_required("admin.users.read") -@_handle_users_exceptions -async def list_users_accounts(request: web.Request) -> web.Response: - req_ctx = UsersRequestContext.model_validate(request) - assert req_ctx.product_name # nosec - - query_params = parse_request_query_parameters_as( - UsersAccountListQueryParams, request - ) - - if query_params.review_status == "PENDING": - filter_any_account_request_status = [AccountRequestStatus.PENDING] - elif query_params.review_status == "REVIEWED": - filter_any_account_request_status = [ - AccountRequestStatus.APPROVED, - AccountRequestStatus.REJECTED, - ] - else: - # ALL - filter_any_account_request_status = None - - users, total_count = await _users_service.list_user_accounts( - request.app, - product_name=req_ctx.product_name, - filter_any_account_request_status=filter_any_account_request_status, - pagination_limit=query_params.limit, - pagination_offset=query_params.offset, - ) - - def _to_domain_model(user: dict[str, Any]) -> UserAccountGet: - return UserAccountGet( - extras=user.pop("extras") or {}, pre_registration_id=user.pop("id"), **user - ) - - page = Page[UserAccountGet].model_validate( - paginate_data( - chunk=[_to_domain_model(user) for user in users], - request_url=request.url, - total=total_count, - limit=query_params.limit, - offset=query_params.offset, - ) - ) - - return create_json_response_from_page(page) - - -@routes.get(f"/{API_VTAG}/admin/user-accounts:search", name="search_user_accounts") -@login_required -@permission_required("admin.users.read") -@_handle_users_exceptions -async def search_user_accounts(request: web.Request) -> web.Response: - req_ctx = UsersRequestContext.model_validate(request) - assert req_ctx.product_name # nosec - - query_params: UserAccountSearchQueryParams = parse_request_query_parameters_as( - UserAccountSearchQueryParams, request - ) - - found = await _users_service.search_users_accounts( - request.app, email_glob=query_params.email, include_products=True - ) - - return envelope_json_response( - [ - user_for_admin.model_dump(**_RESPONSE_MODEL_MINIMAL_POLICY) - for user_for_admin in found - ] - ) - - -@routes.post( - f"/{API_VTAG}/admin/user-accounts:pre-register", name="pre_register_user_account" -) -@login_required -@permission_required("admin.users.write") -@_handle_users_exceptions -async def pre_register_user_account(request: web.Request) -> web.Response: - req_ctx = UsersRequestContext.model_validate(request) - pre_user_profile = await parse_request_body_as(PreRegisteredUserGet, request) - - user_profile = await _users_service.pre_register_user( - request.app, - profile=pre_user_profile, - creator_user_id=req_ctx.user_id, - product_name=req_ctx.product_name, - ) - - return envelope_json_response( - user_profile.model_dump(**_RESPONSE_MODEL_MINIMAL_POLICY) - ) - - -@routes.post(f"/{API_VTAG}/admin/user-accounts:approve", name="approve_user_account") -@login_required 
-@permission_required("admin.users.write") -@_handle_users_exceptions -async def approve_user_account(request: web.Request) -> web.Response: - req_ctx = UsersRequestContext.model_validate(request) - assert req_ctx.product_name # nosec - - approval_data = await parse_request_body_as(UserAccountApprove, request) - - if approval_data.invitation: - _logger.debug( - "TODO: User is being approved with invitation %s: \n" - "1. Approve user account\n" - "2. Generate invitation\n" - "3. Store invitation in extras\n" - "4. Send invitation to user %s\n", - approval_data.invitation.model_dump_json(indent=1), - approval_data.email, - ) - - # Approve the user account, passing the current user's ID as the reviewer - pre_registration_id = await _users_service.approve_user_account( - request.app, - pre_registration_email=approval_data.email, - product_name=req_ctx.product_name, - reviewer_id=req_ctx.user_id, - ) - assert pre_registration_id # nosec - - return web.json_response(status=status.HTTP_204_NO_CONTENT) - - -@routes.post(f"/{API_VTAG}/admin/user-accounts:reject", name="reject_user_account") -@login_required -@permission_required("admin.users.write") -@_handle_users_exceptions -async def reject_user_account(request: web.Request) -> web.Response: - req_ctx = UsersRequestContext.model_validate(request) - assert req_ctx.product_name # nosec - - rejection_data = await parse_request_body_as(UserAccountReject, request) - - # Reject the user account, passing the current user's ID as the reviewer - pre_registration_id = await _users_service.reject_user_account( - request.app, - pre_registration_email=rejection_data.email, - product_name=req_ctx.product_name, - reviewer_id=req_ctx.user_id, - ) - assert pre_registration_id # nosec - - return web.json_response(status=status.HTTP_204_NO_CONTENT) diff --git a/services/web/server/src/simcore_service_webserver/users/_users_service.py b/services/web/server/src/simcore_service_webserver/users/_users_service.py index 7d968373f8ad..6b715d29fae1 100644 --- a/services/web/server/src/simcore_service_webserver/users/_users_service.py +++ b/services/web/server/src/simcore_service_webserver/users/_users_service.py @@ -3,94 +3,39 @@ import pycountry from aiohttp import web -from common_library.users_enums import AccountRequestStatus -from models_library.api_schemas_webserver.users import MyProfilePatch, UserAccountGet +from models_library.api_schemas_webserver.users import MyProfileRestPatch +from models_library.api_schemas_webserver.users_preferences import AggregatedPreferences from models_library.basic_types import IDStr from models_library.emails import LowerCaseEmailStr from models_library.groups import GroupID from models_library.payments import UserInvoiceAddress from models_library.products import ProductName -from models_library.users import UserBillingDetails, UserID, UserPermission +from models_library.users import MyProfile, UserBillingDetails, UserID, UserPermission from pydantic import TypeAdapter from simcore_postgres_database.models.users import UserStatus from simcore_postgres_database.utils_groups_extra_properties import ( GroupExtraPropertiesNotFoundError, ) +from simcore_postgres_database.utils_users import UsersRepo from ..db.plugin import get_asyncpg_engine -from ..security.api import clean_auth_policy_cache -from . import _preferences_service, _users_repository -from ._common.models import ( +from ..security import security_service +from ..user_preferences import user_preferences_service +from . 
import _users_repository +from ._models import ( FullNameDict, - ToUserUpdateDB, UserCredentialsTuple, UserDisplayAndIdNamesTuple, UserIdNamesTuple, + UserModelAdapter, ) -from ._common.schemas import PreRegisteredUserGet from .exceptions import ( - AlreadyPreRegisteredError, MissingGroupExtraPropertiesForProductError, - PendingPreRegistrationNotFoundError, + UserNotFoundError, ) _logger = logging.getLogger(__name__) -# -# PRE-REGISTRATION -# - - -async def pre_register_user( - app: web.Application, - *, - profile: PreRegisteredUserGet, - creator_user_id: UserID, - product_name: ProductName, -) -> UserAccountGet: - - found = await search_users_accounts( - app, email_glob=profile.email, product_name=product_name, include_products=False - ) - if found: - raise AlreadyPreRegisteredError(num_found=len(found), email=profile.email) - - details = profile.model_dump( - include={ - "first_name", - "last_name", - "phone", - "institution", - "address", - "city", - "state", - "country", - "postal_code", - "extras", - }, - exclude_none=True, - ) - - for key in ("first_name", "last_name", "phone"): - if key in details: - details[f"pre_{key}"] = details.pop(key) - - await _users_repository.create_user_pre_registration( - get_asyncpg_engine(app), - email=profile.email, - created_by=creator_user_id, - product_name=product_name, - **details, - ) - - found = await search_users_accounts( - app, email_glob=profile.email, product_name=product_name, include_products=False - ) - - assert len(found) == 1 # nosec - return found[0] - - # # GET USERS # @@ -124,6 +69,15 @@ async def get_user(app: web.Application, user_id: UserID) -> dict[str, Any]: ) +async def get_user_email_legacy(app: web.Application, user_id: UserID | None) -> str: + """ + :raises UserNotFoundError: if missing but NOT if marked for deletion! 
+ """ + return await _users_repository.get_user_email_legacy( + engine=get_asyncpg_engine(app), user_id=user_id + ) + + async def get_user_primary_group_id(app: web.Application, user_id: UserID) -> GroupID: return await _users_repository.get_user_primary_group_id( engine=get_asyncpg_engine(app), user_id=user_id @@ -207,21 +161,19 @@ async def get_user_display_and_id_names( async def get_user_credentials( app: web.Application, *, user_id: UserID ) -> UserCredentialsTuple: - row = await _users_repository.get_user_or_raise( - get_asyncpg_engine(app), - user_id=user_id, - return_column_names=[ - "name", - "first_name", - "email", - "password_hash", - ], - ) + + repo = UsersRepo(get_asyncpg_engine(app)) + + user_row = await repo.get_user_by_id_or_none(user_id=user_id) + if user_row is None: + raise UserNotFoundError(user_id=user_id) + + user_password_hash = await repo.get_password_hash(user_id=user_id) return UserCredentialsTuple( - email=TypeAdapter(LowerCaseEmailStr).validate_python(row["email"]), - password_hash=row["password_hash"], - display_name=row["first_name"] or row["name"].capitalize(), + email=TypeAdapter(LowerCaseEmailStr).validate_python(user_row.email), + password_hash=user_password_hash, + display_name=user_row.first_name or user_row.name.capitalize(), ) @@ -237,22 +189,30 @@ async def list_user_permissions( return permissions +async def get_user_billing_details( + app: web.Application, *, user_id: UserID, product_name: ProductName +) -> UserBillingDetails: + return await _users_repository.get_user_billing_details( + get_asyncpg_engine(app), user_id=user_id, product_name=product_name + ) + + async def get_user_invoice_address( - app: web.Application, *, user_id: UserID + app: web.Application, + *, + product_name: ProductName, + user_id: UserID, ) -> UserInvoiceAddress: - user_billing_details: UserBillingDetails = ( - await _users_repository.get_user_billing_details( - get_asyncpg_engine(app), user_id=user_id - ) + user_billing_details = await get_user_billing_details( + app, user_id=user_id, product_name=product_name ) - _user_billing_country = pycountry.countries.lookup(user_billing_details.country) - _user_billing_country_alpha_2_format = _user_billing_country.alpha_2 + return UserInvoiceAddress( line1=user_billing_details.address, state=user_billing_details.state, postal_code=user_billing_details.postal_code, city=user_billing_details.city, - country=_user_billing_country_alpha_2_format, + country=pycountry.countries.lookup(user_billing_details.country).alpha_2, ) @@ -261,7 +221,9 @@ async def get_user_invoice_address( # -async def delete_user_without_projects(app: web.Application, user_id: UserID) -> None: +async def delete_user_without_projects( + app: web.Application, *, user_id: UserID, clean_cache: bool = True +) -> None: """Deletes a user from the database if the user exists""" # WARNING: user cannot be deleted without deleting first all ist project # otherwise this function will raise asyncpg.exceptions.ForeignKeyViolationError @@ -276,9 +238,10 @@ async def delete_user_without_projects(app: web.Application, user_id: UserID) -> ) return - # This user might be cached in the auth. If so, any request - # with this user-id will get thru producing unexpected side-effects - await clean_auth_policy_cache(app) + if clean_cache: + # This user might be cached in the auth. 
If so, any request + # with this user-id will get thru producing unexpected side-effects + await security_service.clean_auth_policy_cache(app) async def set_user_as_deleted(app: web.Application, *, user_id: UserID) -> None: @@ -298,7 +261,7 @@ async def update_expired_users(app: web.Application) -> list[UserID]: async def get_my_profile( app: web.Application, *, user_id: UserID, product_name: ProductName -): +) -> tuple[MyProfile, AggregatedPreferences]: """Caller and target user is the same. Privacy settings do not apply here :raises UserNotFoundError: @@ -308,7 +271,7 @@ async def get_my_profile( try: preferences = ( - await _preferences_service.get_frontend_user_preferences_aggregation( + await user_preferences_service.get_frontend_user_preferences_aggregation( app, user_id=user_id, product_name=product_name ) ) @@ -325,239 +288,31 @@ async def update_my_profile( app: web.Application, *, user_id: UserID, - update: MyProfilePatch, + update: MyProfileRestPatch, ) -> None: await _users_repository.update_user_profile( app, user_id=user_id, - update=ToUserUpdateDB.from_api(update), + updated_values=UserModelAdapter.from_rest_schema_model(update).to_db_values(), ) -# -# USER ACCOUNTS -# - - -async def list_user_accounts( +async def update_user_phone( app: web.Application, *, - product_name: ProductName, - filter_any_account_request_status: list[AccountRequestStatus] | None = None, - pagination_limit: int = 50, - pagination_offset: int = 0, -) -> tuple[list[dict[str, Any]], int]: - """ - Get a paginated list of users for admin view with filtering options. - - Args: - app: The web application instance - filter_approved: If set, filters users by their approval status - pagination_limit: Maximum number of users to return - pagination_offset: Number of users to skip for pagination - - Returns: - A tuple containing (list of user dictionaries, total count of users) - """ - engine = get_asyncpg_engine(app) - - # Get user data with pagination - users_data, total_count = ( - await _users_repository.list_merged_pre_and_registered_users( - engine, - product_name=product_name, - filter_any_account_request_status=filter_any_account_request_status, - pagination_limit=pagination_limit, - pagination_offset=pagination_offset, - ) - ) - - # For each user, append additional information if needed - result = [] - for user in users_data: - # Add any additional processing needed for admin view - user_dict = dict(user) - - # Add products information if needed - user_id = user.get("user_id") - if user_id: - products = await _users_repository.get_user_products( - engine, user_id=user_id - ) - user_dict["products"] = [p.product_name for p in products] - - user_dict["registered"] = ( - user_id is not None - if user.get("pre_email") - else user.get("status") is not None - ) - - result.append(user_dict) - - return result, total_count - - -async def search_users_accounts( - app: web.Application, - *, - email_glob: str, - product_name: ProductName | None = None, - include_products: bool = False, -) -> list[UserAccountGet]: - """ - WARNING: this information is reserved for admin users. Note that the returned model include UserForAdminGet - - NOTE: Functions in the service layer typically validate the caller's access rights - using parameters like product_name and user_id. However, this function skips - such checks as it is designed for scenarios (e.g., background tasks) where - no caller or context is available. 
- """ - - def _glob_to_sql_like(glob_pattern: str) -> str: - # Escape SQL LIKE special characters in the glob pattern - sql_like_pattern = glob_pattern.replace("%", r"\%").replace("_", r"\_") - # Convert glob wildcards to SQL LIKE wildcards - return sql_like_pattern.replace("*", "%").replace("?", "_") - - rows = await _users_repository.search_merged_pre_and_registered_users( - get_asyncpg_engine(app), - email_like=_glob_to_sql_like(email_glob), - product_name=product_name, - ) - - async def _list_products_or_none(user_id): - if user_id is not None and include_products: - products = await _users_repository.get_user_products( - get_asyncpg_engine(app), user_id=user_id - ) - return [_.product_name for _ in products] - return None - - return [ - UserAccountGet( - first_name=r.first_name or r.pre_first_name, - last_name=r.last_name or r.pre_last_name, - email=r.email or r.pre_email, - institution=r.institution, - phone=r.phone or r.pre_phone, - address=r.address, - city=r.city, - state=r.state, - postal_code=r.postal_code, - country=r.country, - extras=r.extras or {}, - invited_by=r.invited_by, - pre_registration_id=r.id, - account_request_status=r.account_request_status, - account_request_reviewed_by=r.account_request_reviewed_by, - account_request_reviewed_at=r.account_request_reviewed_at, - products=await _list_products_or_none(r.user_id), - # NOTE: old users will not have extra details - registered=r.user_id is not None if r.pre_email else r.status is not None, - status=r.status, - ) - for r in rows - ] - - -async def approve_user_account( - app: web.Application, - *, - pre_registration_email: LowerCaseEmailStr, - product_name: ProductName, - reviewer_id: UserID, -) -> int: - """Approve a user account based on their pre-registration email. - - Args: - app: The web application instance - pre_registration_email: Email of the pre-registered user to approve - product_name: Product name for which the user is being approved - reviewer_id: ID of the user approving the account - - Returns: - int: The ID of the approved pre-registration record - - Raises: - PendingPreRegistrationNotFoundError: If no pre-registration is found for the email/product - """ - engine = get_asyncpg_engine(app) - - # First, find the pre-registration entry matching the email and product - pre_registrations, _ = await _users_repository.list_user_pre_registrations( - engine, - filter_by_pre_email=pre_registration_email, - filter_by_product_name=product_name, - filter_by_account_request_status=AccountRequestStatus.PENDING, - ) - - if not pre_registrations: - raise PendingPreRegistrationNotFoundError( - email=pre_registration_email, product_name=product_name - ) - - # There should be only one registration matching these criteria - pre_registration = pre_registrations[0] - pre_registration_id: int = pre_registration["id"] - - # Update the pre-registration status to APPROVED using the reviewer's ID - await _users_repository.review_user_pre_registration( - engine, - pre_registration_id=pre_registration_id, - reviewed_by=reviewer_id, - new_status=AccountRequestStatus.APPROVED, - ) - - return pre_registration_id - - -async def reject_user_account( - app: web.Application, - *, - pre_registration_email: LowerCaseEmailStr, - product_name: ProductName, - reviewer_id: UserID, -) -> int: - """Reject a user account based on their pre-registration email. 
+ user_id: UserID, + phone: str, +) -> None: + """Update user's phone number after successful verification Args: - app: The web application instance - pre_registration_email: Email of the pre-registered user to reject - product_name: Product name for which the user is being rejected - reviewer_id: ID of the user rejecting the account - - Returns: - int: The ID of the rejected pre-registration record - - Raises: - PendingPreRegistrationNotFoundError: If no pre-registration is found for the email/product + app: Web application instance + user_id: ID of the user whose phone to update + phone: Verified phone number to set """ - engine = get_asyncpg_engine(app) - - # First, find the pre-registration entry matching the email and product - pre_registrations, _ = await _users_repository.list_user_pre_registrations( - engine, - filter_by_pre_email=pre_registration_email, - filter_by_product_name=product_name, - filter_by_account_request_status=AccountRequestStatus.PENDING, - ) - - if not pre_registrations: - raise PendingPreRegistrationNotFoundError( - email=pre_registration_email, product_name=product_name - ) - - # There should be only one registration matching these criteria - pre_registration = pre_registrations[0] - pre_registration_id: int = pre_registration["id"] - - # Update the pre-registration status to REJECTED using the reviewer's ID - await _users_repository.review_user_pre_registration( - engine, - pre_registration_id=pre_registration_id, - reviewed_by=reviewer_id, - new_status=AccountRequestStatus.REJECTED, + await _users_repository.update_user_profile( + app, + user_id=user_id, + updated_values={"phone": phone}, ) - - return pre_registration_id diff --git a/services/web/server/src/simcore_service_webserver/users/_users_web.py b/services/web/server/src/simcore_service_webserver/users/_users_web.py new file mode 100644 index 000000000000..91df2de62fb6 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/users/_users_web.py @@ -0,0 +1,73 @@ +import logging +from typing import Literal, TypedDict + +from aiohttp_session import Session +from models_library.users import UserID + +from .exceptions import ( + PhoneRegistrationCodeInvalidError, + PhoneRegistrationPendingNotFoundError, + PhoneRegistrationSessionInvalidError, +) + +_logger = logging.getLogger(__name__) + +# Registration session keys +_REGISTRATION_KEY = "registration" +_REGISTRATION_PENDING_KEY = "registration_pending" +_REGISTRATION_CODE_KEY = "registration_code" + + +class RegistrationData(TypedDict): + """Registration session data structure.""" + + user_id: UserID + data: str + status: Literal["pending_confirmation"] + + +class RegistrationSessionManager: + def __init__(self, session: Session, user_id: UserID, product_name: str): + self._session = session + self._user_id = user_id + self._product_name = product_name + + def start_registration(self, data: str, code: str) -> None: + registration_data: RegistrationData = { + "user_id": self._user_id, + "data": data, # keep data + "status": "pending_confirmation", + } + self._session[_REGISTRATION_KEY] = registration_data + self._session[_REGISTRATION_CODE_KEY] = code + self._session[_REGISTRATION_PENDING_KEY] = True + + def validate_pending_registration(self) -> RegistrationData: + if not self._session.get(_REGISTRATION_PENDING_KEY): + raise PhoneRegistrationPendingNotFoundError( + user_id=self._user_id, product_name=self._product_name + ) + + registration: RegistrationData | None = self._session.get(_REGISTRATION_KEY) + if not registration or 
registration["user_id"] != self._user_id: + raise PhoneRegistrationSessionInvalidError( + user_id=self._user_id, product_name=self._product_name + ) + + return registration + + def regenerate_code(self, new_code: str) -> None: + self.validate_pending_registration() + self._session[_REGISTRATION_CODE_KEY] = new_code + + def validate_confirmation_code(self, provided_code: str) -> None: + expected_code = self._session.get(_REGISTRATION_CODE_KEY) + if not expected_code or provided_code != expected_code: + raise PhoneRegistrationCodeInvalidError( + user_id=self._user_id, product_name=self._product_name + ) + + def clear_session(self) -> None: + self._session.pop(_REGISTRATION_KEY, None) + self._session.pop(_REGISTRATION_PENDING_KEY, None) + self._session.pop(_REGISTRATION_CODE_KEY, None) diff --git a/services/web/server/src/simcore_service_webserver/users/exceptions.py b/services/web/server/src/simcore_service_webserver/users/exceptions.py index b1533222195d..a71472b8374d 100644 --- a/services/web/server/src/simcore_service_webserver/users/exceptions.py +++ b/services/web/server/src/simcore_service_webserver/users/exceptions.py @@ -75,3 +75,15 @@ def __init__(self, *, email: str, product_name: str, **ctx: Any): super().__init__(**ctx) self.email = email self.product_name = product_name + + +class PhoneRegistrationPendingNotFoundError(UsersBaseError): + msg_template = "No pending phone registration found" + + +class PhoneRegistrationSessionInvalidError(UsersBaseError): + msg_template = "Invalid phone registration session" + + +class PhoneRegistrationCodeInvalidError(UsersBaseError): + msg_template = "Invalid confirmation code" diff --git a/services/web/server/src/simcore_service_webserver/users/models.py b/services/web/server/src/simcore_service_webserver/users/models.py new file mode 100644 index 000000000000..bcc04f1b57b9 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/users/models.py @@ -0,0 +1,6 @@ +# mypy: disable-error-code=truthy-function + +from ._models import FullNameDict, UserDisplayAndIdNamesTuple + +__all__: tuple[str, ...] = ("FullNameDict", "UserDisplayAndIdNamesTuple") +# nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/users/plugin.py b/services/web/server/src/simcore_service_webserver/users/plugin.py index e9fb7d2ea537..f81c1012f6b2 100644 --- a/services/web/server/src/simcore_service_webserver/users/plugin.py +++ b/services/web/server/src/simcore_service_webserver/users/plugin.py @@ -1,21 +1,26 @@ -""" users management subsystem - -""" +"""users management subsystem""" import logging +from typing import Final from aiohttp import web -from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup from servicelib.aiohttp.observer import setup_observer_registry -from . 
import _notifications_rest, _preferences_rest, _tokens_rest, _users_rest -from ._preferences_models import overwrite_user_preferences_defaults +from ..application_setup import ModuleCategory, app_setup_func +from ..constants import APP_SETTINGS_KEY +from ..user_notifications.bootstrap import ( + setup_user_notification_feature, +) +from ..user_preferences.bootstrap import setup_user_preferences_feature +from ..user_tokens.bootstrap import setup_user_tokens_feature +from ._controller.rest import accounts_rest, users_rest _logger = logging.getLogger(__name__) +APP_USERS_CLIENT_KEY: Final = web.AppKey("APP_USERS_CLIENT_KEY", object) -@app_module_setup( + +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_USERS", @@ -25,9 +30,10 @@ def setup_users(app: web.Application): assert app[APP_SETTINGS_KEY].WEBSERVER_USERS # nosec setup_observer_registry(app) - overwrite_user_preferences_defaults(app) - app.router.add_routes(_users_rest.routes) - app.router.add_routes(_tokens_rest.routes) - app.router.add_routes(_notifications_rest.routes) - app.router.add_routes(_preferences_rest.routes) + app.router.add_routes(users_rest.routes) + app.router.add_routes(accounts_rest.routes) + + setup_user_notification_feature(app) + setup_user_preferences_feature(app) + setup_user_tokens_feature(app) diff --git a/services/web/server/src/simcore_service_webserver/users/schemas.py b/services/web/server/src/simcore_service_webserver/users/schemas.py new file mode 100644 index 000000000000..e4520d253114 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/users/schemas.py @@ -0,0 +1,9 @@ +from ._controller.rest._rest_schemas import ( + UserAccountRestPreRegister, + UsersRequestContext, +) + +__all__: tuple[str, ...] = ("UserAccountRestPreRegister", "UsersRequestContext") + + +# nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/users/api.py b/services/web/server/src/simcore_service_webserver/users/users_service.py similarity index 85% rename from services/web/server/src/simcore_service_webserver/users/api.py rename to services/web/server/src/simcore_service_webserver/users/users_service.py index 238cd68b20ec..4001737cad28 100644 --- a/services/web/server/src/simcore_service_webserver/users/api.py +++ b/services/web/server/src/simcore_service_webserver/users/users_service.py @@ -1,12 +1,16 @@ # mypy: disable-error-code=truthy-function -from ._common.models import FullNameDict, UserDisplayAndIdNamesTuple +from ._accounts_service import ( + pre_register_user, +) +from ._models import FullNameDict from ._users_service import ( delete_user_without_projects, get_guest_user_ids_and_names, get_user, get_user_credentials, get_user_display_and_id_names, + get_user_email_legacy, get_user_fullname, get_user_id_from_gid, get_user_invoice_address, @@ -22,12 +26,12 @@ __all__: tuple[str, ...] 
= ( "FullNameDict", - "UserDisplayAndIdNamesTuple", "delete_user_without_projects", "get_guest_user_ids_and_names", "get_user", "get_user_credentials", "get_user_display_and_id_names", + "get_user_email_legacy", "get_user_fullname", "get_user_id_from_gid", "get_user_invoice_address", @@ -36,8 +40,9 @@ "get_user_role", "get_users_in_group", "is_user_in_product", - "set_user_as_deleted", + "pre_register_user", "search_public_users", + "set_user_as_deleted", "update_expired_users", ) # nopycln: file diff --git a/services/web/server/src/simcore_service_webserver/utils.py b/services/web/server/src/simcore_service_webserver/utils.py index 4a6c9d0169e6..8928deb21c60 100644 --- a/services/web/server/src/simcore_service_webserver/utils.py +++ b/services/web/server/src/simcore_service_webserver/utils.py @@ -3,59 +3,18 @@ """ import asyncio -import hashlib import logging import os -import sys -import traceback import tracemalloc from datetime import datetime -from pathlib import Path from common_library.error_codes import ErrorCodeStr from typing_extensions import ( # https://docs.pydantic.dev/latest/api/standard_library_types/#typeddict TypedDict, ) -_CURRENT_DIR = ( - Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent -) _logger = logging.getLogger(__name__) - -def is_osparc_repo_dir(path: Path) -> bool: - return all( - any(path.glob(expression)) for expression in [".github", "packages", "services"] - ) - - -def search_osparc_repo_dir(max_iter=8): - """Returns path to root repo dir or None - - NOTE: assumes this file within repo, i.e. only happens in edit mode! - """ - root_dir = _CURRENT_DIR - if "services/web/server" in str(root_dir): - it = 1 - while not is_osparc_repo_dir(root_dir) and it < max_iter: - root_dir = root_dir.parent - it += 1 - - if is_osparc_repo_dir(root_dir): - return root_dir - return None - - -def gravatar_hash(email: str) -> str: - return hashlib.md5(email.lower().encode("utf-8")).hexdigest() # nosec - - -# ----------------------------------------------- -# -# DATE/TIME -# -# - DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ" SECOND: int = 1 @@ -69,8 +28,6 @@ def now() -> datetime: def format_datetime(snapshot: datetime) -> str: - # TODO: this fullfills datetime schema!!! - # FIXME: ensure snapshot is ZULU time! return "{}Z".format(snapshot.isoformat(timespec="milliseconds")) @@ -108,7 +65,7 @@ class TaskInfoDict(TypedDict): def get_task_info(task: asyncio.Task) -> TaskInfoDict: def _format_frame(f): - return StackInfoDict(f_code=f.f_code, f_lineno=f.f_lineno) + return StackInfoDict(f_code=str(f.f_code), f_lineno=str(f.f_lineno)) info = TaskInfoDict( txt=str(task), @@ -163,13 +120,3 @@ def compose_support_error_msg( ) return ". 
".join(sentences) - - -# ----------------------------------------------- -# -# FORMATTING -# - - -def get_traceback_string(exception: BaseException) -> str: - return "".join(traceback.format_exception(exception)) diff --git a/services/web/server/src/simcore_service_webserver/utils_aiohttp.py b/services/web/server/src/simcore_service_webserver/utils_aiohttp.py index b70a6c6897aa..e0b753e8aee9 100644 --- a/services/web/server/src/simcore_service_webserver/utils_aiohttp.py +++ b/services/web/server/src/simcore_service_webserver/utils_aiohttp.py @@ -35,8 +35,9 @@ def get_routes_view(routes: RouteTableDef) -> str: return fh.getvalue() -def create_url_for_function(request: web.Request) -> Callable: - app = request.app +def create_url_for_function( + app: web.Application, request_url: URL, request_headers: dict[str, str] +) -> Callable: def _url_for(route_name: str, **params: dict[str, Any]) -> str: """Reverse URL constructing using named resources""" @@ -44,16 +45,16 @@ def _url_for(route_name: str, **params: dict[str, Any]) -> str: rel_url: URL = app.router[route_name].url_for( **{k: f"{v}" for k, v in params.items()} ) - url: URL = ( - request.url.origin() + _url: URL = ( + request_url.origin() .with_scheme( # Custom header by traefik. See labels in docker-compose as: # - traefik.http.middlewares.${SWARM_STACK_NAME_NO_HYPHEN}_sslheader.headers.customrequestheaders.X-Forwarded-Proto=http - request.headers.get(X_FORWARDED_PROTO, request.url.scheme) + request_headers.get(X_FORWARDED_PROTO, request_url.scheme) ) .with_path(str(rel_url)) ) - return f"{url}" + return f"{_url}" except KeyError as err: msg = f"Cannot find URL because there is no resource registered as {route_name=}Check name spelling or whether the router was not registered" diff --git a/services/web/server/src/simcore_service_webserver/utils_rate_limiting.py b/services/web/server/src/simcore_service_webserver/utils_rate_limiting.py index 2266170c5ac9..5311231a29b2 100644 --- a/services/web/server/src/simcore_service_webserver/utils_rate_limiting.py +++ b/services/web/server/src/simcore_service_webserver/utils_rate_limiting.py @@ -6,6 +6,7 @@ from typing import Final, NamedTuple from aiohttp.web_exceptions import HTTPTooManyRequests +from common_library.user_messages import user_message from models_library.rest_error import EnvelopedError, ErrorGet from servicelib.aiohttp import status @@ -15,7 +16,7 @@ class RateLimitSetup(NamedTuple): interval_seconds: float -MSG_TOO_MANY_REQUESTS: Final[str] = ( +MSG_TOO_MANY_REQUESTS: Final[str] = user_message( "Requests are being made too frequently. Please wait a moment before trying again." ) diff --git a/services/web/server/src/simcore_service_webserver/wallets/_api.py b/services/web/server/src/simcore_service_webserver/wallets/_api.py index dd092e2a39fc..77ef2dbdd72f 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_api.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_api.py @@ -16,8 +16,8 @@ from pydantic import TypeAdapter from ..resource_usage.service import get_wallet_total_available_credits -from ..users import api as users_service -from ..users import preferences_api as user_preferences_api +from ..user_preferences import user_preferences_service +from ..users import users_service from ..users.exceptions import UserDefaultWalletNotFoundError from . 
import _db as db from .errors import WalletAccessForbiddenError @@ -108,17 +108,44 @@ async def get_wallet_with_available_credits_by_user_and_wallet( ) +async def get_wallet_with_available_credits( + app: web.Application, + *, + wallet_id: WalletID, + product_name: ProductName, +) -> WalletGetWithAvailableCredits: + wallet_db: WalletDB = await db.get_wallet( + app=app, wallet_id=wallet_id, product_name=product_name + ) + + available_credits: WalletTotalCredits = await get_wallet_total_available_credits( + app, product_name, wallet_db.wallet_id + ) + + return WalletGetWithAvailableCredits( + wallet_id=wallet_db.wallet_id, + name=IDStr(wallet_db.name), + description=wallet_db.description, + owner=wallet_db.owner, + thumbnail=wallet_db.thumbnail, + status=wallet_db.status, + created=wallet_db.created, + modified=wallet_db.modified, + available_credits=available_credits.available_osparc_credits, + ) + + async def get_user_default_wallet_with_available_credits( app: web.Application, *, user_id: UserID, product_name: ProductName, ) -> WalletGetWithAvailableCredits: - user_default_wallet_preference = await user_preferences_api.get_frontend_user_preference( + user_default_wallet_preference = await user_preferences_service.get_frontend_user_preference( app, user_id=user_id, product_name=product_name, - preference_class=user_preferences_api.PreferredWalletIdFrontendUserPreference, + preference_class=user_preferences_service.PreferredWalletIdFrontendUserPreference, ) if user_default_wallet_preference is None: raise UserDefaultWalletNotFoundError(uid=user_id) @@ -176,7 +203,7 @@ async def update_wallet( ) if wallet.write is False: raise WalletAccessForbiddenError( - reason=f"Wallet {wallet_id} does not have write permission", + details=f"Wallet {wallet_id} does not have write permission", user_id=user_id, wallet_id=wallet_id, product_name=product_name, @@ -210,7 +237,7 @@ async def delete_wallet( ) if wallet.delete is False: raise WalletAccessForbiddenError( - reason=f"Wallet {wallet_id} does not have delete permission", + details=f"Wallet {wallet_id} does not have delete permission", user_id=user_id, wallet_id=wallet_id, product_name=product_name, @@ -233,7 +260,7 @@ async def get_wallet_by_user( ) if wallet.read is False: raise WalletAccessForbiddenError( - reason=f"User {user_id} does not have read permission on wallet {wallet_id}", + details=f"User {user_id} does not have read permission on wallet {wallet_id}", user_id=user_id, wallet_id=wallet_id, product_name=product_name, diff --git a/services/web/server/src/simcore_service_webserver/wallets/_constants.py b/services/web/server/src/simcore_service_webserver/wallets/_constants.py index eab6335e3df7..3cf69f28779c 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_constants.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_constants.py @@ -1,10 +1,14 @@ from typing import Final -MSG_PRICE_NOT_DEFINED_ERROR: Final[ - str -] = "No payments are accepted until this product has a price" +from common_library.user_messages import user_message -MSG_BILLING_DETAILS_NOT_DEFINED_ERROR: Final[str] = ( - "Payments cannot be processed: Required billing details (e.g. country for tax) are missing from your account." - "Please contact support to resolve this configuration issue." 
+MSG_PRICE_NOT_DEFINED_ERROR: Final[str] = user_message( + "Payments are not currently available for this product as pricing has not been configured.", + _version=1, +) + +MSG_BILLING_DETAILS_NOT_DEFINED_ERROR: Final[str] = user_message( + "Unable to process payment because required billing information (such as country for tax purposes) is missing from your account. " + "Please contact support to complete your billing setup.", + _version=1, ) diff --git a/services/web/server/src/simcore_service_webserver/wallets/_db.py b/services/web/server/src/simcore_service_webserver/wallets/_db.py index 4d17c742925a..ad99a398d39a 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_db.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_db.py @@ -12,7 +12,7 @@ from sqlalchemy.dialects.postgresql import BOOLEAN, INTEGER from sqlalchemy.sql import select -from ..db.plugin import get_database_engine +from ..db.plugin import get_database_engine_legacy from .errors import WalletAccessForbiddenError, WalletNotFoundError _logger = logging.getLogger(__name__) @@ -26,7 +26,7 @@ async def create_wallet( description: str | None, thumbnail: str | None, ) -> WalletDB: - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: result = await conn.execute( wallets.insert() .values( @@ -90,7 +90,7 @@ async def list_wallets_for_user( ) ) - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: result = await conn.execute(stmt) rows = await result.fetchall() or [] output: list[UserWalletDB] = [UserWalletDB.model_validate(row) for row in rows] @@ -112,7 +112,7 @@ async def list_wallets_owned_by_user( & (wallets.c.product_name == product_name) ) ) - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: results = await conn.execute(stmt) rows = await results.fetchall() or [] return [row.wallet_id for row in rows] @@ -145,12 +145,12 @@ async def get_wallet_for_user( ) ) - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: result = await conn.execute(stmt) row = await result.first() if row is None: raise WalletAccessForbiddenError( - reason=f"User does not have access to the wallet {wallet_id}. Or wallet does not exist.", + details=f"User does not have access to the wallet {wallet_id}. 
Or wallet does not exist.", user_id=user_id, wallet_id=wallet_id, product_name=product_name, @@ -178,11 +178,11 @@ async def get_wallet( & (wallets.c.product_name == product_name) ) ) - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: result = await conn.execute(stmt) row = await result.first() if row is None: - raise WalletNotFoundError(reason=f"Wallet {wallet_id} not found.") + raise WalletNotFoundError(details=f"Wallet {wallet_id} not found.") return WalletDB.model_validate(row) @@ -195,7 +195,7 @@ async def update_wallet( status: WalletStatus, product_name: ProductName, ) -> WalletDB: - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: result = await conn.execute( wallets.update() .values( @@ -213,7 +213,7 @@ async def update_wallet( ) row = await result.first() if row is None: - raise WalletNotFoundError(reason=f"Wallet {wallet_id} not found.") + raise WalletNotFoundError(details=f"Wallet {wallet_id} not found.") return WalletDB.model_validate(row) @@ -222,7 +222,7 @@ async def delete_wallet( wallet_id: WalletID, product_name: ProductName, ) -> None: - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: await conn.execute( wallets.delete().where( (wallets.c.wallet_id == wallet_id) diff --git a/services/web/server/src/simcore_service_webserver/wallets/_events.py b/services/web/server/src/simcore_service_webserver/wallets/_events.py index 3aea74cdb83a..ace45203b502 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_events.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_events.py @@ -9,8 +9,8 @@ from ..products import products_service from ..resource_usage.service import add_credits_to_wallet -from ..users import preferences_api -from ..users.api import get_user_display_and_id_names +from ..user_preferences import user_preferences_service +from ..users import users_service from ._api import any_wallet_owned_by_user, create_wallet _WALLET_NAME_TEMPLATE = "{} Credits" @@ -19,6 +19,7 @@ async def _auto_add_default_wallet( app: web.Application, + *, user_id: UserID, product_name: ProductName, extra_credits_in_usd: PositiveInt | None = None, @@ -26,7 +27,7 @@ async def _auto_add_default_wallet( if not await any_wallet_owned_by_user( app, user_id=user_id, product_name=product_name ): - user = await get_user_display_and_id_names(app, user_id=user_id) + user = await users_service.get_user_display_and_id_names(app, user_id=user_id) product = products_service.get_product(app, product_name) wallet = await create_wallet( @@ -53,9 +54,9 @@ async def _auto_add_default_wallet( ) preference_id = ( - preferences_api.PreferredWalletIdFrontendUserPreference().preference_identifier + user_preferences_service.PreferredWalletIdFrontendUserPreference().preference_identifier ) - await preferences_api.set_frontend_user_preference( + await user_preferences_service.set_frontend_user_preference( app, user_id=user_id, product_name=product_name, diff --git a/services/web/server/src/simcore_service_webserver/wallets/_groups_api.py b/services/web/server/src/simcore_service_webserver/wallets/_groups_api.py index 05b6625ae5eb..35b03376ba44 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_groups_api.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_groups_api.py @@ -8,7 +8,7 @@ from models_library.wallets import UserWalletDB, WalletID from 
pydantic import BaseModel, ConfigDict -from ..users import api as users_service +from ..users import users_service from . import _db as wallets_db from . import _groups_db as wallets_groups_db from ._groups_db import WalletGroupGetDB @@ -44,7 +44,7 @@ async def create_wallet_group( ) if wallet.write is False: raise WalletAccessForbiddenError( - reason=f"User does not have write access to wallet {wallet_id}", + details=f"User does not have write access to wallet {wallet_id}", user_id=user_id, wallet_id=wallet_id, product_name=product_name, @@ -78,7 +78,7 @@ async def list_wallet_groups_by_user_and_wallet( ) if wallet.read is False: raise WalletAccessForbiddenError( - reason=f"User does not have read access to wallet {wallet_id}", + details=f"User does not have read access to wallet {wallet_id}", user_id=user_id, wallet_id=wallet_id, product_name=product_name, @@ -132,14 +132,14 @@ async def update_wallet_group( ) if wallet.write is False: raise WalletAccessForbiddenError( - reason=f"User does not have write access to wallet {wallet_id}" + details=f"User does not have write access to wallet {wallet_id}" ) if wallet.owner == group_id: user: dict = await users_service.get_user(app, user_id) if user["primary_gid"] != wallet.owner: # Only the owner of the wallet can modify the owner group raise WalletAccessForbiddenError( - reason=f"User does not have access to modify owner wallet group in wallet {wallet_id}", + details=f"User does not have access to modify owner wallet group in wallet {wallet_id}", user_id=user_id, wallet_id=wallet_id, product_name=product_name, @@ -174,14 +174,14 @@ async def delete_wallet_group( ) if wallet.delete is False: raise WalletAccessForbiddenError( - reason=f"User does not have delete access to wallet {wallet_id}" + details=f"User does not have delete access to wallet {wallet_id}" ) if wallet.owner == group_id: user: dict = await users_service.get_user(app, user_id) if user["primary_gid"] != wallet.owner: # Only the owner of the wallet can delete the owner group raise WalletAccessForbiddenError( - reason=f"User does not have access to modify owner wallet group in wallet {wallet_id}" + details=f"User does not have access to modify owner wallet group in wallet {wallet_id}" ) await wallets_groups_db.delete_wallet_group( diff --git a/services/web/server/src/simcore_service_webserver/wallets/_groups_db.py b/services/web/server/src/simcore_service_webserver/wallets/_groups_db.py index c7e24fff4b80..e8ae3c804d7f 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_groups_db.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_groups_db.py @@ -9,7 +9,7 @@ from sqlalchemy import func, literal_column from sqlalchemy.sql import select -from ..db.plugin import get_database_engine +from ..db.plugin import get_database_engine_legacy from .errors import WalletGroupNotFoundError _logger = logging.getLogger(__name__) @@ -38,7 +38,7 @@ async def create_wallet_group( write: bool, delete: bool, ) -> WalletGroupGetDB: - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: result = await conn.execute( wallet_to_groups.insert() .values( @@ -73,7 +73,7 @@ async def list_wallet_groups( .where(wallet_to_groups.c.wallet_id == wallet_id) ) - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: result = await conn.execute(stmt) rows = await result.fetchall() or [] return TypeAdapter(list[WalletGroupGetDB]).validate_python(rows) @@ 
-100,12 +100,12 @@ async def get_wallet_group( ) ) - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: result = await conn.execute(stmt) row = await result.first() if row is None: raise WalletGroupNotFoundError( - reason=f"Wallet {wallet_id} group {group_id} not found" + details=f"Wallet {wallet_id} group {group_id} not found" ) return WalletGroupGetDB.model_validate(row) @@ -119,7 +119,7 @@ async def update_wallet_group( write: bool, delete: bool, ) -> WalletGroupGetDB: - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: result = await conn.execute( wallet_to_groups.update() .values( @@ -136,7 +136,7 @@ async def update_wallet_group( row = await result.first() if row is None: raise WalletGroupNotFoundError( - reason=f"Wallet {wallet_id} group {group_id} not found" + details=f"Wallet {wallet_id} group {group_id} not found" ) return WalletGroupGetDB.model_validate(row) @@ -146,7 +146,7 @@ async def delete_wallet_group( wallet_id: WalletID, group_id: GroupID, ) -> None: - async with get_database_engine(app).acquire() as conn: + async with get_database_engine_legacy(app).acquire() as conn: await conn.execute( wallet_to_groups.delete().where( (wallet_to_groups.c.wallet_id == wallet_id) diff --git a/services/web/server/src/simcore_service_webserver/wallets/_groups_handlers.py b/services/web/server/src/simcore_service_webserver/wallets/_groups_handlers.py index 3f567ca64349..dda619a9de4f 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_groups_handlers.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_groups_handlers.py @@ -16,7 +16,7 @@ from .._meta import api_version_prefix as VTAG from ..login.decorators import login_required -from ..models import RequestContext +from ..models import AuthenticatedRequestContext from ..security.decorators import permission_required from ..utils_aiohttp import envelope_json_response from . 
import _groups_api @@ -69,7 +69,7 @@ class _WalletsGroupsBodyParams(BaseModel): @permission_required("wallets.*") @_handle_wallets_groups_exceptions async def create_wallet_group(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_WalletsGroupsPathParams, request) body_params = await parse_request_body_as(_WalletsGroupsBodyParams, request) @@ -92,7 +92,7 @@ async def create_wallet_group(request: web.Request): @permission_required("wallets.*") @_handle_wallets_groups_exceptions async def list_wallet_groups(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(WalletsPathParams, request) wallets: list[WalletGroupGet] = ( @@ -115,7 +115,7 @@ async def list_wallet_groups(request: web.Request): @permission_required("wallets.*") @_handle_wallets_groups_exceptions async def update_wallet_group(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_WalletsGroupsPathParams, request) body_params = await parse_request_body_as(_WalletsGroupsBodyParams, request) @@ -139,7 +139,7 @@ async def update_wallet_group(request: web.Request): @permission_required("wallets.*") @_handle_wallets_groups_exceptions async def delete_wallet_group(request: web.Request): - req_ctx = RequestContext.model_validate(request) + req_ctx = AuthenticatedRequestContext.model_validate(request) path_params = parse_request_path_parameters_as(_WalletsGroupsPathParams, request) await _groups_api.delete_wallet_group( diff --git a/services/web/server/src/simcore_service_webserver/wallets/_handlers.py b/services/web/server/src/simcore_service_webserver/wallets/_handlers.py index 4dcef92b71cc..b6d49275e9b0 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_handlers.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_handlers.py @@ -2,7 +2,6 @@ import logging from aiohttp import web -from common_library.error_codes import create_error_code from models_library.api_schemas_webserver.wallets import ( CreateWalletBodyParams, PutWalletBodyParams, @@ -13,13 +12,12 @@ from models_library.users import UserID from models_library.wallets import WalletID from pydantic import Field +from servicelib.aiohttp.request_keys import RQT_USERID_KEY from servicelib.aiohttp.requests_validation import ( parse_request_body_as, parse_request_path_parameters_as, ) from servicelib.aiohttp.typing_extension import Handler -from servicelib.logging_errors import create_troubleshotting_log_kwargs -from servicelib.request_keys import RQT_USERID_KEY from .._meta import API_VTAG as VTAG from ..application_settings_utils import requires_dev_feature_enabled @@ -95,19 +93,9 @@ async def wrapper(request: web.Request) -> web.StreamResponse: raise web.HTTPPaymentRequired(text=f"{exc}") from exc except BillingDetailsNotFoundError as exc: - - error_code = create_error_code(exc) - user_error_msg = MSG_BILLING_DETAILS_NOT_DEFINED_ERROR - - _logger.exception( - **create_troubleshotting_log_kwargs( - user_error_msg, - error=exc, - error_code=error_code, - ) - ) - - raise web.HTTPServiceUnavailable(text=user_error_msg) from exc + raise web.HTTPServiceUnavailable( + text=MSG_BILLING_DETAILS_NOT_DEFINED_ERROR + ) from exc return wrapper diff --git 
a/services/web/server/src/simcore_service_webserver/wallets/_payments_handlers.py b/services/web/server/src/simcore_service_webserver/wallets/_payments_handlers.py index 2751abc457e1..9193d2068df2 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/_payments_handlers.py +++ b/services/web/server/src/simcore_service_webserver/wallets/_payments_handlers.py @@ -2,6 +2,7 @@ import logging from aiohttp import web +from common_library.logging.logging_base import get_log_record_extra from models_library.api_schemas_webserver.wallets import ( CreateWalletPayment, GetWalletAutoRecharge, @@ -15,17 +16,16 @@ from models_library.rest_pagination import Page, PageQueryParameters from models_library.rest_pagination_utils import paginate_data from servicelib.aiohttp import status -from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY from servicelib.aiohttp.requests_validation import ( parse_request_body_as, parse_request_path_parameters_as, parse_request_query_parameters_as, ) -from servicelib.logging_utils import get_log_record_extra, log_context +from servicelib.logging_utils import log_context from servicelib.utils import fire_and_forget_task -from simcore_service_webserver.products._models import CreditResult from .._meta import API_VTAG as VTAG +from ..constants import APP_FIRE_AND_FORGET_TASKS_KEY from ..login.decorators import login_required from ..payments import api from ..payments.api import ( @@ -43,6 +43,7 @@ replace_wallet_payment_autorecharge, ) from ..products import products_service +from ..products._models import CreditResult from ..security.decorators import permission_required from ..utils_aiohttp import envelope_json_response from ._handlers import ( diff --git a/services/web/server/src/simcore_service_webserver/wallets/api.py b/services/web/server/src/simcore_service_webserver/wallets/api.py index 8df130d49059..bdeabab0f29d 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/api.py +++ b/services/web/server/src/simcore_service_webserver/wallets/api.py @@ -1,5 +1,6 @@ from ._api import ( get_wallet_by_user, + get_wallet_with_available_credits, get_wallet_with_available_credits_by_user_and_wallet, get_wallet_with_permissions_by_user, list_wallets_for_user, @@ -8,6 +9,7 @@ __all__: tuple[str, ...] = ( "get_wallet_by_user", + "get_wallet_with_available_credits", "get_wallet_with_permissions_by_user", "get_wallet_with_available_credits_by_user_and_wallet", "list_wallets_for_user", diff --git a/services/web/server/src/simcore_service_webserver/wallets/errors.py b/services/web/server/src/simcore_service_webserver/wallets/errors.py index 320605bb14d3..573d5750e18a 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/errors.py +++ b/services/web/server/src/simcore_service_webserver/wallets/errors.py @@ -1,24 +1,23 @@ from ..errors import WebServerBaseError -class WalletsValueError(WebServerBaseError, ValueError): - ... +class WalletsValueError(WebServerBaseError, ValueError): ... class WalletNotFoundError(WalletsValueError): - msg_template = "Wallet not found. {reason}" + msg_template = "Wallet not found: {details}" class WalletAccessForbiddenError(WalletsValueError): - msg_template = "Wallet access forbidden. {reason}" + msg_template = "Wallet access forbidden: {details}" class WalletNotEnoughCreditsError(WalletsValueError): - msg_template = "Wallet does not have enough credits. 
{reason}" + msg_template = "Wallet does not have enough credits: {details}" # Wallet groups class WalletGroupNotFoundError(WalletsValueError): - msg_template = "Wallet group not found. {reason}" + msg_template = "Wallet group not found: {details}" diff --git a/services/web/server/src/simcore_service_webserver/wallets/plugin.py b/services/web/server/src/simcore_service_webserver/wallets/plugin.py index bd739d08d3a6..77a9c3f42f1a 100644 --- a/services/web/server/src/simcore_service_webserver/wallets/plugin.py +++ b/services/web/server/src/simcore_service_webserver/wallets/plugin.py @@ -1,12 +1,11 @@ -""" tags management subsystem +"""tags management subsystem""" -""" import logging from aiohttp import web -from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func +from ..constants import APP_SETTINGS_KEY from ..payments.plugin import setup_payments from . import _groups_handlers, _handlers, _payments_handlers from ._events import setup_wallets_events @@ -14,7 +13,7 @@ _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_WALLETS", diff --git a/services/web/server/src/simcore_service_webserver/web_utils.py b/services/web/server/src/simcore_service_webserver/web_utils.py new file mode 100644 index 000000000000..31b57e4cbeb1 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/web_utils.py @@ -0,0 +1,27 @@ +from dataclasses import asdict +from typing import Any + +from aiohttp import web +from common_library.json_serialization import json_dumps +from models_library.rest_error import LogMessageType +from servicelib.aiohttp.status import HTTP_200_OK + + +def envelope_response(data: Any, *, status: int = HTTP_200_OK) -> web.Response: + return web.json_response( + { + "data": data, + "error": None, + }, + dumps=json_dumps, + status=status, + ) + + +def flash_response( + message: str, level: str = "INFO", *, status: int = HTTP_200_OK +) -> web.Response: + return envelope_response( + data=asdict(LogMessageType(message, level)), + status=status, + ) diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_common/exceptions_handlers.py b/services/web/server/src/simcore_service_webserver/workspaces/_common/exceptions_handlers.py index 32bb81224a79..5c29a4f7d9de 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/_common/exceptions_handlers.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/_common/exceptions_handlers.py @@ -1,5 +1,6 @@ import logging +from common_library.user_messages import user_message from servicelib.aiohttp import status from ...exception_handling import ( @@ -21,24 +22,37 @@ _TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { WorkspaceGroupNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Workspace {workspace_id} group {group_id} not found.", + user_message( + "The requested workspace {workspace_id} group {group_id} could not be found.", + _version=1, + ), ), WorkspaceAccessForbiddenError: HttpErrorInfo( status.HTTP_403_FORBIDDEN, - "Does not have access to this workspace", + user_message( + "You do not have permission to access this workspace.", _version=1 + ), ), WorkspaceNotFoundError: HttpErrorInfo( status.HTTP_404_NOT_FOUND, - "Workspace not found. 
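The new web_utils.py above wraps payloads in the standard envelope ({"data": ..., "error": null}), and flash_response puts a log-message payload into that envelope. A short usage sketch; the handler name below is hypothetical:

from aiohttp import web
from simcore_service_webserver.web_utils import flash_response


async def confirm_registration(request: web.Request) -> web.Response:
    # ... handle the request ...
    # returns {"data": {"message": ..., "level": "INFO"}, "error": null} with status 200
    return flash_response("Your email has been confirmed", level="INFO")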
{reason}", + user_message( + "The requested workspace could not be found: {details}", _version=1 + ), ), # Trashing ProjectRunningConflictError: HttpErrorInfo( status.HTTP_409_CONFLICT, - "One or more studies in this workspace are in use and cannot be trashed. Please stop all services first and try again", + user_message( + "Unable to delete workspace because one or more projects are currently running. Please stop all running services and try again.", + _version=1, + ), ), ProjectStoppingError: HttpErrorInfo( status.HTTP_503_SERVICE_UNAVAILABLE, - "Something went wrong while stopping running services in studies within this workspace before trashing. Aborting trash.", + user_message( + "Something went wrong while stopping running services in projects within this workspace before trashing. Aborting trash.", + _version=1, + ), ), } diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_common/models.py b/services/web/server/src/simcore_service_webserver/workspaces/_common/models.py index 05d962a30d5a..0ed5b72911bc 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/_common/models.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/_common/models.py @@ -16,7 +16,7 @@ from models_library.utils.common_validators import empty_str_to_none_pre_validator from models_library.workspaces import WorkspaceID from pydantic import BaseModel, BeforeValidator, ConfigDict, Field -from servicelib.request_keys import RQT_USERID_KEY +from servicelib.aiohttp.request_keys import RQT_USERID_KEY from ...constants import RQ_PRODUCT_KEY @@ -32,15 +32,15 @@ class WorkspacesPathParams(StrictRequestParameters): workspace_id: WorkspaceID -_WorkspacesListOrderQueryParams: type[ - RequestParameters -] = create_ordering_query_model_class( - ordering_fields={ - "modified_at", - "name", - }, - default=OrderBy(field=IDStr("modified_at"), direction=OrderDirection.DESC), - ordering_fields_api_to_column_map={"modified_at": "modified"}, +_WorkspacesListOrderQueryParams: type[RequestParameters] = ( + create_ordering_query_model_class( + ordering_fields={ + "modified_at", + "name", + }, + default=OrderBy(field=IDStr("modified_at"), direction=OrderDirection.DESC), + ordering_fields_api_to_column_map={"modified_at": "modified"}, + ) ) @@ -49,13 +49,13 @@ class WorkspacesFilters(Filters): default=False, description="Set to true to list trashed, false to list non-trashed (default), None to list all", ) - text: Annotated[ - str | None, BeforeValidator(empty_str_to_none_pre_validator) - ] = Field( - default=None, - description="Multi column full text search", - max_length=100, - examples=["My Workspace"], + text: Annotated[str | None, BeforeValidator(empty_str_to_none_pre_validator)] = ( + Field( + default=None, + description="Multi column full text search", + max_length=100, + examples=["My Workspace"], + ) ) @@ -63,8 +63,7 @@ class WorkspacesListQueryParams( PageQueryParameters, FiltersQueryParameters[WorkspacesFilters], _WorkspacesListOrderQueryParams, # type: ignore[misc, valid-type] -): - ... +): ... class WorkspacesGroupsPathParams(BaseModel): @@ -80,5 +79,4 @@ class WorkspacesGroupsBodyParams(BaseModel): model_config = ConfigDict(extra="forbid") -class WorkspaceTrashQueryParams(RemoveQueryParams): - ... +class WorkspaceTrashQueryParams(RemoveQueryParams): ... 
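The reformatted filter model above relies on pydantic v2 Annotated fields with a pre-validator that converts empty strings to None before length validation runs. A self-contained sketch of the pattern; the local validator below is an illustrative re-implementation, not the shared models_library helper:

from typing import Annotated

from pydantic import BaseModel, BeforeValidator, Field


def _empty_str_to_none(value):
    # maps "" (or whitespace-only strings) to None before field validation
    if isinstance(value, str) and value.strip() == "":
        return None
    return value


class FiltersExample(BaseModel):
    trashed: bool | None = Field(default=False)
    text: Annotated[str | None, BeforeValidator(_empty_str_to_none)] = Field(
        default=None, max_length=100
    )


assert FiltersExample(text="").text is None
assert FiltersExample(text="My Workspace").text == "My Workspace"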
diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_groups_service.py b/services/web/server/src/simcore_service_webserver/workspaces/_groups_service.py index d2ac47751b70..18fbc8bc9b88 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/_groups_service.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/_groups_service.py @@ -8,7 +8,7 @@ from models_library.workspaces import UserWorkspaceWithAccessRights, WorkspaceID from pydantic import BaseModel, ConfigDict -from ..users import api as users_service +from ..users import users_service from . import _groups_repository as workspaces_groups_db from . import _workspaces_repository as workspaces_workspaces_repository from ._groups_repository import WorkspaceGroupGetDB @@ -134,14 +134,14 @@ async def update_workspace_group( ) if workspace.my_access_rights.write is False: raise WorkspaceAccessForbiddenError( - reason=f"User does not have write access to workspace {workspace_id}" + details=f"User does not have write access to workspace {workspace_id}" ) if workspace.owner_primary_gid == group_id: user: dict = await users_service.get_user(app, user_id) if user["primary_gid"] != workspace.owner_primary_gid: # Only the owner of the workspace can modify the owner group raise WorkspaceAccessForbiddenError( - reason=f"User does not have access to modify owner workspace group in workspace {workspace_id}" + details=f"User does not have access to modify owner workspace group in workspace {workspace_id}" ) workspace_group_db: WorkspaceGroupGetDB = ( @@ -180,14 +180,16 @@ async def delete_workspace_group( ) if user["primary_gid"] != group_id and workspace.my_access_rights.delete is False: raise WorkspaceAccessForbiddenError( - reason=f"User does not have delete access to workspace {workspace_id}" + details=f"User does not have delete access to workspace {workspace_id}" + ) + if ( + workspace.owner_primary_gid == group_id + and user["primary_gid"] != workspace.owner_primary_gid + ): + # Only the owner of the workspace can delete the owner group + raise WorkspaceAccessForbiddenError( + details=f"User does not have access to modify owner workspace group in workspace {workspace_id}" ) - if workspace.owner_primary_gid == group_id: - if user["primary_gid"] != workspace.owner_primary_gid: - # Only the owner of the workspace can delete the owner group - raise WorkspaceAccessForbiddenError( - reason=f"User does not have access to modify owner workspace group in workspace {workspace_id}" - ) await workspaces_groups_db.delete_workspace_group( app=app, workspace_id=workspace_id, group_id=group_id diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_trash_service.py b/services/web/server/src/simcore_service_webserver/workspaces/_trash_service.py index 249b1f0d21d0..10996802c520 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/_trash_service.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/_trash_service.py @@ -17,9 +17,6 @@ WorkspaceUpdates, ) from simcore_postgres_database.utils_repos import transaction_context -from simcore_service_webserver.folders.service import list_folders -from simcore_service_webserver.projects.api import list_projects -from simcore_service_webserver.projects.models import ProjectTypeAPI from ..db.plugin import get_asyncpg_engine from ..folders._trash_service import ( @@ -27,11 +24,14 @@ trash_folder, untrash_folder, ) +from ..folders.service import list_folders from ..projects._trash_service import ( 
batch_delete_projects_in_root_workspace_as_admin, trash_project, untrash_project, ) +from ..projects.api import list_projects +from ..projects.models import ProjectTypeAPI from . import _workspaces_repository, _workspaces_service, _workspaces_service_crud_read from .errors import WorkspaceBatchDeleteError, WorkspaceNotTrashedError @@ -269,7 +269,7 @@ async def delete_trashed_workspace( raise WorkspaceNotTrashedError( workspace_id=workspace_id, user_id=user_id, - reason="Cannot delete trashed workspace since it does not fit current criteria", + details="Cannot delete trashed workspace since it does not fit current criteria", ) # NOTE: this function deletes workspace AND its content recursively! diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_repository.py b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_repository.py index b08e86284e7b..58e26416c12a 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_repository.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_repository.py @@ -196,7 +196,7 @@ async def get_workspace_for_user( row = result.one_or_none() if row is None: raise WorkspaceAccessForbiddenError( - reason=f"User {user_id} does not have access to the workspace {workspace_id}. Or workspace does not exist.", + details=f"User {user_id} does not have access to the workspace {workspace_id}. Or workspace does not exist.", ) return UserWorkspaceWithAccessRights.model_validate(row) @@ -217,7 +217,7 @@ async def get_workspace_db_get( ) row = result.one_or_none() if row is None: - raise WorkspaceNotFoundError(reason=f"Workspace {workspace_id} not found.") + raise WorkspaceNotFoundError(details=f"Workspace {workspace_id} not found.") return WorkspaceDBGet.model_validate(row) @@ -247,7 +247,7 @@ async def update_workspace( ) row = await result.first() if row is None: - raise WorkspaceNotFoundError(reason=f"Workspace {workspace_id} not found.") + raise WorkspaceNotFoundError(details=f"Workspace {workspace_id} not found.") return Workspace.model_validate(row) diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_service.py b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_service.py index 4779e39fbd97..619d630a206d 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_service.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_service.py @@ -20,7 +20,7 @@ from ..folders.service import delete_folder_with_all_content, list_folders from ..projects.api import delete_project_by_user, list_projects from ..projects.models import ProjectTypeAPI -from ..users.api import get_user +from ..users import users_service from . 
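The repository helpers above follow a fetch-or-raise shape: run the select, take one_or_none(), and raise the domain error with details= when nothing comes back. A hedged sketch with SQLAlchemy's async engine; the table definition and error class below are placeholders, not the real webserver models:

import sqlalchemy as sa
from sqlalchemy.ext.asyncio import AsyncEngine

metadata = sa.MetaData()
workspaces = sa.Table(
    "workspaces",
    metadata,
    sa.Column("workspace_id", sa.Integer, primary_key=True),
    sa.Column("name", sa.String),
)


class WorkspaceNotFoundError(Exception):  # placeholder for the webserver error type
    def __init__(self, *, details: str) -> None:
        super().__init__(f"Workspace not found: {details}")


async def get_workspace(engine: AsyncEngine, workspace_id: int):
    async with engine.connect() as conn:
        result = await conn.execute(
            sa.select(workspaces).where(workspaces.c.workspace_id == workspace_id)
        )
        row = result.one_or_none()
    if row is None:
        raise WorkspaceNotFoundError(details=f"Workspace {workspace_id} not found.")
    return row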
import _workspaces_repository as db from ._workspaces_service_crud_read import check_user_workspace_access @@ -36,7 +36,7 @@ async def create_workspace( thumbnail: str | None, product_name: ProductName, ) -> UserWorkspaceWithAccessRights: - user = await get_user(app, user_id=user_id) + user = await users_service.get_user(app, user_id=user_id) created = await db.create_workspace( app, product_name=product_name, diff --git a/services/web/server/src/simcore_service_webserver/workspaces/errors.py b/services/web/server/src/simcore_service_webserver/workspaces/errors.py index 66991deaaef9..0af34814670f 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/errors.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/errors.py @@ -8,11 +8,11 @@ class WorkspacesRuntimeError(WebServerBaseError, RuntimeError): ... class WorkspaceNotFoundError(WorkspacesValueError): - msg_template = "Workspace not found. {reason}" + msg_template = "Workspace not found: {details}" class WorkspaceAccessForbiddenError(WorkspacesValueError): - msg_template = "Workspace access forbidden. {reason}" + msg_template = "Workspace access forbidden: {details}" class WorkspaceBatchDeleteError(WorkspacesValueError): @@ -31,4 +31,4 @@ class WorkspaceFolderInconsistencyError(WorkspacesValueError): class WorkspaceNotTrashedError(WorkspacesRuntimeError): - msg_template = "Cannot delete workspace {workspace_id} since it was not trashed first: {reason}" + msg_template = "Cannot delete workspace {workspace_id} since it was not trashed first: {details}" diff --git a/services/web/server/src/simcore_service_webserver/workspaces/plugin.py b/services/web/server/src/simcore_service_webserver/workspaces/plugin.py index b5936e128db4..b715988020e0 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/plugin.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/plugin.py @@ -1,18 +1,17 @@ -""" tags management subsystem +"""tags management subsystem""" -""" import logging from aiohttp import web -from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY -from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup +from ..application_setup import ModuleCategory, app_setup_func +from ..constants import APP_SETTINGS_KEY from . 
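The template placeholders above are formatted with the exception's keyword arguments, which is why every raise site in this PR moves from reason= to details=. A minimal stand-in base class to illustrate the mechanism (the real base is the shared webserver error mixin, not shown here):

class _TemplatedError(Exception):
    # illustrative stand-in: the message is msg_template formatted with the kwargs
    msg_template: str = "{details}"

    def __init__(self, **ctx) -> None:
        super().__init__(self.msg_template.format(**ctx))
        self.ctx = ctx


class WorkspaceAccessForbiddenError(_TemplatedError):
    msg_template = "Workspace access forbidden: {details}"


try:
    raise WorkspaceAccessForbiddenError(details="user 42 has no write access")
except WorkspaceAccessForbiddenError as err:
    assert str(err) == "Workspace access forbidden: user 42 has no write access"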
import _groups_rest, _trash_rest, _workspaces_rest _logger = logging.getLogger(__name__) -@app_module_setup( +@app_setup_func( __name__, ModuleCategory.ADDON, settings_name="WEBSERVER_WORKSPACES", diff --git a/services/web/server/tests/conftest.py b/services/web/server/tests/conftest.py index dc15b475984c..f1dba37445ad 100644 --- a/services/web/server/tests/conftest.py +++ b/services/web/server/tests/conftest.py @@ -3,10 +3,9 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable -import asyncio +import contextlib import json import logging -import random import sys from collections.abc import AsyncIterator, Awaitable, Callable from copy import deepcopy @@ -19,26 +18,32 @@ from aiohttp.test_utils import TestClient from common_library.json_serialization import json_dumps from faker import Faker -from models_library.api_schemas_webserver.projects import ProjectGet +from models_library.api_schemas_webserver.projects import ( + ProjectGet, + ProjectStateOutputSchema, +) from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID -from models_library.projects_state import ProjectState +from pydantic import TypeAdapter from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.faker_factories import random_phone_number from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.webserver_login import LoggedUser, NewUser, UserInfoDict +from pytest_simcore.helpers.webserver_login import LoggedUser +from pytest_simcore.helpers.webserver_users import NewUser, UserInfoDict from pytest_simcore.simcore_webserver_projects_rest_api import NEW_PROJECT from servicelib.aiohttp import status -from servicelib.aiohttp.long_running_tasks.server import TaskStatus from servicelib.common_headers import ( X_SIMCORE_PARENT_NODE_ID, X_SIMCORE_PARENT_PROJECT_UUID, ) +from servicelib.long_running_tasks.models import TaskStatus from simcore_service_webserver.application_settings_utils import ( AppConfigDict, convert_to_environ_vars, ) from simcore_service_webserver.db.models import UserRole +from simcore_service_webserver.models import PhoneNumberStr from simcore_service_webserver.projects._crud_api_create import ( OVERRIDABLE_DOCUMENT_KEYS, ) @@ -64,6 +69,7 @@ # imports the fixtures for the integration tests pytest_plugins = [ "aiohttp.pytest_plugin", + "pytest_simcore.asyncio_event_loops", "pytest_simcore.cli_runner", "pytest_simcore.db_entries_mocks", "pytest_simcore.docker_compose", @@ -72,6 +78,7 @@ "pytest_simcore.environment_configs", "pytest_simcore.faker_users_data", "pytest_simcore.hypothesis_type_strategies", + "pytest_simcore.logging", "pytest_simcore.openapi_specs", "pytest_simcore.postgres_service", "pytest_simcore.pydantic_models", @@ -88,6 +95,13 @@ ] +@pytest.fixture +async def exit_stack() -> AsyncIterator[contextlib.AsyncExitStack]: + """Provides an AsyncExitStack that gets cleaned up after each test""" + async with contextlib.AsyncExitStack() as stack: + yield stack + + @pytest.fixture(scope="session") def package_dir() -> Path: """osparc-simcore installed directory""" @@ -133,6 +147,11 @@ def fake_project(tests_data_dir: Path) -> ProjectDict: return json.loads(fpath.read_text()) +@pytest.fixture +def user_phone_number(faker: Faker) -> PhoneNumberStr: + return TypeAdapter(PhoneNumberStr).validate_python(random_phone_number(faker)) + + @pytest.fixture async def user(client: TestClient) -> AsyncIterator[UserInfoDict]: async with 
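The new exit_stack fixture above gives each test an AsyncExitStack that is unwound at teardown, so tests can register async cleanups as they create resources. A short sketch of how a test might use it; the cleanup coroutine below is hypothetical:

import contextlib

import pytest


@pytest.fixture
async def exit_stack():
    async with contextlib.AsyncExitStack() as stack:
        yield stack  # callbacks registered on the stack run here, in reverse order


async def _delete_test_resource(resource_id: str) -> None:
    # hypothetical async cleanup, e.g. removing a row the test created
    print(f"cleaning up {resource_id}")


async def test_creates_resource(exit_stack: contextlib.AsyncExitStack):
    resource_id = "1234"
    exit_stack.push_async_callback(_delete_test_resource, resource_id)
    # ... exercise the resource; it is cleaned up even if the assertions below fail ...
    assert resource_id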
NewUser( @@ -146,7 +165,10 @@ async def user(client: TestClient) -> AsyncIterator[UserInfoDict]: @pytest.fixture async def logged_user( - client: TestClient, user_role: UserRole, faker: Faker + client: TestClient, + user_role: UserRole, + faker: Faker, + user_phone_number: PhoneNumberStr, ) -> AsyncIterator[UserInfoDict]: """adds a user in db and logs in with client @@ -158,8 +180,7 @@ async def logged_user( "role": user_role.name, "first_name": faker.first_name(), "last_name": faker.last_name(), - "phone": faker.phone_number() - + f"{random.randint(1000, 9999)}", # noqa: S311 + "phone": user_phone_number, }, check_if_succeeds=user_role != UserRole.ANONYMOUS, ) as user: @@ -207,7 +228,7 @@ async def request_create_project() -> ( # noqa: C901, PLR0915 created_project_uuids = [] used_clients = [] - async def _setup( + async def _setup( # noqa: C901 client: TestClient, *, project: dict | None = None, @@ -294,7 +315,7 @@ async def _setup( } return url, project_data, expected_data, headers - async def _creator( + async def _creator( # noqa: PLR0915 client: TestClient, expected_accepted_response: HTTPStatus, expected_creation_response: HTTPStatus, @@ -408,9 +429,9 @@ async def _creator( # now check returned is as expected if new_project: # has project state - assert not ProjectState( + assert not ProjectStateOutputSchema( **new_project.get("state", {}) - ).locked.value, "Newly created projects should be unlocked" + ).share_state.locked, "Newly created projects should be unlocked" # updated fields assert expected_data["uuid"] != new_project["uuid"] @@ -482,8 +503,12 @@ def mock_dynamic_scheduler(mocker: MockerFixture) -> None: @pytest.fixture -async def loop( - event_loop: asyncio.AbstractEventLoop, -) -> asyncio.AbstractEventLoop: - """Override the event loop inside pytest-aiohttp with the one from pytest-asyncio.""" - return event_loop +def with_dev_features_enabled( + app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch +) -> None: + setenvs_from_dict( + monkeypatch, + { + "WEBSERVER_DEV_FEATURES_ENABLED": "1", + }, + ) diff --git a/services/web/server/tests/data/default_app_config-integration.yaml b/services/web/server/tests/data/default_app_config-integration.yaml index 5b8ac2ded05f..2ccee731e919 100644 --- a/services/web/server/tests/data/default_app_config-integration.yaml +++ b/services/web/server/tests/data/default_app_config-integration.yaml @@ -24,10 +24,9 @@ db: enabled: true postgres: database: ${POSTGRES_DB} - endpoint: ${POSTGRES_ENDPOINT} host: ${POSTGRES_HOST} maxsize: 50 - minsize: 1 + minsize: 2 password: ${POSTGRES_PASSWORD} port: ${POSTGRES_PORT} user: ${POSTGRES_USER} diff --git a/services/web/server/tests/data/default_app_config-unit.yaml b/services/web/server/tests/data/default_app_config-unit.yaml index c7f4705cd6a9..194a130e744c 100644 --- a/services/web/server/tests/data/default_app_config-unit.yaml +++ b/services/web/server/tests/data/default_app_config-unit.yaml @@ -29,7 +29,7 @@ db: host: 127.0.0.1 port: 5432 maxsize: 5 - minsize: 1 + minsize: 2 endpoint: 127.0.0.1:5432 diagnostics: enabled: false diff --git a/services/web/server/tests/data/fake-project-with-conversation.json b/services/web/server/tests/data/fake-project-with-conversation.json new file mode 100644 index 000000000000..5c8ddfa9e356 --- /dev/null +++ b/services/web/server/tests/data/fake-project-with-conversation.json @@ -0,0 +1,89 @@ +{ + "accessRights": {}, + "uuid": "de2578c5-431e-6257-a462-d7bf73b76c0c", + "name": "fake-project-name", + "description": "anim sint pariatur do dolore", + 
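The with_dev_features_enabled fixture above sets the flag through the setenvs_from_dict helper; with plain pytest the same effect is a one-line monkeypatch, sketched below (only WEBSERVER_DEV_FEATURES_ENABLED comes from the diff, the rest is illustrative):

import pytest


@pytest.fixture
def with_dev_features_enabled(monkeypatch: pytest.MonkeyPatch) -> None:
    # equivalent to setenvs_from_dict(monkeypatch, {"WEBSERVER_DEV_FEATURES_ENABLED": "1"})
    monkeypatch.setenv("WEBSERVER_DEV_FEATURES_ENABLED", "1")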
"prjOwner": "foo@foo.com", + "creationDate": "1865-11-30T04:00:14.000Z", + "lastChangeDate": "7364-11-30T10:04:52.000Z", + "thumbnail": "https://some_fake_project_thumbnail.com/fake", + "tags": [], + "classifiers": [], + "workbench": { + "b4b20476-e7c0-47c2-8cc4-f66ac21a13bf": { + "key": "simcore/services/frontend/file-picker", + "version": "1.0.0", + "label": "File Picker 0D", + "inputs": {}, + "inputNodes": [], + "outputs": {}, + "position": { + "x": 50, + "y": 150 + } + }, + "5739e377-17f7-4f09-a6ad-62659fb7fdec": { + "key": "simcore/services/comp/ucdavis-singlecell-cardiac-model", + "version": "1.0.0", + "label": "DBP-Clancy-Rabbit-Single-Cell solver", + "inputAccess": { + "Na": "ReadAndWrite", + "Kr": "ReadOnly", + "BCL": "ReadAndWrite", + "NBeats": "ReadOnly", + "Ligand": "Invisible", + "cAMKII": "Invisible" + }, + "inputs": { + "Na": 0, + "Kr": 0, + "BCL": 200, + "NBeats": 5, + "Ligand": 0, + "cAMKII": "WT", + "initfile": { + "nodeUuid": "b4b20476-e7c0-47c2-8cc4-f66ac21a13bf", + "output": "outFile" + } + }, + "inputNodes": [ + "b4b20476-e7c0-47c2-8cc4-f66ac21a13bf" + ], + "outputs": {}, + "position": { + "x": 300, + "y": 150 + } + }, + "351fd505-1ee3-466d-ad6c-ea2915ffd364": { + "key": "simcore/services/dynamic/raw-graphs", + "version": "2.10.4", + "label": "2D plot", + "inputs": {}, + "outputs": {}, + "position": { + "x": 1073, + "y": 307 + }, + "progress": 100 + } + }, + "ui": { + "annotations": { + "b8a7e8e2-1c2d-4f3a-9c5e-123456789abc": { + "type": "conversation", + "attributes": { + "conversationId": 2, + "x": 415, + "y": 100, + "title": "My chat" + } + } + } + }, + "quality": {}, + "dev": {}, + "workspaceId": null, + "type": "STANDARD", + "templateType": null +} diff --git a/services/web/server/tests/data/fake-project.json b/services/web/server/tests/data/fake-project.json index b4c6c34562eb..62171dadd6ba 100644 --- a/services/web/server/tests/data/fake-project.json +++ b/services/web/server/tests/data/fake-project.json @@ -20,6 +20,12 @@ "position": { "x": 50, "y": 150 + }, + "state": { + "currentStatus": "NOT_STARTED", + "lock_state": { + "locked": false + } } }, "5739e377-17f7-4f09-a6ad-62659fb7fdec": { @@ -53,6 +59,12 @@ "position": { "x": 300, "y": 150 + }, + "state": { + "currentStatus": "NOT_STARTED", + "lock_state": { + "locked": false + } } }, "351fd505-1ee3-466d-ad6c-ea2915ffd364": { @@ -65,7 +77,13 @@ "x": 1073, "y": 307 }, - "progress": 100 + "progress": 100.0, + "state": { + "currentStatus": "NOT_STARTED", + "lock_state": { + "locked": false + } + } } }, "ui": {}, diff --git a/services/web/server/tests/data/workbench_2connected_jupyterlabs.json b/services/web/server/tests/data/workbench_2connected_jupyterlabs.json new file mode 100644 index 000000000000..af2efefe7eae --- /dev/null +++ b/services/web/server/tests/data/workbench_2connected_jupyterlabs.json @@ -0,0 +1,37 @@ +{ + "e8eae2cd-ae0f-4ba6-ae0b-86eeadf99b42": { + "key": "simcore/services/dynamic/jupyter-math", + "version": "3.0.5", + "label": "JupyterLab Math (Python+Octave)", + "inputs": {}, + "inputsRequired": [], + "inputNodes": [] + }, + "f7d6dc1e-a6dc-44e1-9588-a2f4b05d3d9c": { + "key": "simcore/services/dynamic/jupyter-math", + "version": "3.0.5", + "label": "JupyterLab Math (Python+Octave)_2", + "inputs": { + "input_1": { + "nodeUuid": "e8eae2cd-ae0f-4ba6-ae0b-86eeadf99b42", + "output": "output_1" + }, + "input_2": { + "nodeUuid": "e8eae2cd-ae0f-4ba6-ae0b-86eeadf99b42", + "output": "output_2" + }, + "input_3": { + "nodeUuid": "e8eae2cd-ae0f-4ba6-ae0b-86eeadf99b42", + "output": "output_3" + }, + 
"input_4": { + "nodeUuid": "e8eae2cd-ae0f-4ba6-ae0b-86eeadf99b42", + "output": "output_4" + } + }, + "inputsRequired": [], + "inputNodes": [ + "e8eae2cd-ae0f-4ba6-ae0b-86eeadf99b42" + ] + } +} diff --git a/services/web/server/tests/integration/02/conftest.py b/services/web/server/tests/integration/01/conftest.py similarity index 100% rename from services/web/server/tests/integration/02/conftest.py rename to services/web/server/tests/integration/01/conftest.py diff --git a/services/web/server/tests/integration/02/notifications/test_rabbitmq_consumers.py b/services/web/server/tests/integration/01/notifications/test_rabbitmq_consumers.py similarity index 78% rename from services/web/server/tests/integration/02/notifications/test_rabbitmq_consumers.py rename to services/web/server/tests/integration/01/notifications/test_rabbitmq_consumers.py index 051e4e1c56f0..d29c362f8a77 100644 --- a/services/web/server/tests/integration/02/notifications/test_rabbitmq_consumers.py +++ b/services/web/server/tests/integration/01/notifications/test_rabbitmq_consumers.py @@ -8,13 +8,12 @@ from typing import Any from unittest import mock -import aiopg -import aiopg.sa import pytest import socketio import sqlalchemy as sa from aiohttp.test_utils import TestClient from faker import Faker +from models_library.api_schemas_webserver.socketio import SocketIORoomStr from models_library.progress_bar import ProgressReport from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID @@ -30,7 +29,7 @@ from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder from pytest_mock import MockerFixture -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from redis import Redis from servicelib.aiohttp.application import create_safe_application from servicelib.aiohttp.monitor_services import ( @@ -39,7 +38,6 @@ ) from servicelib.rabbitmq import RabbitMQClient from settings_library.rabbit import RabbitSettings -from simcore_postgres_database.models.projects import projects from simcore_postgres_database.models.users import UserRole from simcore_service_webserver.application_settings import setup_settings from simcore_service_webserver.db.plugin import setup_db @@ -54,10 +52,10 @@ from simcore_service_webserver.rest.plugin import setup_rest from simcore_service_webserver.security.plugin import setup_security from simcore_service_webserver.session.plugin import setup_session +from simcore_service_webserver.socketio._utils import get_socket_server from simcore_service_webserver.socketio.messages import ( SOCKET_IO_EVENT, SOCKET_IO_LOG_EVENT, - SOCKET_IO_NODE_UPDATED_EVENT, ) from simcore_service_webserver.socketio.models import WebSocketNodeProgress from simcore_service_webserver.socketio.plugin import setup_socketio @@ -73,7 +71,10 @@ "redis", ] -pytest_simcore_ops_services_selection = [] +pytest_simcore_ops_services_selection = [ + "adminer", + "redis-commander", +] _STABLE_DELAY_S = 2 @@ -113,7 +114,7 @@ async def _assert_handler_called_with_json( handler: mock.Mock, expected_call: dict[str, Any] ) -> None: async for attempt in AsyncRetrying( - wait=wait_fixed(0.1), + wait=wait_fixed(0.2), stop=stop_after_delay(10), retry=retry_if_exception_type(AssertionError), reraise=True, @@ -130,16 +131,17 @@ async def _assert_handler_called_with_json( @pytest.fixture async def client( - mock_redis_socket_timeout: None, + docker_registry: str, aiohttp_client: Callable, app_config: 
dict[str, Any], rabbit_service: RabbitSettings, postgres_db: sa.engine.Engine, redis_client: Redis, monkeypatch_setenv_from_app_config: Callable, + simcore_services_ready: None, ) -> TestClient: app_config["storage"]["enabled"] = False - + app_config["db"]["postgres"]["minsize"] = 2 monkeypatch_setenv_from_app_config(app_config) app = create_safe_application(app_config) @@ -186,13 +188,13 @@ def user_project_id(user_project: ProjectDict) -> ProjectID: @pytest.fixture def user_id(logged_user: UserInfoDict) -> UserID: - return UserID(logged_user["id"]) + return logged_user["id"] @pytest.fixture def sender_user_id(user_id: UserID, sender_same_user_id: bool, faker: Faker) -> UserID: if sender_same_user_id is False: - return UserID(faker.pyint(min_value=user_id + 1)) + return faker.pyint(min_value=user_id + 1) return user_id @@ -207,8 +209,8 @@ async def test_log_workflow( client: TestClient, rabbitmq_publisher: RabbitMQClient, subscribe_to_logs: bool, - socketio_client_factory: Callable[ - [str | None, TestClient | None], Awaitable[socketio.AsyncClient] + create_socketio_connection: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] ], # user sender_same_user_id: bool, @@ -223,7 +225,7 @@ async def test_log_workflow( RabbitMQ (TOPIC) --> Webserver --> Redis --> webclient (socketio) """ - socket_io_conn = await socketio_client_factory(None, client) + socket_io_conn, *_ = await create_socketio_connection(None, client) mock_log_handler = mocker.MagicMock() socket_io_conn.on(SOCKET_IO_LOG_EVENT, handler=mock_log_handler) @@ -290,7 +292,6 @@ async def test_log_workflow_only_receives_messages_if_subscribed( "event_type": SOCKET_IO_LOG_EVENT, "data": log_message.model_dump(exclude={"user_id", "channel_name"}), }, - ignore_queue=True, ), ) mocked_send_messages.reset_mock() @@ -316,8 +317,8 @@ async def test_log_workflow_only_receives_messages_if_subscribed( async def test_progress_non_computational_workflow( client: TestClient, rabbitmq_publisher: RabbitMQClient, - socketio_client_factory: Callable[ - [str | None, TestClient | None], Awaitable[socketio.AsyncClient] + create_socketio_connection: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] ], subscribe_to_logs: bool, progress_type: ProgressType, @@ -333,9 +334,11 @@ async def test_progress_non_computational_workflow( RabbitMQ (TOPIC) --> Webserver --> Redis --> webclient (socketio) """ - socket_io_conn = await socketio_client_factory(None, client) + socket_io_conn, *_ = await create_socketio_connection(None, client) + # the project must be opened here mock_progress_handler = mocker.MagicMock() + socket_io_conn.on( WebSocketNodeProgress.get_event_type(), handler=mock_progress_handler ) @@ -344,6 +347,10 @@ async def test_progress_non_computational_workflow( assert client.app await project_logs.subscribe(client.app, user_project_id) + # this simulates the user openning the project + await get_socket_server(client.app).enter_room( + socket_io_conn.get_sid(), SocketIORoomStr.from_project_id(user_project_id) + ) progress_message = ProgressRabbitMessageNode( user_id=sender_user_id, project_id=user_project_id, @@ -353,7 +360,7 @@ async def test_progress_non_computational_workflow( ) await rabbitmq_publisher.publish(progress_message.channel_name, progress_message) - call_expected = sender_same_user_id and subscribe_to_logs + call_expected = subscribe_to_logs if call_expected: expected_call = WebSocketNodeProgress.from_rabbit_message( progress_message @@ -363,85 +370,6 @@ async 
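The _assert_handler_called_with_json helper above polls an assertion with tenacity until it passes or a 10 s deadline expires, which is how these tests wait for socketio events to arrive. A self-contained sketch of that retry-until-assertion idiom, using import paths that match the surrounding tests:

from typing import Any
from unittest import mock

from tenacity.asyncio import AsyncRetrying
from tenacity.retry import retry_if_exception_type
from tenacity.stop import stop_after_delay
from tenacity.wait import wait_fixed


async def assert_eventually_called_with(handler: mock.Mock, expected: dict[str, Any]) -> None:
    # re-checks every 0.2 s for up to 10 s; the last AssertionError is re-raised
    async for attempt in AsyncRetrying(
        wait=wait_fixed(0.2),
        stop=stop_after_delay(10),
        retry=retry_if_exception_type(AssertionError),
        reraise=True,
    ):
        with attempt:
            handler.assert_called_once_with(expected)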
def test_progress_non_computational_workflow( await _assert_handler_not_called(mock_progress_handler) -@pytest.mark.parametrize("user_role", [UserRole.GUEST], ids=str) -@pytest.mark.parametrize( - "sender_same_user_id", [True, False], ids=lambda id_: f"same_sender_id={id_}" -) -@pytest.mark.parametrize( - "subscribe_to_logs", [True, False], ids=lambda id_: f"subscribed={id_}" -) -async def test_progress_computational_workflow( - client: TestClient, - rabbitmq_publisher: RabbitMQClient, - user_project: ProjectDict, - socketio_client_factory: Callable[ - [str | None, TestClient | None], Awaitable[socketio.AsyncClient] - ], - mocker: MockerFixture, - aiopg_engine: aiopg.sa.Engine, - subscribe_to_logs: bool, - # user - sender_same_user_id: bool, - sender_user_id: UserID, - # project - random_node_id_in_user_project: NodeID, - user_project_id: ProjectID, -): - """ - RabbitMQ (TOPIC) --> Webserver --> DB (get project) - Redis --> webclient (socketio) - - """ - socket_io_conn = await socketio_client_factory(None, client) - - mock_progress_handler = mocker.MagicMock() - socket_io_conn.on(SOCKET_IO_NODE_UPDATED_EVENT, handler=mock_progress_handler) - - if subscribe_to_logs: - assert client.app - await project_logs.subscribe(client.app, user_project_id) - progress_message = ProgressRabbitMessageNode( - user_id=sender_user_id, - project_id=user_project_id, - node_id=random_node_id_in_user_project, - progress_type=ProgressType.COMPUTATION_RUNNING, - report=ProgressReport(actual_value=0.3, total=1), - ) - await rabbitmq_publisher.publish(progress_message.channel_name, progress_message) - - call_expected = sender_same_user_id and subscribe_to_logs - if call_expected: - expected_call = jsonable_encoder( - progress_message, include={"node_id", "project_id"} - ) - expected_call |= { - "data": user_project["workbench"][f"{random_node_id_in_user_project}"] - } - expected_call["data"]["progress"] = int( - progress_message.report.percent_value * 100 - ) - await _assert_handler_called_with_json(mock_progress_handler, expected_call) - else: - await _assert_handler_not_called(mock_progress_handler) - - # check the database. 
doing it after the waiting calls above is safe - async with aiopg_engine.acquire() as conn: - assert projects is not None - result = await conn.execute( - sa.select(projects.c.workbench).where( - projects.c.uuid == str(user_project_id) - ) - ) - row = await result.fetchone() - assert row - project_workbench = dict(row[projects.c.workbench]) - # NOTE: the progress might still be present but is not used anymore - assert ( - project_workbench[f"{random_node_id_in_user_project}"].get("progress", 0) - == 0 - ) - - @pytest.mark.parametrize("user_role", [UserRole.GUEST], ids=str) @pytest.mark.parametrize("metrics_name", ["service_started", "service_stopped"]) async def test_instrumentation_workflow( @@ -499,8 +427,8 @@ async def test_event_workflow( mocker: MockerFixture, client: TestClient, rabbitmq_publisher: RabbitMQClient, - socketio_client_factory: Callable[ - [str | None, TestClient | None], Awaitable[socketio.AsyncClient] + create_socketio_connection: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] ], # user sender_same_user_id: bool, @@ -513,7 +441,7 @@ async def test_event_workflow( RabbitMQ --> Webserver --> Redis --> webclient (socketio) """ - socket_io_conn = await socketio_client_factory(None, client) + socket_io_conn, *_ = await create_socketio_connection(None, client) mock_event_handler = mocker.MagicMock() socket_io_conn.on(SOCKET_IO_EVENT, handler=mock_event_handler) diff --git a/services/web/server/tests/integration/02/scicrunch/conftest.py b/services/web/server/tests/integration/01/scicrunch/conftest.py similarity index 100% rename from services/web/server/tests/integration/02/scicrunch/conftest.py rename to services/web/server/tests/integration/01/scicrunch/conftest.py diff --git a/services/web/server/tests/integration/02/scicrunch/test_scicrunch__resolver.py b/services/web/server/tests/integration/01/scicrunch/test_scicrunch__resolver.py similarity index 100% rename from services/web/server/tests/integration/02/scicrunch/test_scicrunch__resolver.py rename to services/web/server/tests/integration/01/scicrunch/test_scicrunch__resolver.py diff --git a/services/web/server/tests/integration/02/scicrunch/test_scicrunch__rest.py b/services/web/server/tests/integration/01/scicrunch/test_scicrunch__rest.py similarity index 92% rename from services/web/server/tests/integration/02/scicrunch/test_scicrunch__rest.py rename to services/web/server/tests/integration/01/scicrunch/test_scicrunch__rest.py index d42e8c42e90d..7f474d2dc673 100644 --- a/services/web/server/tests/integration/02/scicrunch/test_scicrunch__rest.py +++ b/services/web/server/tests/integration/01/scicrunch/test_scicrunch__rest.py @@ -3,14 +3,14 @@ # pylint:disable=redefined-outer-name """ - Tests raw communication with scicrunch service API +Tests raw communication with scicrunch service API - - Use for systematic exploration of the API - - Analyzes responses of the API to known situtations - - Ensures parts of the scicrunch service API that we use do not change interface or behaviour +- Use for systematic exploration of the API +- Analyzes responses of the API to known situtations +- Ensures parts of the scicrunch service API that we use do not change interface or behaviour - NOTE: this is intended for manual testing during development - NOTE: skipped if it does not define a valid SCICRUNCH_API_KEY +NOTE: this is intended for manual testing during development +NOTE: skipped if it does not define a valid SCICRUNCH_API_KEY """ import os diff --git 
a/services/web/server/tests/integration/02/test_computation.py b/services/web/server/tests/integration/01/test_computation.py similarity index 63% rename from services/web/server/tests/integration/02/test_computation.py rename to services/web/server/tests/integration/01/test_computation.py index 200deba64f31..d9ac678cde73 100644 --- a/services/web/server/tests/integration/02/test_computation.py +++ b/services/web/server/tests/integration/01/test_computation.py @@ -7,23 +7,31 @@ import asyncio import json import time -from collections.abc import Callable +from collections.abc import Awaitable, Callable from copy import deepcopy from pathlib import Path from typing import Any, NamedTuple import pytest +import socketio import sqlalchemy as sa from aiohttp.test_utils import TestClient from common_library.json_serialization import json_dumps +from faker import Faker +from models_library.projects_nodes import Node +from models_library.projects_nodes_io import NodeID from models_library.projects_state import RunningState +from pydantic import TypeAdapter +from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status from servicelib.aiohttp import status from servicelib.aiohttp.application import create_safe_application from servicelib.status_codes_utils import get_code_display_name from settings_library.rabbit import RabbitSettings from settings_library.redis import RedisSettings +from simcore_postgres_database.models.comp_runs_collections import comp_runs_collections from simcore_postgres_database.models.projects import projects +from simcore_postgres_database.models.projects_metadata import projects_metadata from simcore_postgres_database.models.users import UserRole from simcore_postgres_database.webserver_models import ( NodeClass, @@ -46,8 +54,10 @@ from simcore_service_webserver.rest.plugin import setup_rest from simcore_service_webserver.security.plugin import setup_security from simcore_service_webserver.session.plugin import setup_session +from simcore_service_webserver.socketio.messages import SOCKET_IO_NODE_UPDATED_EVENT from simcore_service_webserver.socketio.plugin import setup_socketio from simcore_service_webserver.users.plugin import setup_users +from sqlalchemy.ext.asyncio import AsyncEngine from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_delay @@ -59,6 +69,8 @@ "catalog", "dask-scheduler", "dask-sidecar", + "docker-api-proxy", + "dynamic-schdlr", "director-v2", "director", "migration", @@ -114,7 +126,7 @@ def user_role_response(): @pytest.fixture async def client( - postgres_db: sa.engine.Engine, + sqlalchemy_async_engine: AsyncEngine, rabbit_service: RabbitSettings, redis_settings: RedisSettings, aiohttp_client: Callable, @@ -166,29 +178,29 @@ def fake_workbench_adjacency_list(tests_data_dir: Path) -> dict[str, Any]: return json.load(fp) -def _assert_db_contents( +async def _assert_db_contents( project_id: str, - postgres_db: sa.engine.Engine, + sqlalchemy_async_engine: AsyncEngine, fake_workbench_payload: dict[str, Any], fake_workbench_adjacency_list: dict[str, Any], check_outputs: bool, ) -> None: - with postgres_db.connect() as conn: - pipeline_db = conn.execute( - sa.select(comp_pipeline).where(comp_pipeline.c.project_id == project_id) - ).fetchone() - assert pipeline_db + async with sqlalchemy_async_engine.connect() as conn: + pipeline_db = ( + await conn.execute( + sa.select(comp_pipeline).where(comp_pipeline.c.project_id == project_id) + ) + ).one() - 
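The database assertions above are ported from a synchronous Engine fixture to SQLAlchemy's AsyncEngine: connections are entered with async with, execute() is awaited, and rows are read with .one()/.all() and attribute access instead of row[column] lookups. A condensed sketch of the pattern; the table definition is illustrative:

import sqlalchemy as sa
from sqlalchemy.ext.asyncio import AsyncEngine

metadata = sa.MetaData()
comp_tasks = sa.Table(
    "comp_tasks",
    metadata,
    sa.Column("project_id", sa.String),
    sa.Column("node_id", sa.String),
)


async def get_tasks_by_node(engine: AsyncEngine, project_id: str):
    async with engine.connect() as conn:
        result = await conn.execute(
            sa.select(comp_tasks).where(comp_tasks.c.project_id == project_id)
        )
        rows = result.all()
    # Row objects support attribute access (row.node_id), replacing row[column] lookups
    return {row.node_id: row for row in rows}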
assert pipeline_db[comp_pipeline.c.project_id] == project_id - assert ( - pipeline_db[comp_pipeline.c.dag_adjacency_list] - == fake_workbench_adjacency_list - ) + assert pipeline_db.project_id == project_id + assert pipeline_db.dag_adjacency_list == fake_workbench_adjacency_list # check db comp_tasks - tasks_db = conn.execute( - sa.select(comp_tasks).where(comp_tasks.c.project_id == project_id) - ).fetchall() + tasks_db = ( + await conn.execute( + sa.select(comp_tasks).where(comp_tasks.c.project_id == project_id) + ) + ).all() assert tasks_db mock_pipeline = fake_workbench_payload @@ -210,45 +222,43 @@ def _assert_db_contents( NodeIdStr = str -def _get_computational_tasks_from_db( +async def _get_computational_tasks_from_db( project_id: str, - postgres_db: sa.engine.Engine, + sqlalchemy_async_engine: AsyncEngine, ) -> dict[NodeIdStr, Any]: # this check is only there to check the comp_pipeline is there - with postgres_db.connect() as conn: + async with sqlalchemy_async_engine.connect() as conn: assert ( - conn.execute( + await conn.execute( sa.select(comp_pipeline).where(comp_pipeline.c.project_id == project_id) - ).fetchone() - is not None - ), f"missing pipeline in the database under comp_pipeline {project_id}" + ) + ).one(), f"missing pipeline in the database under comp_pipeline {project_id}" # get the computational tasks - tasks_db = conn.execute( - sa.select(comp_tasks).where( - (comp_tasks.c.project_id == project_id) - & (comp_tasks.c.node_class == NodeClass.COMPUTATIONAL) + tasks_db = ( + await conn.execute( + sa.select(comp_tasks).where( + (comp_tasks.c.project_id == project_id) + & (comp_tasks.c.node_class == NodeClass.COMPUTATIONAL) + ) ) - ).fetchall() + ).all() print(f"--> tasks from DB: {tasks_db=}") return {t.node_id: t for t in tasks_db} -def _get_project_workbench_from_db( +async def _get_project_workbench_from_db( project_id: str, - postgres_db: sa.engine.Engine, + sqlalchemy_async_engine: AsyncEngine, ) -> dict[str, Any]: # this check is only there to check the comp_pipeline is there print(f"--> looking for project {project_id=} in projects table...") - with postgres_db.connect() as conn: - project_in_db = conn.execute( - sa.select(projects).where(projects.c.uuid == project_id) - ).fetchone() - - assert ( - project_in_db - ), f"missing pipeline in the database under comp_pipeline {project_id}" + async with sqlalchemy_async_engine.connect() as conn: + project_in_db = ( + await conn.execute(sa.select(projects).where(projects.c.uuid == project_id)) + ).one() + print( f"<-- found following workbench: {json_dumps(project_in_db.workbench, indent=2)}" ) @@ -291,7 +301,7 @@ async def _assert_and_wait_for_pipeline_state( async def _assert_and_wait_for_comp_task_states_to_be_transmitted_in_projects( project_id: str, - postgres_db: sa.engine.Engine, + sqlalchemy_async_engine: AsyncEngine, ) -> None: async for attempt in AsyncRetrying( reraise=True, @@ -303,11 +313,15 @@ async def _assert_and_wait_for_comp_task_states_to_be_transmitted_in_projects( print( f"--> waiting for pipeline results to move to projects table, attempt {attempt.retry_state.attempt_number}..." 
) - comp_tasks_in_db: dict[NodeIdStr, Any] = _get_computational_tasks_from_db( - project_id, postgres_db + comp_tasks_in_db: dict[NodeIdStr, Any] = ( + await _get_computational_tasks_from_db( + project_id, sqlalchemy_async_engine + ) ) - workbench_in_db: dict[NodeIdStr, Any] = _get_project_workbench_from_db( - project_id, postgres_db + workbench_in_db: dict[NodeIdStr, Any] = ( + await _get_project_workbench_from_db( + project_id, sqlalchemy_async_engine + ) ) for node_id, node_values in comp_tasks_in_db.items(): assert ( @@ -339,7 +353,7 @@ async def _assert_and_wait_for_comp_task_states_to_be_transmitted_in_projects( async def test_start_stop_computation( client: TestClient, sleeper_service: dict[str, str], - postgres_db: sa.engine.Engine, + sqlalchemy_async_engine: AsyncEngine, logged_user: dict[str, Any], user_project: dict[str, Any], fake_workbench_adjacency_list: dict[str, Any], @@ -365,9 +379,9 @@ async def test_start_stop_computation( assert "pipeline_id" in data assert data["pipeline_id"] == project_id - _assert_db_contents( + await _assert_db_contents( project_id, - postgres_db, + sqlalchemy_async_engine, fake_workbench_payload, fake_workbench_adjacency_list, check_outputs=False, @@ -378,7 +392,7 @@ async def test_start_stop_computation( ) # we need to wait until the webserver has updated the projects DB before starting another round await _assert_and_wait_for_comp_task_states_to_be_transmitted_in_projects( - project_id, postgres_db + project_id, sqlalchemy_async_engine ) # restart the computation, this should produce a 422 since the computation was complete resp = await client.post(f"{url_start}") @@ -404,7 +418,7 @@ async def test_start_stop_computation( ) # we need to wait until the webserver has updated the projects DB await _assert_and_wait_for_comp_task_states_to_be_transmitted_in_projects( - project_id, postgres_db + project_id, sqlalchemy_async_engine ) @@ -412,7 +426,7 @@ async def test_start_stop_computation( async def test_run_pipeline_and_check_state( client: TestClient, sleeper_service: dict[str, str], - postgres_db: sa.engine.Engine, + sqlalchemy_async_engine: AsyncEngine, # logged_user: dict[str, Any], user_project: dict[str, Any], fake_workbench_adjacency_list: dict[str, Any], @@ -436,9 +450,9 @@ async def test_run_pipeline_and_check_state( assert "pipeline_id" in data assert data["pipeline_id"] == project_id - _assert_db_contents( + await _assert_db_contents( project_id, - postgres_db, + sqlalchemy_async_engine, fake_workbench_payload, fake_workbench_adjacency_list, check_outputs=False, @@ -504,8 +518,8 @@ async def test_run_pipeline_and_check_state( f"--> pipeline completed with state {received_study_state=}! 
That's great: {json_dumps(attempt.retry_state.retry_object.statistics)}", ) assert pipeline_state == RunningState.SUCCESS - comp_tasks_in_db: dict[NodeIdStr, Any] = _get_computational_tasks_from_db( - project_id, postgres_db + comp_tasks_in_db: dict[NodeIdStr, Any] = await _get_computational_tasks_from_db( + project_id, sqlalchemy_async_engine ) is_success = [t.state == StateType.SUCCESS for t in comp_tasks_in_db.values()] assert all(is_success), ( @@ -514,7 +528,176 @@ async def test_run_pipeline_and_check_state( ) # we need to wait until the webserver has updated the projects DB await _assert_and_wait_for_comp_task_states_to_be_transmitted_in_projects( - project_id, postgres_db + project_id, sqlalchemy_async_engine ) print(f"<-- pipeline completed successfully in {time.monotonic() - start} seconds") + + +@pytest.fixture +async def populated_project_metadata( + client: TestClient, + logged_user: dict[str, Any], + user_project: dict[str, Any], + faker: Faker, + sqlalchemy_async_engine: AsyncEngine, +): + assert client.app + project_uuid = user_project["uuid"] + async with sqlalchemy_async_engine.begin() as con: + await con.execute( + projects_metadata.insert().values( + project_uuid=project_uuid, + custom={ + "job_name": "My Job Name", + "group_id": faker.uuid4(), + "group_name": "My Group Name", + }, + ) + ) + yield + async with sqlalchemy_async_engine.begin() as con: + await con.execute(projects_metadata.delete()) + await con.execute(comp_runs_collections.delete()) # cleanup + + +@pytest.mark.parametrize(*user_role_response(), ids=str) +async def test_start_multiple_computation_with_the_same_collection_run_id( + client: TestClient, + sleeper_service: dict[str, str], + sqlalchemy_async_engine: AsyncEngine, + populated_project_metadata: None, + logged_user: dict[str, Any], + user_project: dict[str, Any], + fake_workbench_adjacency_list: dict[str, Any], + user_role: UserRole, + expected: _ExpectedResponseTuple, +): + assert client.app + project_id = user_project["uuid"] + + url_start = client.app.router["start_computation"].url_for(project_id=project_id) + assert url_start == URL(f"/{API_VTAG}/computations/{project_id}:start") + + # POST /v0/computations/{project_id}:start + resp = await client.post(f"{url_start}") + await assert_status(resp, expected.created) + + resp = await client.post(f"{url_start}") + # starting again should be disallowed, since it's already running + assert resp.status == expected.confict + + # NOTE: This tests that there is only one entry in comp_runs_collections table created + # as the project metadata has the same group_id + + +@pytest.mark.parametrize(*user_role_response(), ids=str) +async def test_running_computation_sends_progress_updates_via_socketio( + client: TestClient, + sleeper_service: dict[str, str], + sqlalchemy_async_engine: AsyncEngine, + logged_user: dict[str, Any], + user_project: dict[str, Any], + fake_workbench_adjacency_list: dict[str, Any], + expected: _ExpectedResponseTuple, + create_socketio_connection: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] + ], + mocker: MockerFixture, +): + assert client.app + socket_io_conn, client_id = await create_socketio_connection(None, client) + mock_node_updated_handler = mocker.MagicMock() + socket_io_conn.on(SOCKET_IO_NODE_UPDATED_EVENT, handler=mock_node_updated_handler) + + project_id = user_project["uuid"] + + # NOTE: we need to open the project so that the computation pipeline messages are transmitted and we get all the node updates + url_open = 
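The populated_project_metadata fixture above seeds a row inside engine.begin() (which commits on exit) and wipes it again after the yield. A minimal sketch of that seed-and-cleanup fixture shape; table, columns and values are placeholders, and sqlalchemy_async_engine stands for the engine fixture used in these tests:

import pytest
import sqlalchemy as sa
from sqlalchemy.ext.asyncio import AsyncEngine

metadata = sa.MetaData()
projects_metadata = sa.Table(
    "projects_metadata",
    metadata,
    sa.Column("project_uuid", sa.String, primary_key=True),
    sa.Column("custom", sa.JSON),
)


@pytest.fixture
async def populated_project_metadata(sqlalchemy_async_engine: AsyncEngine):
    # setup: the seed row is committed when the begin() block exits
    async with sqlalchemy_async_engine.begin() as conn:
        await conn.execute(
            projects_metadata.insert().values(
                project_uuid="some-uuid", custom={"group_id": "g1"}
            )
        )
    yield
    # teardown: remove whatever the test left behind
    async with sqlalchemy_async_engine.begin() as conn:
        await conn.execute(projects_metadata.delete())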
client.app.router["open_project"].url_for(project_id=project_id) + assert url_open == URL(f"/{API_VTAG}/projects/{project_id}:open") + resp = await client.post(f"{url_open}", json=client_id) + data, error = await assert_status(resp, expected.ok) + + # start the computation + url_start = client.app.router["start_computation"].url_for(project_id=project_id) + assert url_start == URL(f"/{API_VTAG}/computations/{project_id}:start") + + # POST /v0/computations/{project_id}:start + resp = await client.post(f"{url_start}") + data, error = await assert_status(resp, status.HTTP_201_CREATED) + assert not error + + assert "pipeline_id" in data + assert data["pipeline_id"] == project_id + + await _assert_db_contents( + project_id, + sqlalchemy_async_engine, + user_project["workbench"], + fake_workbench_adjacency_list, + check_outputs=False, + ) + + # wait for the computation to complete successfully + await _assert_and_wait_for_pipeline_state( + client, + project_id, + RunningState.SUCCESS, + expected, + ) + + # check that the progress updates were sent + assert mock_node_updated_handler.call_count > 0, ( + "expected progress updates to be sent via socketio, " + f"but got {mock_node_updated_handler.call_count} calls" + ) + + # Get all computational nodes from the workbench (exclude file-picker nodes) + computational_node_ids = { + node_id + for node_id, node_data in user_project["workbench"].items() + if node_data.get("key", "").startswith("simcore/services/comp/") + } + + # Collect all node IDs that received progress updates + received_progress_node_ids = set() + for call_args in mock_node_updated_handler.call_args_list: + assert len(call_args[0]) == 1, ( + "expected the progress handler to be called with a single argument, " + f"but got {len(call_args[0])} arguments" + ) + message = call_args[0][0] + assert "node_id" in message + assert "project_id" in message + assert "data" in message + assert "errors" in message + node_data = TypeAdapter(Node).validate_python(message["data"]) + assert node_data + + received_progress_node_ids.add(message["node_id"]) + + # Verify that progress updates were sent for ALL computational nodes + missing_nodes = computational_node_ids - received_progress_node_ids + assert not missing_nodes, ( + f"expected progress updates for all computational nodes {computational_node_ids}, " + f"but missing updates for {missing_nodes}. " + f"Received updates for: {received_progress_node_ids}" + ) + + # check that a node update was sent for each computational node at the end that unlocks the node + node_id_data_map: dict[NodeID, list[Node]] = {} + for mock_call in mock_node_updated_handler.call_args_list: + node_id = NodeID(mock_call[0][0]["node_id"]) + node_data = TypeAdapter(Node).validate_python(mock_call[0][0]["data"]) + node_id_data_map.setdefault(node_id, []).append(node_data) + + for node_id, node_data_list in node_id_data_map.items(): + # find the last update for this node + last_node_data = node_data_list[-1] + assert last_node_data.state + assert last_node_data.state.current_status == RunningState.SUCCESS + assert last_node_data.state.lock_state + assert last_node_data.state.lock_state.locked is False, ( + f"expected node {node_id} to be unlocked at the end of the pipeline, " + "but it is still locked." 
+ ) diff --git a/services/web/server/tests/integration/01/test_garbage_collection.py b/services/web/server/tests/integration/01/test_garbage_collection.py index 0618647f01c4..fea1c6f9d4a8 100644 --- a/services/web/server/tests/integration/01/test_garbage_collection.py +++ b/services/web/server/tests/integration/01/test_garbage_collection.py @@ -3,6 +3,7 @@ # pylint: disable=unused-variable import asyncio +import contextlib import logging import re from collections.abc import AsyncIterable, Awaitable, Callable @@ -23,10 +24,13 @@ from aiohttp.test_utils import TestClient from aioresponses import aioresponses from models_library.groups import EVERYONE_GROUP_ID, StandardGroupCreate +from models_library.projects import ProjectID from models_library.projects_state import RunningState from pytest_mock import MockerFixture -from pytest_simcore.helpers.webserver_login import UserInfoDict, log_client_in -from pytest_simcore.helpers.webserver_projects import create_project, empty_project_data +from pytest_simcore.helpers import webserver_projects +from pytest_simcore.helpers.webserver_login import log_client_in +from pytest_simcore.helpers.webserver_users import UserInfoDict +from servicelib.aiohttp import status from servicelib.aiohttp.application import create_safe_application from settings_library.rabbit import RabbitSettings from settings_library.redis import RedisDatabase, RedisSettings @@ -41,15 +45,17 @@ from simcore_service_webserver.groups._groups_service import create_standard_group from simcore_service_webserver.groups.api import add_user_in_group from simcore_service_webserver.login.plugin import setup_login +from simcore_service_webserver.projects import _projects_repository from simcore_service_webserver.projects._crud_api_delete import get_scheduled_tasks from simcore_service_webserver.projects._groups_repository import ( update_or_insert_project_group, ) +from simcore_service_webserver.projects.exceptions import ProjectNotFoundError from simcore_service_webserver.projects.models import ProjectDict from simcore_service_webserver.projects.plugin import setup_projects from simcore_service_webserver.resource_manager.plugin import setup_resource_manager from simcore_service_webserver.resource_manager.registry import ( - UserSessionDict, + UserSession, get_registry, ) from simcore_service_webserver.rest.plugin import setup_rest @@ -63,15 +69,16 @@ log = logging.getLogger(__name__) pytest_simcore_core_services_selection = [ - "migration", + "migration", # NOTE: rebuild! "postgres", "rabbit", "redis", - "storage", + "storage", # NOTE: rebuild! 
] pytest_simcore_ops_services_selection = [ "minio", "adminer", + "redis-commander", ] @@ -98,11 +105,6 @@ async def _delete_all_redis_keys(redis_settings: RedisSettings): await client.aclose(close_connection_pool=True) -@pytest.fixture(scope="session") -def osparc_product_name() -> str: - return "osparc" - - @pytest.fixture async def director_v2_service_mock( mocker: MockerFixture, @@ -130,7 +132,7 @@ async def director_v2_service_mock( with aioresponses(passthrough=PASSTHROUGH_REQUESTS_PREFIXES) as mock: mock.get( get_computation_pattern, - status=202, + status=status.HTTP_202_ACCEPTED, payload={"state": str(RunningState.NOT_STARTED.value)}, repeat=True, ) @@ -177,7 +179,9 @@ async def client( setup_socketio(app) setup_projects(app) setup_director_v2(app) + assert setup_resource_manager(app) + setup_garbage_collector(app) return await aiohttp_client( @@ -190,43 +194,71 @@ async def client( def disable_garbage_collector_task(mocker: MockerFixture) -> mock.MagicMock: """patch the setup of the garbage collector so we can call it manually""" - async def _fake_background_task(app: web.Application): - # startup - await asyncio.sleep(0.1) - yield - # teardown - await asyncio.sleep(0.1) + def _fake_factory(): + async def _cleanup_ctx_fun(app: web.Application): + # startup + await asyncio.sleep(0.1) + yield + # teardown + await asyncio.sleep(0.1) + + return _cleanup_ctx_fun return mocker.patch( - "simcore_service_webserver.garbage_collector.plugin._tasks_core.run_background_task", - side_effect=_fake_background_task, + "simcore_service_webserver.garbage_collector.plugin._tasks_core.create_background_task_for_garbage_collection", + side_effect=_fake_factory, ) -async def login_user(client: TestClient): +async def login_user(client: TestClient, *, exit_stack: contextlib.AsyncExitStack): """returns a logged in regular user""" - return await log_client_in(client=client, user_data={"role": UserRole.USER.name}) + return await log_client_in( + client=client, user_data={"role": UserRole.USER.name}, exit_stack=exit_stack + ) -async def login_guest_user(client: TestClient): +async def login_guest_user( + client: TestClient, *, exit_stack: contextlib.AsyncExitStack +): """returns a logged in Guest user""" - return await log_client_in(client=client, user_data={"role": UserRole.GUEST.name}) + return await log_client_in( + client=client, user_data={"role": UserRole.GUEST.name}, exit_stack=exit_stack + ) -async def new_project( +async def _setup_project_cleanup( + client: TestClient, + project: dict[str, Any], + exit_stack: contextlib.AsyncExitStack, +) -> None: + """Helper function to setup project cleanup after test completion""" + + async def _delete_project(project_uuid): + assert client.app + with contextlib.suppress(ProjectNotFoundError): + # Sometimes the test deletes the project + await _projects_repository.delete_project( + client.app, project_uuid=project_uuid + ) + + exit_stack.push_async_callback(_delete_project, ProjectID(project["uuid"])) + + +async def create_standard_project( client: TestClient, user: UserInfoDict, product_name: str, tests_data_dir: Path, + exit_stack: contextlib.AsyncExitStack, access_rights: dict[str, Any] | None = None, ): """returns a project for the given user""" - project_data = empty_project_data() + project_data = webserver_projects.empty_project_data() if access_rights is not None: project_data["accessRights"] = access_rights assert client.app - project = await create_project( + project = await webserver_projects.create_project( client.app, project_data, user["id"], @@ 
-244,14 +276,17 @@ async def new_project( write=permissions["write"], delete=permissions["delete"], ) + + await _setup_project_cleanup(client, project, exit_stack) return project -async def get_template_project( +async def create_template_project( client: TestClient, user: UserInfoDict, product_name: str, project_data: ProjectDict, + exit_stack: contextlib.AsyncExitStack, access_rights=None, ): """returns a tempalte shared with all""" @@ -266,7 +301,7 @@ async def get_template_project( if access_rights is not None: project_data["accessRights"].update(access_rights) - return await create_project( + project = await webserver_projects.create_project( client.app, project_data, user["id"], @@ -274,6 +309,9 @@ async def get_template_project( default_project_json=None, ) + await _setup_project_cleanup(client, project, exit_stack) + return project + async def get_group(client: TestClient, user: UserInfoDict): """Creates a group for a given user""" @@ -316,23 +354,24 @@ async def change_user_role( class SioConnectionData(NamedTuple): sio: socketio.AsyncClient - resource_key: UserSessionDict + resource_key: UserSession async def connect_to_socketio( client: TestClient, - user, - socketio_client_factory: Callable[..., Awaitable[socketio.AsyncClient]], + user: UserInfoDict, + socketio_client_factory: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] + ], ) -> SioConnectionData: """Connect a user to a socket.io""" assert client.app socket_registry = get_registry(client.app) cur_client_session_id = f"{uuid4()}" - sio = await socketio_client_factory(cur_client_session_id, client) - resource_key: UserSessionDict = { - "user_id": str(user["id"]), - "client_session_id": cur_client_session_id, - } + sio, *_ = await socketio_client_factory(cur_client_session_id, client) + resource_key = UserSession( + user_id=user["id"], client_session_id=cur_client_session_id + ) sid = sio.get_sid() assert sid assert await socket_registry.find_keys(("socket_id", sid)) == [resource_key] @@ -422,7 +461,7 @@ async def assert_user_in_db( user_as_dict = dict(user) # some values need to be transformed - user_as_dict["role"] = user_as_dict["role"].value # type: ignore + user_as_dict["role"] = user_as_dict["role"] # type: ignore user_as_dict["status"] = user_as_dict["status"].value # type: ignore assert_dicts_match_by_common_keys(user_as_dict, logged_user) @@ -478,21 +517,28 @@ async def assert_one_owner_for_project( async def test_t1_while_guest_is_connected_no_resources_are_removed( disable_garbage_collector_task: None, client: TestClient, - socketio_client_factory: Callable, + create_socketio_connection: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] + ], aiopg_engine: aiopg.sa.engine.Engine, tests_data_dir: Path, osparc_product_name: str, + exit_stack: contextlib.AsyncExitStack, ): """while a GUEST user is connected GC will not remove none of its projects nor the user itself""" assert client.app - logged_guest_user = await login_guest_user(client) - empty_guest_user_project = await new_project( - client, logged_guest_user, osparc_product_name, tests_data_dir + logged_guest_user = await login_guest_user(client, exit_stack=exit_stack) + empty_guest_user_project = await create_standard_project( + client, + logged_guest_user, + osparc_product_name, + tests_data_dir, + exit_stack=exit_stack, ) await assert_users_count(aiopg_engine, 1) await assert_projects_count(aiopg_engine, 1) - await connect_to_socketio(client, logged_guest_user, 
socketio_client_factory) + await connect_to_socketio(client, logged_guest_user, create_socketio_connection) await asyncio.sleep(SERVICE_DELETION_DELAY + 1) await gc_core.collect_garbage(app=client.app) @@ -504,22 +550,29 @@ async def test_t1_while_guest_is_connected_no_resources_are_removed( async def test_t2_cleanup_resources_after_browser_is_closed( disable_garbage_collector_task: None, client: TestClient, - socketio_client_factory: Callable, + create_socketio_connection: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] + ], aiopg_engine: aiopg.sa.engine.Engine, tests_data_dir: Path, osparc_product_name: str, + exit_stack: contextlib.AsyncExitStack, ): """After a GUEST users with one opened project closes browser tab regularly (GC cleans everything)""" assert client.app - logged_guest_user = await login_guest_user(client) - empty_guest_user_project = await new_project( - client, logged_guest_user, osparc_product_name, tests_data_dir + logged_guest_user = await login_guest_user(client, exit_stack=exit_stack) + empty_guest_user_project = await create_standard_project( + client, + logged_guest_user, + osparc_product_name, + tests_data_dir, + exit_stack=exit_stack, ) await assert_users_count(aiopg_engine, 1) await assert_projects_count(aiopg_engine, 1) sio_connection_data = await connect_to_socketio( - client, logged_guest_user, socketio_client_factory + client, logged_guest_user, create_socketio_connection ) await asyncio.sleep(SERVICE_DELETION_DELAY + 1) await gc_core.collect_garbage(app=client.app) @@ -552,23 +605,32 @@ async def test_t2_cleanup_resources_after_browser_is_closed( async def test_t3_gc_will_not_intervene_for_regular_users_and_their_resources( client: TestClient, - socketio_client_factory: Callable, + create_socketio_connection: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] + ], aiopg_engine: aiopg.sa.engine.Engine, fake_project: dict, tests_data_dir: Path, osparc_product_name: str, + exit_stack: contextlib.AsyncExitStack, ): """after a USER disconnects the GC will remove none of its projects or templates nor the user itself""" number_of_projects = 5 number_of_templates = 5 - logged_user = await login_user(client) + logged_user = await login_user(client, exit_stack=exit_stack) user_projects = [ - await new_project(client, logged_user, osparc_product_name, tests_data_dir) + await create_standard_project( + client, logged_user, osparc_product_name, tests_data_dir, exit_stack + ) for _ in range(number_of_projects) ] user_template_projects = [ - await get_template_project( - client, logged_user, osparc_product_name, fake_project + await create_template_project( + client, + logged_user, + osparc_product_name, + fake_project, + exit_stack=exit_stack, ) for _ in range(number_of_templates) ] @@ -587,7 +649,7 @@ async def assert_projects_and_users_are_present() -> None: # connect the user and wait for gc sio_connection_data = await connect_to_socketio( - client, logged_user, socketio_client_factory + client, logged_user, create_socketio_connection ) await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) @@ -604,6 +666,7 @@ async def test_t4_project_shared_with_group_transferred_to_user_in_group_on_owne aiopg_engine: aiopg.sa.engine.Engine, tests_data_dir: Path, osparc_product_name: str, + exit_stack: contextlib.AsyncExitStack, ): """ USER "u1" creates a GROUP "g1" and invites USERS "u2" and "u3"; @@ -611,9 +674,9 @@ async def test_t4_project_shared_with_group_transferred_to_user_in_group_on_owne USER "u1" is 
manually marked as "GUEST"; EXPECTED: one of the users in the "g1" will become the new owner of the project and "u1" will be deleted """ - u1 = await login_user(client) - u2 = await login_user(client) - u3 = await login_user(client) + u1 = await login_user(client, exit_stack=exit_stack) + u2 = await login_user(client, exit_stack=exit_stack) + u3 = await login_user(client, exit_stack=exit_stack) # creating g1 and inviting u2 and u3 g1 = await get_group(client, u1) @@ -621,12 +684,13 @@ async def test_t4_project_shared_with_group_transferred_to_user_in_group_on_owne await invite_user_to_group(client, owner=u1, invitee=u3, group=g1) # u1 creates project and shares it with g1 - project = await new_project( + project = await create_standard_project( client, u1, osparc_product_name, tests_data_dir, access_rights={str(g1["gid"]): {"read": True, "write": True, "delete": False}}, + exit_stack=exit_stack, ) # mark u1 as guest @@ -648,15 +712,16 @@ async def test_t5_project_shared_with_other_users_transferred_to_one_of_them( aiopg_engine: aiopg.sa.engine.Engine, tests_data_dir: Path, osparc_product_name: str, + exit_stack: contextlib.AsyncExitStack, ): """ USER "u1" creates a project and shares it with "u2" and "u3"; USER "u1" is manually marked as "GUEST"; EXPECTED: one of "u2" or "u3" will become the new owner of the project and "u1" will be deleted """ - u1 = await login_user(client) - u2 = await login_user(client) - u3 = await login_user(client) + u1 = await login_user(client, exit_stack=exit_stack) + u2 = await login_user(client, exit_stack=exit_stack) + u3 = await login_user(client, exit_stack=exit_stack) q_u2 = await fetch_user_from_db(aiopg_engine, u2) assert q_u2 @@ -664,11 +729,12 @@ async def test_t5_project_shared_with_other_users_transferred_to_one_of_them( assert q_u3 # u1 creates project and shares it with g1 - project = await new_project( + project = await create_standard_project( client, u1, osparc_product_name, tests_data_dir, + exit_stack=exit_stack, access_rights={ str(q_u2["primary_gid"]): {"read": True, "write": True, "delete": False}, str(q_u3["primary_gid"]): {"read": True, "write": True, "delete": False}, @@ -694,6 +760,7 @@ async def test_t6_project_shared_with_group_transferred_to_last_user_in_group_on aiopg_engine: aiopg.sa.engine.Engine, tests_data_dir: Path, osparc_product_name: str, + exit_stack: contextlib.AsyncExitStack, ): """ USER "u1" creates a GROUP "g1" and invites USERS "u2" and "u3"; @@ -703,9 +770,9 @@ async def test_t6_project_shared_with_group_transferred_to_last_user_in_group_on the new owner either "u2" or "u3" will be manually marked as "GUEST"; EXPECTED: the GUEST user will be deleted and the project will pass to the last member of "g1" """ - u1 = await login_user(client) - u2 = await login_user(client) - u3 = await login_user(client) + u1 = await login_user(client, exit_stack=exit_stack) + u2 = await login_user(client, exit_stack=exit_stack) + u3 = await login_user(client, exit_stack=exit_stack) # creating g1 and inviting u2 and u3 g1 = await get_group(client, u1) @@ -713,11 +780,12 @@ async def test_t6_project_shared_with_group_transferred_to_last_user_in_group_on await invite_user_to_group(client, owner=u1, invitee=u3, group=g1) # u1 creates project and shares it with g1 - project = await new_project( + project = await create_standard_project( client, u1, osparc_product_name, tests_data_dir, + exit_stack=exit_stack, access_rights={str(g1["gid"]): {"read": True, "write": True, "delete": False}}, ) @@ -765,6 +833,7 @@ async def 
test_t7_project_shared_with_group_transferred_from_one_member_to_the_l aiopg_engine: aiopg.sa.engine.Engine, tests_data_dir: Path, osparc_product_name: str, + exit_stack: contextlib.AsyncExitStack, ): """ USER "u1" creates a GROUP "g1" and invites USERS "u2" and "u3"; @@ -777,9 +846,9 @@ async def test_t7_project_shared_with_group_transferred_from_one_member_to_the_l EXPECTED: the last user will be removed and the project will be removed """ assert client.app - u1 = await login_user(client) - u2 = await login_user(client) - u3 = await login_user(client) + u1 = await login_user(client, exit_stack=exit_stack) + u2 = await login_user(client, exit_stack=exit_stack) + u3 = await login_user(client, exit_stack=exit_stack) # creating g1 and inviting u2 and u3 g1 = await get_group(client, u1) @@ -787,11 +856,12 @@ async def test_t7_project_shared_with_group_transferred_from_one_member_to_the_l await invite_user_to_group(client, owner=u1, invitee=u3, group=g1) # u1 creates project and shares it with g1 - project = await new_project( + project = await create_standard_project( client, u1, osparc_product_name, tests_data_dir, + exit_stack=exit_stack, access_rights={str(g1["gid"]): {"read": True, "write": True, "delete": False}}, ) @@ -853,6 +923,7 @@ async def test_t8_project_shared_with_other_users_transferred_to_one_of_them_unt aiopg_engine: aiopg.sa.engine.Engine, tests_data_dir: Path, osparc_product_name: str, + exit_stack: contextlib.AsyncExitStack, ): """ USER "u1" creates a project and shares it with "u2" and "u3"; @@ -861,9 +932,9 @@ async def test_t8_project_shared_with_other_users_transferred_to_one_of_them_unt same as T5 => afterwards afterwards the new owner either "u2" or "u3" will be manually marked as "GUEST"; EXPECTED: the GUEST user will be deleted and the project will pass to the last member of "g1" """ - u1 = await login_user(client) - u2 = await login_user(client) - u3 = await login_user(client) + u1 = await login_user(client, exit_stack=exit_stack) + u2 = await login_user(client, exit_stack=exit_stack) + u3 = await login_user(client, exit_stack=exit_stack) q_u2 = await fetch_user_from_db(aiopg_engine, u2) assert q_u2 @@ -871,11 +942,12 @@ async def test_t8_project_shared_with_other_users_transferred_to_one_of_them_unt assert q_u3 # u1 creates project and shares it with g1 - project = await new_project( + project = await create_standard_project( client, u1, osparc_product_name, tests_data_dir, + exit_stack=exit_stack, access_rights={ str(q_u2["primary_gid"]): {"read": True, "write": True, "delete": False}, str(q_u3["primary_gid"]): {"read": True, "write": True, "delete": False}, @@ -927,6 +999,7 @@ async def test_t9_project_shared_with_other_users_transferred_between_them_and_t aiopg_engine: aiopg.sa.engine.Engine, tests_data_dir: Path, osparc_product_name: str, + exit_stack: contextlib.AsyncExitStack, ): """ USER "u1" creates a project and shares it with "u2" and "u3"; @@ -937,9 +1010,9 @@ async def test_t9_project_shared_with_other_users_transferred_between_them_and_t same as T8 => afterwards the last user will be marked as "GUEST"; EXPECTED: the last user will be removed and the project will be removed """ - u1 = await login_user(client) - u2 = await login_user(client) - u3 = await login_user(client) + u1 = await login_user(client, exit_stack=exit_stack) + u2 = await login_user(client, exit_stack=exit_stack) + u3 = await login_user(client, exit_stack=exit_stack) q_u2 = await fetch_user_from_db(aiopg_engine, u2) assert q_u2 @@ -947,11 +1020,12 @@ async def 
test_t9_project_shared_with_other_users_transferred_between_them_and_t assert q_u3 # u1 creates project and shares it with g1 - project = await new_project( + project = await create_standard_project( client, u1, osparc_product_name, tests_data_dir, + exit_stack=exit_stack, access_rights={ str(q_u2["primary_gid"]): {"read": True, "write": True, "delete": False}, str(q_u3["primary_gid"]): {"read": True, "write": True, "delete": False}, @@ -1014,6 +1088,7 @@ async def test_t10_owner_and_all_shared_users_marked_as_guests( aiopg_engine: aiopg.sa.engine.Engine, tests_data_dir: Path, osparc_product_name: str, + exit_stack: contextlib.AsyncExitStack, ): """ USER "u1" creates a project and shares it with "u2" and "u3"; @@ -1026,9 +1101,9 @@ async def test_t10_owner_and_all_shared_users_marked_as_guests( ) assert not gc_task.done() - u1 = await login_user(client) - u2 = await login_user(client) - u3 = await login_user(client) + u1 = await login_user(client, exit_stack=exit_stack) + u2 = await login_user(client, exit_stack=exit_stack) + u3 = await login_user(client, exit_stack=exit_stack) q_u2 = await fetch_user_from_db(aiopg_engine, u2) q_u3 = await fetch_user_from_db(aiopg_engine, u3) @@ -1036,11 +1111,12 @@ async def test_t10_owner_and_all_shared_users_marked_as_guests( assert q_u3 # u1 creates project and shares it with g1 - project = await new_project( + project = await create_standard_project( client, u1, osparc_product_name, tests_data_dir, + exit_stack=exit_stack, access_rights={ str(q_u2["primary_gid"]): {"read": True, "write": True, "delete": False}, str(q_u3["primary_gid"]): {"read": True, "write": True, "delete": False}, @@ -1067,6 +1143,7 @@ async def test_t11_owner_and_all_users_in_group_marked_as_guests( aiopg_engine: aiopg.sa.engine.Engine, tests_data_dir: Path, osparc_product_name: str, + exit_stack: contextlib.AsyncExitStack, ): """ USER "u1" creates a group and invites "u2" and "u3"; @@ -1074,9 +1151,9 @@ async def test_t11_owner_and_all_users_in_group_marked_as_guests( USER "u1", "u2" and "u3" are manually marked as "GUEST" EXPECTED: the project and all the users are removed """ - u1 = await login_user(client) - u2 = await login_user(client) - u3 = await login_user(client) + u1 = await login_user(client, exit_stack=exit_stack) + u2 = await login_user(client, exit_stack=exit_stack) + u3 = await login_user(client, exit_stack=exit_stack) # creating g1 and inviting u2 and u3 g1 = await get_group(client, u1) @@ -1084,11 +1161,12 @@ async def test_t11_owner_and_all_users_in_group_marked_as_guests( await invite_user_to_group(client, owner=u1, invitee=u3, group=g1) # u1 creates project and shares it with g1 - project = await new_project( + project = await create_standard_project( client, u1, osparc_product_name, tests_data_dir, + exit_stack=exit_stack, access_rights={str(g1["gid"]): {"read": True, "write": True, "delete": False}}, ) @@ -1101,7 +1179,8 @@ async def test_t11_owner_and_all_users_in_group_marked_as_guests( await assert_projects_count(aiopg_engine, 1) await assert_user_is_owner_of_project(aiopg_engine, u1, project) - await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) + # await asyncio.sleep(WAIT_FOR_COMPLETE_GC_CYCLE) + await gc_core.collect_garbage(app=client.app) - await assert_users_count(aiopg_engine, 0) + await assert_users_count(aiopg_engine, 0) # <-- MD: this is where the test fails await assert_projects_count(aiopg_engine, 0) diff --git a/services/web/server/tests/integration/conftest.py b/services/web/server/tests/integration/conftest.py index 70c50df9bd35..5fc7ea7b893c 
100644 --- a/services/web/server/tests/integration/conftest.py +++ b/services/web/server/tests/integration/conftest.py @@ -64,8 +64,11 @@ def webserver_environ( # the test webserver is built-up in webserver_service fixture that runs # on the host. EXCLUDED_SERVICES = [ + "api-worker", "dask-scheduler", "director", + "docker-api-proxy", + "dynamic-schdlr", "sto-worker", "sto-worker-cpu-bound", ] diff --git a/services/web/server/tests/unit/conftest.py b/services/web/server/tests/unit/conftest.py index d99275c43573..53b4892491e2 100644 --- a/services/web/server/tests/unit/conftest.py +++ b/services/web/server/tests/unit/conftest.py @@ -16,8 +16,8 @@ from aiohttp.test_utils import TestClient from models_library.products import ProductName from pytest_mock import MockFixture, MockType -from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.helpers.webserver_projects import NewProject, empty_project_data +from pytest_simcore.helpers.webserver_users import UserInfoDict from simcore_service_webserver.application_settings_utils import AppConfigDict from simcore_service_webserver.constants import FRONTEND_APP_DEFAULT from simcore_service_webserver.projects.models import ProjectDict diff --git a/services/web/server/tests/unit/isolated/conftest.py b/services/web/server/tests/unit/isolated/conftest.py index eccad058e53b..b91599649a12 100644 --- a/services/web/server/tests/unit/isolated/conftest.py +++ b/services/web/server/tests/unit/isolated/conftest.py @@ -5,7 +5,7 @@ import pytest from faker import Faker -from pytest_mock import MockerFixture +from pytest_mock import MockerFixture, MockType from pytest_simcore.helpers.monkeypatch_envs import ( setenvs_from_dict, setenvs_from_envfile, @@ -19,7 +19,7 @@ def dir_with_random_content(tmpdir, faker: Faker) -> Path: def make_files_in_dir(dir_path: Path, file_count: int) -> None: for _ in range(file_count): (dir_path / f"{faker.file_name(extension='bin')}").write_bytes( - os.urandom(random.randint(1, 10)) + os.urandom(random.randint(1, 10)) # noqa: S311 ) def ensure_dir(path_to_ensure: Path) -> Path: @@ -30,13 +30,13 @@ def make_subdirectory_with_content(subdir_name: Path, max_file_count: int) -> No subdir_name = ensure_dir(subdir_name) make_files_in_dir( dir_path=subdir_name, - file_count=random.randint(1, max_file_count), + file_count=random.randint(1, max_file_count), # noqa: S311 ) def make_subdirectories_with_content( subdir_name: Path, max_subdirectories_count: int, max_file_count: int ) -> None: - subdirectories_count = random.randint(1, max_subdirectories_count) + subdirectories_count = random.randint(1, max_subdirectories_count) # noqa: S311 for _ in range(subdirectories_count): make_subdirectory_with_content( subdir_name=subdir_name / f"{faker.word()}", @@ -241,19 +241,32 @@ def mocked_login_required(mocker: MockerFixture): # patches @login_required decorator # avoids having to start database etc... 
mocker.patch( - "simcore_service_webserver.login.decorators.check_user_authorized", + "simcore_service_webserver.login_auth.decorators.security_web.check_user_authorized", spec=True, return_value=user_id, ) mocker.patch( - "simcore_service_webserver.login.decorators.check_user_permission", + "simcore_service_webserver.login_auth.decorators.security_web.check_user_permission", spec=True, return_value=None, ) mocker.patch( - "simcore_service_webserver.login.decorators.products_web.get_product_name", + "simcore_service_webserver.login_auth.decorators.products_web.get_product_name", spec=True, return_value="osparc", ) + + +@pytest.fixture +def mocked_db_setup_in_setup_security(mocker: MockerFixture) -> MockType: + """Mocking avoids setting up a full db""" + import simcore_service_webserver.security.plugin + + return mocker.patch.object( + simcore_service_webserver.security.plugin, + "setup_db", + autospec=True, + return_value=True, + ) diff --git a/services/web/server/tests/unit/isolated/notifications/test_rabbitmq_consumers.py b/services/web/server/tests/unit/isolated/notifications/test_rabbitmq_consumers.py index 20d471553393..02c2d34dcbfa 100644 --- a/services/web/server/tests/unit/isolated/notifications/test_rabbitmq_consumers.py +++ b/services/web/server/tests/unit/isolated/notifications/test_rabbitmq_consumers.py @@ -80,17 +80,17 @@ async def test_regression_progress_message_parser( mocker: MockerFixture, raw_data: bytes, expected_socket_message: SocketMessageDict ): - send_messages_to_user_mock = mocker.patch( - "simcore_service_webserver.notifications._rabbitmq_exclusive_queue_consumers.send_message_to_user", + send_message_to_project_room_mock = mocker.patch( + "simcore_service_webserver.notifications._rabbitmq_exclusive_queue_consumers.send_message_to_project_room", autospec=True, ) app = AsyncMock() assert await _progress_message_parser(app, raw_data) - # tests how send_message_to_user is called - assert send_messages_to_user_mock.call_count == 1 - message = send_messages_to_user_mock.call_args.kwargs["message"] + # tests how send_message_to_project_room is called + assert send_message_to_project_room_mock.call_count == 1 + message = send_message_to_project_room_mock.call_args.kwargs["message"] # check that all fields are sent as expected assert message["data"] == expected_socket_message["data"] diff --git a/services/web/server/tests/unit/isolated/notifications/test_wallet_osparc_credits.py b/services/web/server/tests/unit/isolated/notifications/test_wallet_osparc_credits.py new file mode 100644 index 000000000000..227bfbe484cf --- /dev/null +++ b/services/web/server/tests/unit/isolated/notifications/test_wallet_osparc_credits.py @@ -0,0 +1,70 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-import +import asyncio +from unittest.mock import AsyncMock, patch + +import pytest +from models_library.wallets import WalletID +from simcore_service_webserver.notifications import wallet_osparc_credits + + +@pytest.fixture +def app_with_wallets(): + app = { + "wallet_subscription_lock": asyncio.Lock(), + "wallet_subscriptions": {}, + } + return app + + +@pytest.fixture +def wallet_id(): + return WalletID(1) + + +async def test_subscribe_first_and_second(app_with_wallets, wallet_id): + app = app_with_wallets + app["wallet_subscriptions"][wallet_id] = 0 + mock_rabbit = AsyncMock() + with patch( + "simcore_service_webserver.notifications.wallet_osparc_credits.get_rabbitmq_client", + return_value=mock_rabbit, + ): + await 
wallet_osparc_credits.subscribe(app, wallet_id) + mock_rabbit.add_topics.assert_awaited_once() + # Second subscribe should not call add_topics again + await wallet_osparc_credits.subscribe(app, wallet_id) + assert mock_rabbit.add_topics.await_count == 1 + assert app["wallet_subscriptions"][wallet_id] == 2 + + +async def test_unsubscribe_last_and_not_last(app_with_wallets, wallet_id): + app = app_with_wallets + app["wallet_subscriptions"][wallet_id] = 2 + mock_rabbit = AsyncMock() + with patch( + "simcore_service_webserver.notifications.wallet_osparc_credits.get_rabbitmq_client", + return_value=mock_rabbit, + ): + # Not last unsubscribe + await wallet_osparc_credits.unsubscribe(app, wallet_id) + mock_rabbit.remove_topics.assert_not_awaited() + assert app["wallet_subscriptions"][wallet_id] == 1 + # Last unsubscribe + await wallet_osparc_credits.unsubscribe(app, wallet_id) + mock_rabbit.remove_topics.assert_awaited_once() + assert app["wallet_subscriptions"][wallet_id] == 0 + + +async def test_unsubscribe_when_not_subscribed(app_with_wallets, wallet_id): + app = app_with_wallets + # wallet_id not present + mock_rabbit = AsyncMock() + with patch( + "simcore_service_webserver.notifications.wallet_osparc_credits.get_rabbitmq_client", + return_value=mock_rabbit, + ): + await wallet_osparc_credits.unsubscribe(app, wallet_id) + mock_rabbit.remove_topics.assert_not_awaited() + assert app["wallet_subscriptions"].get(wallet_id, 0) == 0 diff --git a/services/web/server/tests/unit/isolated/products/test_products_model.py b/services/web/server/tests/unit/isolated/products/test_products_model.py index 25411aa0e044..7eff7a461992 100644 --- a/services/web/server/tests/unit/isolated/products/test_products_model.py +++ b/services/web/server/tests/unit/isolated/products/test_products_model.py @@ -21,6 +21,7 @@ ) from simcore_postgres_database.models.products import products as products_table from simcore_service_webserver.products.models import Product +from simcore_service_webserver.statics._events import _get_product_data @pytest.mark.parametrize( @@ -40,20 +41,27 @@ def test_all_products_models_examples( if "registration_email_template" in example_data: assert model_instance.get_template_name_for("registration_email.jinja2") + if model_instance.vendor and "ui" in model_instance.vendor: + assert model_instance.vendor["ui"]["strong_color"] + assert model_instance.vendor["ui"]["logo_url"] + def test_product_to_static(): product = Product.model_validate(Product.model_json_schema()["examples"][0]) - assert product.to_statics() == { + product_data = _get_product_data(product) + assert product_data == { "displayName": "o²S²PARC", "supportEmail": "support@osparc.io", } product = Product.model_validate(Product.model_json_schema()["examples"][2]) - assert product.to_statics() == { + product_data = _get_product_data(product) + assert product_data == { "displayName": "o²S²PARC FOO", "supportEmail": "foo@osparcf.io", + "supportStandardGroupId": 67890, "vendor": { "copyright": "© ACME correcaminos", "name": "ACME", @@ -63,7 +71,6 @@ def test_product_to_static(): "ui": { "logo_url": "https://acme.com/logo", "strong_color": "#123456", - "project_alias": "study", }, }, "issues": [ diff --git a/services/web/server/tests/unit/isolated/test_activity.py b/services/web/server/tests/unit/isolated/test_activity.py index 8f041245f998..2eaf7818f2ad 100644 --- a/services/web/server/tests/unit/isolated/test_activity.py +++ b/services/web/server/tests/unit/isolated/test_activity.py @@ -10,7 +10,7 @@ import pytest from 
aiohttp.client_exceptions import ClientConnectionError from aiohttp.test_utils import TestClient -from pytest_mock import MockerFixture +from pytest_mock import MockerFixture, MockType from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict @@ -22,7 +22,6 @@ setup_settings, ) from simcore_service_webserver.rest.plugin import setup_rest -from simcore_service_webserver.security.plugin import setup_security from simcore_service_webserver.session.plugin import setup_session @@ -70,6 +69,8 @@ def app_environment( "POSTGRES_HOST": "postgres", "POSTGRES_MAXSIZE": "10", "POSTGRES_MINSIZE": "10", + "POSTGRES_MAX_POOLSIZE": "10", + "POSTGRES_MAX_OVERFLOW": "20", "POSTGRES_PASSWORD": "simcore", "POSTGRES_PORT": "5432", "POSTGRES_USER": "simcore", @@ -101,6 +102,7 @@ async def client( aiohttp_client: Callable[..., Awaitable[TestClient]], mock_orphaned_services: MagicMock, app_environment: EnvVarsDict, + mocked_db_setup_in_setup_security: MockType, ): # app_environment are in place assert {key: os.environ[key] for key in app_environment} == app_environment @@ -112,8 +114,9 @@ async def client( assert expected_activity_settings == settings.WEBSERVER_ACTIVITY setup_session(app) - setup_security(app) setup_rest(app) + assert mocked_db_setup_in_setup_security.called + assert setup_activity(app) return await aiohttp_client(app) diff --git a/services/web/server/tests/unit/isolated/test_application_settings.py b/services/web/server/tests/unit/isolated/test_application_settings.py index 02394063061f..4c662ed12d66 100644 --- a/services/web/server/tests/unit/isolated/test_application_settings.py +++ b/services/web/server/tests/unit/isolated/test_application_settings.py @@ -73,6 +73,9 @@ def test_settings_to_client_statics(app_settings: ApplicationSettings): def test_settings_to_client_statics_plugins( mock_webserver_service_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch ): + monkeypatch.delenv("WEBSERVER_REALTIME_COLLABORATION", raising=False) + + # explicitly disable these plugins disable_plugins = { "WEBSERVER_EXPORTER", "WEBSERVER_SCICRUNCH", @@ -82,12 +85,21 @@ def test_settings_to_client_statics_plugins( for name in disable_plugins: monkeypatch.setenv(name, "null") + # explicitly disable WEBSERVER_FOLDERS monkeypatch.setenv("WEBSERVER_FOLDERS", "0") disable_plugins.add("WEBSERVER_FOLDERS") + # set WEBSERVER_REALTIME_COLLABORATION (NOTE: for now WEBSERVER_DEV_FEATURES_ENABLED=True) ) + monkeypatch.setenv( + "WEBSERVER_REALTIME_COLLABORATION", '{"RTC_MAX_NUMBER_OF_USERS":3}' + ) + settings = ApplicationSettings.create_from_envs() - statics = settings.to_client_statics() + assert settings.WEBSERVER_DEV_FEATURES_ENABLED + # ------------- + + statics = settings.to_client_statics() print("STATICS:\n", json_dumps(statics, indent=1)) assert settings.WEBSERVER_LOGIN @@ -111,6 +123,15 @@ def test_settings_to_client_statics_plugins( assert statics["vcsReleaseTag"] assert TypeAdapter(HttpUrl).validate_python(statics["vcsReleaseUrl"]) + # check WEBSERVER_REALTIME_COLLABORATION enabled + assert "WEBSERVER_REALTIME_COLLABORATION" not in statics["pluginsDisabled"] + assert settings.WEBSERVER_REALTIME_COLLABORATION + assert ( + statics["webserverRealtimeCollaboration"]["RTC_MAX_NUMBER_OF_USERS"] + == settings.WEBSERVER_REALTIME_COLLABORATION.RTC_MAX_NUMBER_OF_USERS + ) + + # check disabled plugins assert set(statics["pluginsDisabled"]) == (disable_plugins) @@ -193,3 +214,12 @@ def 
test_backwards_compatibility_with_bool_env_vars_turned_into_objects( settings = ApplicationSettings.create_from_envs() assert settings.WEBSERVER_LICENSES is None + + +def test_valid_application_settings(mock_webserver_service_environment: EnvVarsDict): + assert mock_webserver_service_environment + + settings = ApplicationSettings() # type: ignore + assert settings + + assert settings == ApplicationSettings.create_from_envs() diff --git a/services/web/server/tests/unit/isolated/test_diagnostics.py b/services/web/server/tests/unit/isolated/test_diagnostics.py deleted file mode 100644 index fdd08db062d3..000000000000 --- a/services/web/server/tests/unit/isolated/test_diagnostics.py +++ /dev/null @@ -1,61 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable -# pylint: disable=too-many-arguments - -from unittest.mock import Mock - -import pytest -from pytest_mock import MockerFixture -from servicelib.aiohttp.application_setup import APP_SETUP_COMPLETED_KEY -from simcore_service_webserver.application_settings import setup_settings -from simcore_service_webserver.diagnostics.plugin import setup_diagnostics -from simcore_service_webserver.rest.plugin import setup_rest - - -class MockApp(dict): - middlewares = [] - cleanup_ctx = [] - router = Mock() - _overriden = [] - - def __setitem__(self, key, value): - if key in self: - current_value = self.__getitem__(key) - self._overriden.append((key, value)) - - print(f"ERROR app['{key}'] = {current_value} overriden with {value}") - - super().__setitem__(key, value) - - def assert_none_overriden(self): - assert not self._overriden - - def add_routes(self, *args, **kwargs): - self.router.add_routes(*args, **kwargs) - - -@pytest.fixture -def app_mock(mocker: MockerFixture): - app = MockApp() - - # emulates security is initialized - app[APP_SETUP_COMPLETED_KEY] = ["simcore_service_webserver.security"] - - mocker.patch("simcore_service_webserver.rest.plugin.api_doc") - - return app - - -def test_unique_application_keys(app_mock, mock_env_devel_environment: dict[str, str]): - setup_settings(app_mock) - setup_rest(app_mock) - setup_diagnostics(app_mock) - - for key, value in app_mock.items(): - print(f"app['{key}'] = {value}") - - assert any(key for key in app_mock if "diagnostics" in key) - - # this module has A LOT of constants and it is easy to override them - app_mock.assert_none_overriden() diff --git a/services/web/server/tests/unit/isolated/test_diagnostics_healthcheck.py b/services/web/server/tests/unit/isolated/test_diagnostics_healthcheck.py index ab0883aaa69e..8aa41b00d6a6 100644 --- a/services/web/server/tests/unit/isolated/test_diagnostics_healthcheck.py +++ b/services/web/server/tests/unit/isolated/test_diagnostics_healthcheck.py @@ -14,6 +14,7 @@ import simcore_service_webserver from aiohttp import web from aiohttp.test_utils import TestClient +from pytest_mock import MockType from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict @@ -29,7 +30,6 @@ from simcore_service_webserver.diagnostics.plugin import setup_diagnostics from simcore_service_webserver.diagnostics.settings import DiagnosticsSettings from simcore_service_webserver.rest.plugin import setup_rest -from simcore_service_webserver.security.plugin import setup_security from tenacity import retry from tenacity.before import before_log from tenacity.stop import stop_after_attempt @@ -104,6 
+104,7 @@ def mock_environment( @pytest.fixture async def client( + mocked_db_setup_in_setup_security: MockType, unused_tcp_port_factory: Callable, aiohttp_client: Callable[..., Awaitable[TestClient]], api_version_prefix: str, @@ -155,7 +156,6 @@ async def delay_response(request: web.Request): # activates some sub-modules assert setup_settings(app) - setup_security(app) setup_rest(app) setup_diagnostics(app) @@ -188,7 +188,7 @@ async def test_healthy_app(client: TestClient, api_version_prefix: str): assert data assert not error - assert data["name"] == "simcore_service_webserver" + assert data["name"] == simcore_service_webserver._meta.APP_NAME assert data["version"] == simcore_service_webserver._meta.__version__ diff --git a/services/web/server/tests/unit/isolated/test_dynamic_scheduler.py b/services/web/server/tests/unit/isolated/test_dynamic_scheduler.py index 1810e48493a7..507a3eb19c86 100644 --- a/services/web/server/tests/unit/isolated/test_dynamic_scheduler.py +++ b/services/web/server/tests/unit/isolated/test_dynamic_scheduler.py @@ -59,11 +59,9 @@ def dynamic_service_start() -> DynamicServiceStart: NodeGet.model_validate(x) for x in NodeGet.model_config["json_schema_extra"]["examples"] ], - NodeGetIdle.model_validate( - NodeGetIdle.model_config["json_schema_extra"]["example"] - ), + NodeGetIdle.model_validate(NodeGetIdle.model_json_schema()["examples"][0]), DynamicServiceGet.model_validate( - DynamicServiceGet.model_config["json_schema_extra"]["examples"][0] + DynamicServiceGet.model_json_schema()["examples"][0] ), ], ) @@ -108,7 +106,7 @@ async def test_get_service_status_raises_rpc_server_error( for x in NodeGet.model_config["json_schema_extra"]["examples"] ], DynamicServiceGet.model_validate( - DynamicServiceGet.model_config["json_schema_extra"]["examples"][0] + DynamicServiceGet.model_json_schema()["examples"][0] ), ], ) diff --git a/services/web/server/tests/unit/isolated/test_exception_handling.py b/services/web/server/tests/unit/isolated/test_exception_handling.py index 775fe452a213..1b7052afb1e6 100644 --- a/services/web/server/tests/unit/isolated/test_exception_handling.py +++ b/services/web/server/tests/unit/isolated/test_exception_handling.py @@ -98,7 +98,7 @@ async def _handler(request: web.Request): app.add_routes(routes) # 3. testing from the client side - client: TestClient = await aiohttp_client(app) + client = await aiohttp_client(app) # success resp = await client.post("/ok") diff --git a/services/web/server/tests/unit/isolated/test_exception_handling_factory.py b/services/web/server/tests/unit/isolated/test_exception_handling_factory.py index e87ef0b53c37..022eb12c62ae 100644 --- a/services/web/server/tests/unit/isolated/test_exception_handling_factory.py +++ b/services/web/server/tests/unit/isolated/test_exception_handling_factory.py @@ -13,6 +13,7 @@ from aiohttp.test_utils import make_mocked_request from servicelib.aiohttp import status from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON +from servicelib.status_codes_utils import get_code_display_name from simcore_service_webserver.errors import WebServerBaseError from simcore_service_webserver.exception_handling._base import ( ExceptionHandlingContextManager, @@ -28,16 +29,13 @@ # Some custom errors in my service -class BaseError(WebServerBaseError): - ... +class BaseError(WebServerBaseError): ... -class OneError(BaseError): - ... +class OneError(BaseError): ... -class OtherError(BaseError): - ... +class OtherError(BaseError): ... 
@pytest.fixture @@ -58,7 +56,7 @@ async def test_factory__create_exception_handler_from_http_error( response = await one_error_to_404(fake_request, caught) assert response.status == status.HTTP_404_NOT_FOUND assert response.text is not None - assert "one error message" in response.reason + assert response.reason == get_code_display_name(response.status) assert response.content_type == MIMETYPE_APPLICATION_JSON @@ -82,9 +80,7 @@ async def test_handling_different_exceptions_with_context( response = cm.get_response_or_none() assert response is not None assert response.status == status.HTTP_400_BAD_REQUEST - assert response.reason == exc_to_http_error_map[OneError].msg_template.format( - code="WebServerBaseError.BaseError.OneError" - ) + assert response.reason == get_code_display_name(response.status) assert not caplog.records # unhandled -> reraises @@ -103,9 +99,7 @@ async def test_handling_different_exceptions_with_context( response = cm.get_response_or_none() assert response is not None assert response.status == status.HTTP_500_INTERNAL_SERVER_ERROR - assert response.reason == exc_to_http_error_map[OtherError].msg_template.format( - code="WebServerBaseError.BaseError.OtherError" - ) + assert response.reason == get_code_display_name(response.status) assert caplog.records, "Expected 5XX troubleshooting logged as error" assert caplog.records[0].levelno == logging.ERROR diff --git a/services/web/server/tests/unit/isolated/test_garbage_collector_core.py b/services/web/server/tests/unit/isolated/test_garbage_collector_core.py index b944b0d93c12..3c178d69a64a 100644 --- a/services/web/server/tests/unit/isolated/test_garbage_collector_core.py +++ b/services/web/server/tests/unit/isolated/test_garbage_collector_core.py @@ -21,7 +21,7 @@ from simcore_service_webserver.garbage_collector._core_orphans import ( remove_orphaned_services, ) -from simcore_service_webserver.resource_manager.registry import UserSessionDict +from simcore_service_webserver.resource_manager.registry import UserSession from simcore_service_webserver.users.exceptions import UserNotFoundError MODULE_GC_CORE_ORPHANS: Final[str] = ( @@ -36,7 +36,7 @@ def project_id(faker: Faker) -> ProjectID: @pytest.fixture def client_session_id(faker: Faker) -> str: - return faker.uuid4(cast_to=None) + return faker.uuid4(cast_to=str) @pytest.fixture @@ -44,9 +44,9 @@ def mock_registry( user_id: UserID, project_id: ProjectID, client_session_id: str ) -> mock.AsyncMock: async def _fake_get_all_resource_keys() -> ( - tuple[list[UserSessionDict], list[UserSessionDict]] + tuple[list[UserSession], list[UserSession]] ): - return ([{"user_id": user_id, "client_session_id": client_session_id}], []) + return ([UserSession(user_id=user_id, client_session_id=client_session_id)], []) registry = mock.AsyncMock() registry.get_all_resource_keys = mock.AsyncMock( @@ -124,7 +124,7 @@ async def test_remove_orphaned_services_with_no_running_services_does_nothing( def faker_dynamic_service_get() -> Callable[[], DynamicServiceGet]: def _() -> DynamicServiceGet: return DynamicServiceGet.model_validate( - DynamicServiceGet.model_config["json_schema_extra"]["examples"][1] + DynamicServiceGet.model_json_schema()["examples"][1] ) return _ @@ -156,7 +156,9 @@ async def mock_get_user_role( mocker: MockerFixture, user_role: UserRole ) -> mock.AsyncMock: return mocker.patch( - f"{MODULE_GC_CORE_ORPHANS}.get_user_role", autospec=True, return_value=user_role + f"{MODULE_GC_CORE_ORPHANS}.users_service.get_user_role", + autospec=True, + return_value=user_role, ) diff --git 
a/services/web/server/tests/unit/isolated/test_models.py b/services/web/server/tests/unit/isolated/test_models.py new file mode 100644 index 000000000000..04e22afc6843 --- /dev/null +++ b/services/web/server/tests/unit/isolated/test_models.py @@ -0,0 +1,69 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + + +import pytest +from faker import Faker +from pydantic import TypeAdapter, ValidationError +from pytest_simcore.helpers.faker_factories import random_phone_number +from simcore_service_webserver.users._controller.rest._rest_schemas import ( + MyPhoneRegister, + PhoneNumberStr, +) + + +@pytest.mark.parametrize( + "phone", + [ + "+41763456789", + "+19104630364", + "+1 301-304-4567", + "+41763456686", + "+19104630873", + "+19104630424", + "+34 950 453 772", + "+19104630700", + "+13013044719", + ], +) +def test_valid_phone_numbers(phone: str): + # This test is used to tune options of PhoneNumberValidator + assert MyPhoneRegister.model_validate({"phone": phone}).phone == TypeAdapter( + PhoneNumberStr + ).validate_python(phone) + + +def test_random_phone_number(): + # This test is used to tune options of PhoneNumberValidator + for _ in range(10): + phone = random_phone_number(Faker(seed=42)) + assert MyPhoneRegister.model_validate({"phone": phone}).phone == TypeAdapter( + PhoneNumberStr + ).validate_python(phone) + + +@pytest.mark.parametrize( + "phone", + [ + "+41763456789", + "+41 76 345 67 89", + "tel:+41-76-345-67-89", + ], + ids=["E.164", "INTERNATIONAL", "RFC3966"], +) +def test_autoformat_phone_number_to_e164(phone: str): + # This test is used to tune options of PhoneNumberValidator formatting to E164 + assert TypeAdapter(PhoneNumberStr).validate_python(phone) == "+41763456789" + + +@pytest.mark.parametrize( + "phone", + ["41763456789", "+09104630364", "+1 111-304-4567"], +) +def test_invalid_phone_numbers(phone: str): + # This test is used to tune options of PhoneNumberValidator + with pytest.raises(ValidationError): + MyPhoneRegister.model_validate({"phone": phone}) diff --git a/services/web/server/tests/unit/isolated/test_projects__db_utils.py b/services/web/server/tests/unit/isolated/test_projects__db_utils.py index 7f42f14f23b4..1d8349fb2c7e 100644 --- a/services/web/server/tests/unit/isolated/test_projects__db_utils.py +++ b/services/web/server/tests/unit/isolated/test_projects__db_utils.py @@ -22,12 +22,12 @@ convert_to_schema_names, create_project_access_rights, patch_workbench, - update_workbench, ) from simcore_service_webserver.projects._projects_repository_legacy_utils import ( DB_EXCLUSIVE_COLUMNS, SCHEMA_NON_NULL_KEYS, assemble_array_groups, + update_workbench, ) from simcore_service_webserver.projects.exceptions import ( NodeNotFoundError, diff --git a/services/web/server/tests/unit/isolated/test_projects__nodes_models_adapters.py b/services/web/server/tests/unit/isolated/test_projects__nodes_models_adapters.py new file mode 100644 index 000000000000..af56734647cb --- /dev/null +++ b/services/web/server/tests/unit/isolated/test_projects__nodes_models_adapters.py @@ -0,0 +1,62 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + + +from typing import Any +from uuid import UUID + +import pytest +from faker import Faker +from models_library.projects_nodes import Node +from 
simcore_postgres_database.utils_projects_nodes import ( + ProjectNode, + ProjectNodeCreate, +) +from simcore_service_webserver.projects import _nodes_models_adapters + +_NODE_DOMAIN_MODEL_DICT_EXAMPLES = Node.model_json_schema()["examples"] + + +@pytest.mark.parametrize( + "node_data", + _NODE_DOMAIN_MODEL_DICT_EXAMPLES, + ids=[f"example-{i}" for i in range(len(_NODE_DOMAIN_MODEL_DICT_EXAMPLES))], +) +def test_adapters_between_different_node_models( + node_data: dict[str, Any], faker: Faker +): + # dict -> to Node (from models_library) + node_id = UUID(faker.uuid4()) + node = Node.model_validate(node_data) + + # Node -> ProjectNodeCreate (from simcore_postgres_database) using adapters + project_node_create = _nodes_models_adapters.project_node_create_from_node( + node, node_id + ) + assert isinstance(project_node_create, ProjectNodeCreate) + assert project_node_create.node_id == node_id + + # Node -> ProjectNode (from simcore_postgres_database) using adapters + project_node = _nodes_models_adapters.project_node_from_node( + node, + node_id, + created=faker.date_time(), + modified=faker.date_time(), + ) + + assert isinstance(project_node, ProjectNode) + assert project_node.node_id == node_id + assert project_node.created != project_node.modified + assert project_node_create.node_id == node_id + + # ProjectNodeCreate -> Node (from models_library) using adapters + assert ( + _nodes_models_adapters.node_from_project_node_create(project_node_create) + == node + ) + + # ProjectNode -> Node (from models_library) using adapters + assert _nodes_models_adapters.node_from_project_node(project_node) == node diff --git a/services/web/server/tests/unit/isolated/test_projects_utils.py b/services/web/server/tests/unit/isolated/test_projects_utils.py index 0178882d760d..1dffd70ad633 100644 --- a/services/web/server/tests/unit/isolated/test_projects_utils.py +++ b/services/web/server/tests/unit/isolated/test_projects_utils.py @@ -22,6 +22,7 @@ "test_data_file_name", [ "fake-project.json", + "fake-project-with-conversation.json", "fake-template-projects.hack08.notebooks.json", "fake-template-projects.isan.2dplot.json", "fake-template-projects.isan.matward.json", @@ -51,6 +52,13 @@ def test_clone_project_document( for clone_node_id in clone["workbench"]: assert clone_node_id not in node_ids + # checks no conversation have been copied + if "ui" in clone and "annotations" in clone["ui"]: + assert not any( + annotation["type"] == "conversation" + for annotation in clone["ui"]["annotations"].values() + ) + # Here we do not use anymore jsonschema.validator since ... 
# # "OpenAPI 3.0 does not have an explicit null type as in JSON Schema, but you can use nullable: diff --git a/services/web/server/tests/unit/isolated/test_rest.py b/services/web/server/tests/unit/isolated/test_rest.py index 02094f11dc29..1bc502a3e976 100644 --- a/services/web/server/tests/unit/isolated/test_rest.py +++ b/services/web/server/tests/unit/isolated/test_rest.py @@ -9,7 +9,7 @@ import pytest from aiohttp import web from aiohttp.test_utils import TestClient -from pytest_mock import MockerFixture +from pytest_mock import MockerFixture, MockType from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.aiohttp import status @@ -26,6 +26,7 @@ async def client( api_version_prefix: str, mock_env_devel_environment: EnvVarsDict, mock_env_deployer_pipeline: EnvVarsDict, + mocked_db_setup_in_setup_security: MockType, ) -> TestClient: app = create_safe_application() diff --git a/services/web/server/tests/unit/isolated/test_security__authz.py b/services/web/server/tests/unit/isolated/test_security__authz.py index 2fb280cf25f9..e22d2e91fd2c 100644 --- a/services/web/server/tests/unit/isolated/test_security__authz.py +++ b/services/web/server/tests/unit/isolated/test_security__authz.py @@ -28,8 +28,8 @@ ROLES_PERMISSIONS, UserRole, ) -from simcore_service_webserver.security._authz_db import AuthInfoDict from simcore_service_webserver.security._authz_policy import AuthorizationPolicy +from simcore_service_webserver.security._authz_repository import ActiveUserIdAndRole @pytest.fixture @@ -90,6 +90,38 @@ def _can_update_inputs(context): return RoleBasedAccessModel.from_rawdata(fake_roles_permissions) +async def test_operation_in_role_check(access_model: RoleBasedAccessModel): + """Tests the branch where operation is in role_access.check in the can method""" + R = UserRole # alias + + # The "study.pipeline.node.inputs.update" operation has a check function in ANONYMOUS role + + # Test with proper context + current_data = {"workbench": {}} + candidate_data = {"workbench": {}} # no changes + context = {"current": current_data, "candidate": candidate_data} + + assert await access_model.can( + R.ANONYMOUS, "study.pipeline.node.inputs.update", context=context + ) + + # Test with invalid context that would make the check function fail + invalid_context = {"wrong_key": "value"} # missing expected keys + assert not await access_model.can( + R.ANONYMOUS, "study.pipeline.node.inputs.update", context=invalid_context + ) + + # Test with None context (should fail safely) + assert not await access_model.can( + R.ANONYMOUS, "study.pipeline.node.inputs.update", context=None + ) + + # Test inheritance - USER role inherits ANONYMOUS role's check function + assert await access_model.can( + R.USER, "study.pipeline.node.inputs.update", context=context + ) + + def test_unique_permissions(): used = [] for role in ROLES_PERMISSIONS: @@ -226,31 +258,19 @@ async def test_check_access_expressions(access_model: RoleBasedAccessModel): assert await has_access_by_role(access_model, R.ANONYMOUS, "study.stop") - assert await has_access_by_role( - access_model, R.ANONYMOUS, "study.stop |study.node.create" - ) - - assert not await has_access_by_role( - access_model, R.ANONYMOUS, "study.stop & study.node.create" - ) - - assert await has_access_by_role( - access_model, R.USER, "study.stop & study.node.create" - ) - @pytest.fixture def mock_db(mocker: MockerFixture) -> MagicMock: mocker.patch( - 
"simcore_service_webserver.security._authz_policy.get_database_engine", + "simcore_service_webserver.security._authz_policy.get_async_engine", autospec=True, return_value="FAKE-ENGINE", ) - users_db: dict[str, AuthInfoDict] = { - "foo@email.com": AuthInfoDict(id=1, role=UserRole.GUEST), - "bar@email.com": AuthInfoDict(id=55, role=UserRole.GUEST), + users_db: dict[str, ActiveUserIdAndRole] = { + "foo@email.com": ActiveUserIdAndRole(id=1, role=UserRole.GUEST), + "bar@email.com": ActiveUserIdAndRole(id=55, role=UserRole.GUEST), } async def _fake_db(engine, email): @@ -263,7 +283,7 @@ async def _fake_db(engine, email): return copy.deepcopy(users_db.get(email)) mock_db_fun = mocker.patch( - "simcore_service_webserver.security._authz_policy.get_active_user_or_none", + "simcore_service_webserver.security._authz_policy._authz_repository.get_active_user_or_none", autospec=True, side_effect=_fake_db, ) @@ -280,22 +300,23 @@ async def test_authorization_policy_cache(mocker: MockerFixture, mock_db: MagicM # cache under test # pylint: disable=no-member - autz_cache: BaseCache = authz_policy._get_auth_or_none.cache + autz_cache: BaseCache = authz_policy._get_authorized_user_or_none.cache - assert not (await autz_cache.exists("_get_auth_or_none/foo@email.com")) + assert not (await autz_cache.exists("_get_authorized_user_or_none/foo@email.com")) for _ in range(3): - got = await authz_policy._get_auth_or_none(email="foo@email.com") + got = await authz_policy._get_authorized_user_or_none(email="foo@email.com") assert mock_db.call_count == 1 assert got["id"] == 1 - assert await autz_cache.exists("_get_auth_or_none/foo@email.com") + assert await autz_cache.exists("_get_authorized_user_or_none/foo@email.com") # new value in db mock_db.users_db["foo@email.com"]["id"] = 2 - assert (await autz_cache.get("_get_auth_or_none/foo@email.com"))["id"] == 1 + got = await autz_cache.get("_get_authorized_user_or_none/foo@email.com") + assert got["id"] == 1 # gets cache, db is NOT called - got = await authz_policy._get_auth_or_none(email="foo@email.com") + got = await authz_policy._get_authorized_user_or_none(email="foo@email.com") assert mock_db.call_count == 1 assert got["id"] == 1 @@ -303,19 +324,79 @@ async def test_authorization_policy_cache(mocker: MockerFixture, mock_db: MagicM await authz_policy.clear_cache() # gets new value - got = await authz_policy._get_auth_or_none(email="foo@email.com") + got = await authz_policy._get_authorized_user_or_none(email="foo@email.com") assert mock_db.call_count == 2 assert got["id"] == 2 # other email has other key - assert not (await autz_cache.exists("_get_auth_or_none/bar@email.com")) + assert not (await autz_cache.exists("_get_authorized_user_or_none/bar@email.com")) for _ in range(4): # NOTE: None - assert await authz_policy._get_auth_or_none(email="bar@email.com") - assert await autz_cache.exists("_get_auth_or_none/bar@email.com") + assert await authz_policy._get_authorized_user_or_none(email="bar@email.com") + assert await autz_cache.exists("_get_authorized_user_or_none/bar@email.com") assert mock_db.call_count == 3 # should raise web.HTTPServiceUnavailable on db failure with pytest.raises(web.HTTPServiceUnavailable): - await authz_policy._get_auth_or_none(email="db-failure@email.com") + await authz_policy._get_authorized_user_or_none(email="db-failure@email.com") + + +async def test_operation_with_check_callbacks(access_model: RoleBasedAccessModel): + """Tests operations with different types of check callbacks""" + R = UserRole # alias + + # Add a synchronous check callback 
+ def sync_check(context) -> bool: + return context.get("allowed", False) if context else False + + # Add an async check callback + async def async_check(context) -> bool: + return context.get("allowed", False) if context else False + + # Add a callback that raises an exception + def failing_check(context) -> bool: + raise ValueError("This check always fails") + + # Register the callbacks for different operations + access_model.roles[R.USER].check["operation.sync.check"] = sync_check + access_model.roles[R.USER].check["operation.async.check"] = async_check + access_model.roles[R.USER].check["operation.failing.check"] = failing_check + + # Test synchronous check callback + assert await access_model.can( + R.USER, "operation.sync.check", context={"allowed": True} + ) + assert not await access_model.can( + R.USER, "operation.sync.check", context={"allowed": False} + ) + assert not await access_model.can(R.USER, "operation.sync.check", context=None) + + # Test asynchronous check callback + assert await access_model.can( + R.USER, "operation.async.check", context={"allowed": True} + ) + assert not await access_model.can( + R.USER, "operation.async.check", context={"allowed": False} + ) + + # Test exception handling in check callback + assert not await access_model.can( + R.USER, "operation.failing.check", context={"allowed": True} + ) + + # Test inheritance of checked operations + assert await access_model.can( + R.TESTER, "operation.sync.check", context={"allowed": True} + ) + assert not await access_model.can( + R.ANONYMOUS, "operation.sync.check", context={"allowed": True} + ) + + # Test who_can with checked operations + who_can = await access_model.who_can( + "operation.sync.check", context={"allowed": True} + ) + assert R.USER in who_can + assert R.TESTER in who_can + assert R.ANONYMOUS not in who_can diff --git a/services/web/server/tests/unit/isolated/test_security_api_utils.py b/services/web/server/tests/unit/isolated/test_security_service.py similarity index 74% rename from services/web/server/tests/unit/isolated/test_security_api_utils.py rename to services/web/server/tests/unit/isolated/test_security_service.py index 4ac895f67504..8f441df9d9cf 100644 --- a/services/web/server/tests/unit/isolated/test_security_api_utils.py +++ b/services/web/server/tests/unit/isolated/test_security_service.py @@ -11,22 +11,22 @@ from hypothesis import given from hypothesis import strategies as st from passlib.hash import sha256_crypt -from simcore_service_webserver.security.api import check_password, encrypt_password +from simcore_service_webserver.security import security_service def test_encrypt_password_returns_string(): - assert isinstance(encrypt_password("password"), str) + assert isinstance(security_service.encrypt_password("password"), str) def test_encrypt_password_returns_valid_sha256_hash(): password = "password" - hashed_password = encrypt_password(password) - assert check_password(password, hashed_password) + hashed_password = security_service.encrypt_password(password) + assert security_service.check_password(password, hashed_password) def test_encrypt_password_raises_type_error_for_non_string_input(): with pytest.raises(TypeError): - encrypt_password(123) + security_service.encrypt_password(123) @given( @@ -44,4 +44,4 @@ def test_encrypt_decrypt_deprecated_and_new_method_return_same_values(password: hashed_password_old = sha256_crypt.hash(password, rounds=1000, salt=salt) assert hashed_password_new == hashed_password_old - assert check_password(password, hashed_password_new) + assert 
security_service.check_password(password, hashed_password_new) diff --git a/services/web/server/tests/unit/isolated/test_security_api.py b/services/web/server/tests/unit/isolated/test_security_web.py similarity index 95% rename from services/web/server/tests/unit/isolated/test_security_api.py rename to services/web/server/tests/unit/isolated/test_security_web.py index 83751e25695a..166e0b025066 100644 --- a/services/web/server/tests/unit/isolated/test_security_api.py +++ b/services/web/server/tests/unit/isolated/test_security_web.py @@ -18,7 +18,7 @@ from models_library.emails import LowerCaseEmailStr from models_library.products import ProductName from pydantic import TypeAdapter -from pytest_mock import MockerFixture +from pytest_mock import MockerFixture, MockType from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.aiohttp import status from simcore_postgres_database.models.products import LOGIN_SETTINGS_DEFAULT, products @@ -30,14 +30,14 @@ discover_product_middleware, ) from simcore_service_webserver.products.models import Product -from simcore_service_webserver.security.api import ( +from simcore_service_webserver.security.decorators import permission_required +from simcore_service_webserver.security.plugin import setup_security +from simcore_service_webserver.security.security_service import clean_auth_policy_cache +from simcore_service_webserver.security.security_web import ( check_user_authorized, - clean_auth_policy_cache, forget_identity, remember_identity, ) -from simcore_service_webserver.security.decorators import permission_required -from simcore_service_webserver.security.plugin import setup_security from simcore_service_webserver.session.settings import SessionSettings # Prototype concept ------------------------------------------------------- @@ -214,6 +214,7 @@ async def client( ], app_routes: RouteTableDef, mock_env_devel_environment: EnvVarsDict, + mocked_db_setup_in_setup_security: MockType, ): app = web.Application() app.router.add_routes(app_routes) @@ -227,7 +228,9 @@ async def client( return_value=session_settings, ) + assert not mocked_db_setup_in_setup_security.called setup_security(app) + assert mocked_db_setup_in_setup_security.called # mocks 'setup_products': patch to avoid database set_products_in_app_state(app, app_products) @@ -243,7 +246,7 @@ async def basic_db_funs_mocked(client: TestClient, mocker: MockerFixture) -> Non await clean_auth_policy_cache(client.app) mocker.patch( - "simcore_service_webserver.security._authz_policy.get_database_engine", + "simcore_service_webserver.security._authz_policy.get_async_engine", autospec=True, ) @@ -269,7 +272,7 @@ def get_active_user_or_none_dbmock( basic_db_funs_mocked: None, mocker: MockerFixture ) -> MagicMock: return mocker.patch( - "simcore_service_webserver.security._authz_policy.get_active_user_or_none", + "simcore_service_webserver.security._authz_policy._authz_repository.get_active_user_or_none", autospec=True, return_value={"email": "foo@email.com", "id": 1, "role": UserRole.ADMIN}, ) @@ -280,7 +283,7 @@ def is_user_in_product_name_dbmock( basic_db_funs_mocked: None, mocker: MockerFixture ) -> MagicMock: return mocker.patch( - "simcore_service_webserver.security._authz_policy.is_user_in_product_name", + "simcore_service_webserver.security._authz_policy._authz_repository.is_user_in_product_name", autospec=True, return_value=True, ) diff --git a/services/web/server/tests/unit/isolated/test_studies_dispatcher_models.py 
b/services/web/server/tests/unit/isolated/test_studies_dispatcher_models.py index 245709164634..d1de084671db 100644 --- a/services/web/server/tests/unit/isolated/test_studies_dispatcher_models.py +++ b/services/web/server/tests/unit/isolated/test_studies_dispatcher_models.py @@ -9,20 +9,38 @@ from urllib.parse import parse_qs import pytest +import simcore_service_webserver.studies_dispatcher from aiohttp.test_utils import make_mocked_request from models_library.utils.pydantic_tools_extension import parse_obj_or_none -from pydantic import ByteSize, TypeAdapter +from pydantic import BaseModel, ByteSize, TypeAdapter +from pytest_simcore.pydantic_models import ( + assert_validation_model, + walk_model_examples_in_package, +) from servicelib.aiohttp.requests_validation import parse_request_query_parameters_as +from simcore_service_webserver.studies_dispatcher._controller.rest.redirects_schemas import ( + FileQueryParams, + ServiceAndFileParams, +) from simcore_service_webserver.studies_dispatcher._models import ( FileParams, ServiceParams, ) -from simcore_service_webserver.studies_dispatcher._redirects_handlers import ( - FileQueryParams, - ServiceAndFileParams, -) from yarl import URL + +@pytest.mark.parametrize( + "model_cls, example_name, example_data", + walk_model_examples_in_package(simcore_service_webserver.studies_dispatcher), +) +def test_model_examples( + model_cls: type[BaseModel], example_name: str, example_data: Any +): + assert_validation_model( + model_cls, example_name=example_name, example_data=example_data + ) + + _SIZEBYTES = TypeAdapter(ByteSize).validate_python("3MiB") # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3951#issuecomment-1489992645 @@ -79,9 +97,7 @@ def test_download_link_validators_2(file_and_service_params: dict[str, Any]): assert params.download_link assert params.download_link.host - assert params.download_link.host.endswith( - "s3.amazonaws.com" - ) + assert params.download_link.host.endswith("s3.amazonaws.com") query = parse_qs(params.download_link.query) assert {"AWSAccessKeyId", "Signature", "Expires", "x-amz-request-payer"} == set( diff --git a/services/web/server/tests/unit/isolated/test_studies_dispatcher_projects_permalinks.py b/services/web/server/tests/unit/isolated/test_studies_dispatcher_projects_permalinks.py index 9a182352efad..384041df0bd9 100644 --- a/services/web/server/tests/unit/isolated/test_studies_dispatcher_projects_permalinks.py +++ b/services/web/server/tests/unit/isolated/test_studies_dispatcher_projects_permalinks.py @@ -39,7 +39,6 @@ def app_environment( **env_devel_dict, "WEBSERVER_ACTIVITY": "null", "WEBSERVER_CATALOG": "null", - "WEBSERVER_CLUSTERS": "false", "WEBSERVER_NOTIFICATIONS": "0", "WEBSERVER_DIAGNOSTICS": "null", "WEBSERVER_DIRECTOR_V2": "null", @@ -92,7 +91,9 @@ def test_create_permalink(fake_get_project_request: web.Request, is_public: bool project_uuid: str = fake_get_project_request.match_info["project_uuid"] permalink = create_permalink_for_study( - fake_get_project_request, + fake_get_project_request.app, + request_url=fake_get_project_request.url, + request_headers=dict(fake_get_project_request.headers), project_uuid=project_uuid, project_type=ProjectType.TEMPLATE, project_access_rights={"1": {"read": True, "write": False, "delete": False}}, @@ -120,7 +121,9 @@ def test_permalink_only_for_template_projects( ): with pytest.raises(PermalinkNotAllowedError): create_permalink_for_study( - fake_get_project_request, + fake_get_project_request.app, + request_url=fake_get_project_request.url, + 
request_headers=dict(fake_get_project_request.headers), **{**valid_project_kwargs, "project_type": ProjectType.STANDARD} ) @@ -130,7 +133,9 @@ def test_permalink_only_when_read_access_to_everyone( ): with pytest.raises(PermalinkNotAllowedError): create_permalink_for_study( - fake_get_project_request, + fake_get_project_request.app, + request_url=fake_get_project_request.url, + request_headers=dict(fake_get_project_request.headers), **{ **valid_project_kwargs, "project_access_rights": { @@ -141,7 +146,9 @@ def test_permalink_only_when_read_access_to_everyone( with pytest.raises(PermalinkNotAllowedError): create_permalink_for_study( - fake_get_project_request, + fake_get_project_request.app, + request_url=fake_get_project_request.url, + request_headers=dict(fake_get_project_request.headers), **{ **valid_project_kwargs, "project_access_rights": { diff --git a/services/web/server/tests/unit/isolated/test_user_notifications.py b/services/web/server/tests/unit/isolated/test_user_notifications.py index b8b1d3e06fde..a450600ba8c2 100644 --- a/services/web/server/tests/unit/isolated/test_user_notifications.py +++ b/services/web/server/tests/unit/isolated/test_user_notifications.py @@ -4,7 +4,7 @@ import pytest from models_library.users import UserID -from simcore_service_webserver.users._notifications import ( +from simcore_service_webserver.user_notifications._models import ( NotificationCategory, UserNotification, UserNotificationCreate, @@ -12,9 +12,7 @@ ) -@pytest.mark.parametrize( - "raw_data", UserNotification.model_config["json_schema_extra"]["examples"] -) +@pytest.mark.parametrize("raw_data", UserNotification.model_json_schema()["examples"]) def test_user_notification(raw_data: dict[str, Any]): assert UserNotification.model_validate(raw_data) diff --git a/services/web/server/tests/unit/isolated/test_users_models.py b/services/web/server/tests/unit/isolated/test_users_models.py index e61f543e2113..48b621c51f5e 100644 --- a/services/web/server/tests/unit/isolated/test_users_models.py +++ b/services/web/server/tests/unit/isolated/test_users_models.py @@ -10,23 +10,23 @@ import pytest from faker import Faker from models_library.api_schemas_webserver.users import ( - MyProfileGet, - MyProfilePatch, MyProfilePrivacyGet, + MyProfileRestGet, + MyProfileRestPatch, ) from models_library.generics import Envelope from models_library.utils.fastapi_encoders import jsonable_encoder from servicelib.rest_constants import RESPONSE_MODEL_POLICY from simcore_postgres_database import utils_users -from simcore_service_webserver.users._common.models import ToUserUpdateDB +from simcore_service_webserver.users._models import UserModelAdapter @pytest.fixture -def fake_profile_get(faker: Faker) -> MyProfileGet: +def fake_profile_get(faker: Faker) -> MyProfileRestGet: fake_profile: dict[str, Any] = faker.simple_profile() first, last = fake_profile["name"].rsplit(maxsplit=1) - return MyProfileGet( + return MyProfileRestGet( id=faker.pyint(), first_name=first, last_name=last, @@ -40,7 +40,7 @@ def fake_profile_get(faker: Faker) -> MyProfileGet: ) -def test_profile_get_expiration_date(fake_profile_get: MyProfileGet): +def test_profile_get_expiration_date(fake_profile_get: MyProfileRestGet): fake_expiration = datetime.now(UTC) profile = fake_profile_get.model_copy( @@ -53,7 +53,7 @@ def test_profile_get_expiration_date(fake_profile_get: MyProfileGet): assert body["expirationDate"] == fake_expiration.date().isoformat() -def test_auto_compute_gravatar__deprecated(fake_profile_get: MyProfileGet): +def 
test_auto_compute_gravatar__deprecated(fake_profile_get: MyProfileRestGet): profile = fake_profile_get.model_copy() @@ -62,7 +62,7 @@ def test_auto_compute_gravatar__deprecated(fake_profile_get: MyProfileGet): assert ( "gravatar_id" not in data - ), f"{dict(MyProfileGet.model_fields)['gravatar_id'].deprecated=}" + ), f"{dict(MyProfileRestGet.model_fields)['gravatar_id'].deprecated=}" assert data["id"] == profile.id assert data["first_name"] == profile.first_name assert data["last_name"] == profile.last_name @@ -116,13 +116,13 @@ def test_parsing_output_of_get_user_profile(): }, } - profile = MyProfileGet.model_validate(result_from_db_query_and_composition) + profile = MyProfileRestGet.model_validate(result_from_db_query_and_composition) assert "password" not in profile.model_dump(exclude_unset=True) def test_mapping_update_models_from_rest_to_db(): - profile_update = MyProfilePatch.model_validate( + profile_update = MyProfileRestPatch.model_validate( # request payload { "first_name": "foo", @@ -132,10 +132,10 @@ def test_mapping_update_models_from_rest_to_db(): ) # to db - profile_update_db = ToUserUpdateDB.from_api(profile_update) + profile_update_db = UserModelAdapter.from_rest_schema_model(profile_update) # expected - assert profile_update_db.to_db() == { + assert profile_update_db.to_db_values() == { "first_name": "foo", "name": "foo1234", "privacy_hide_fullname": False, @@ -146,7 +146,7 @@ def test_mapping_update_models_from_rest_to_db(): def test_utils_user_generates_valid_myprofile_patch(): username = utils_users._generate_username_from_email("xi@email.com") # noqa: SLF001 - MyProfilePatch.model_validate({"userName": username}) - MyProfilePatch.model_validate( + MyProfileRestPatch.model_validate({"userName": username}) + MyProfileRestPatch.model_validate( {"userName": utils_users.generate_alternative_username(username)} ) diff --git a/services/web/server/tests/unit/isolated/test_utils.py b/services/web/server/tests/unit/isolated/test_utils.py index d4017a79d8c3..a7abf976fd3d 100644 --- a/services/web/server/tests/unit/isolated/test_utils.py +++ b/services/web/server/tests/unit/isolated/test_utils.py @@ -1,3 +1,5 @@ +import asyncio +import contextlib import time import urllib.parse from datetime import datetime @@ -5,6 +7,7 @@ from simcore_service_webserver.utils import ( DATETIME_FORMAT, compose_support_error_msg, + get_task_info, now_str, to_datetime, ) @@ -70,3 +73,74 @@ def test_compose_support_error_msg(): msg == "First sentence for Mr.X. Second sentence." 
" For more information please forward this message to support@email.com (supportID=OEC:139641204989600)" ) + + +async def test_get_task_info(): + """Test get_task_info function with asyncio tasks""" + + async def dummy_task(): + await asyncio.sleep(0.1) + return "task_result" + + # Create a named task + task = asyncio.create_task(dummy_task(), name="test_task") + + task_info = get_task_info(task) + + # Check that task_info is a dictionary + assert isinstance(task_info, dict) + + # Check that it contains expected keys from TaskInfoDict + expected_keys = {"txt", "type", "done", "cancelled", "stack", "exception"} + assert all(key in task_info for key in expected_keys) + + # Check basic types + assert isinstance(task_info["txt"], str) + assert isinstance(task_info["type"], str) + assert isinstance(task_info["done"], bool) + assert isinstance(task_info["cancelled"], bool) + assert isinstance(task_info["stack"], list) + + # Check that task name is in the txt representation + assert "test_task" in task_info["txt"] + + # Check that stack contains frame info when task is running + if not task_info["done"]: + assert len(task_info["stack"]) > 0 + # Check stack frame structure + for frame_info in task_info["stack"]: + assert "f_code" in frame_info + assert "f_lineno" in frame_info + assert isinstance(frame_info["f_code"], str) + assert isinstance(frame_info["f_lineno"], str) + + # Clean up + task.cancel() + with contextlib.suppress(asyncio.CancelledError): + await task + + +async def test_get_task_info_unnamed_task(): + """Test get_task_info function with unnamed tasks""" + + async def dummy_task(): + await asyncio.sleep(0.1) + + # Create an unnamed task + task = asyncio.create_task(dummy_task()) + + task_info = get_task_info(task) + + # Check basic structure + assert isinstance(task_info, dict) + expected_keys = {"txt", "type", "done", "cancelled", "stack", "exception"} + assert all(key in task_info for key in expected_keys) + + # Check that txt contains task representation + assert isinstance(task_info["txt"], str) + assert "Task" in task_info["txt"] + + # Clean up + task.cancel() + with contextlib.suppress(asyncio.CancelledError): + await task diff --git a/services/web/server/tests/unit/with_dbs/01/groups/test_groups_handlers_crud.py b/services/web/server/tests/unit/with_dbs/01/groups/test_groups_handlers_crud.py index 74aa021ddb65..45a4e38c8acc 100644 --- a/services/web/server/tests/unit/with_dbs/01/groups/test_groups_handlers_crud.py +++ b/services/web/server/tests/unit/with_dbs/01/groups/test_groups_handlers_crud.py @@ -12,11 +12,11 @@ from models_library.api_schemas_webserver.groups import GroupGet, MyGroupsGet from pydantic import TypeAdapter from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.helpers.webserver_parametrizations import ( ExpectedResponse, standard_role_response, ) +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.users import UserRole from simcore_service_webserver._meta import API_VTAG @@ -59,7 +59,7 @@ async def test_list_user_groups_and_try_modify_organizations( ): assert client.app assert logged_user["id"] != standard_groups_owner["id"] - assert logged_user["role"] == user_role.value + assert logged_user["role"] == user_role # List all groups (organizations, primary, everyone and products) I belong to url = client.app.router["list_groups"].url_for() @@ -130,7 +130,7 @@ async def 
test_group_creation_workflow( ): assert client.app assert logged_user["id"] != 0 - assert logged_user["role"] == user_role.value + assert logged_user["role"] == user_role url = client.app.router["create_group"].url_for() new_group_data = { diff --git a/services/web/server/tests/unit/with_dbs/01/groups/test_groups_handlers_users.py b/services/web/server/tests/unit/with_dbs/01/groups/test_groups_handlers_users.py index 7661d74443e3..6ea4bfdd3747 100644 --- a/services/web/server/tests/unit/with_dbs/01/groups/test_groups_handlers_users.py +++ b/services/web/server/tests/unit/with_dbs/01/groups/test_groups_handlers_users.py @@ -14,11 +14,12 @@ from models_library.groups import AccessRightsDict, Group, StandardGroupCreate from pydantic import TypeAdapter from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import LoggedUser, NewUser, UserInfoDict +from pytest_simcore.helpers.webserver_login import LoggedUser from pytest_simcore.helpers.webserver_parametrizations import ( ExpectedResponse, standard_role_response, ) +from pytest_simcore.helpers.webserver_users import NewUser, UserInfoDict from servicelib.aiohttp import status from servicelib.status_codes_utils import is_2xx_success from simcore_postgres_database.models.users import UserRole @@ -32,7 +33,7 @@ delete_standard_group, ) from simcore_service_webserver.groups.api import auto_add_user_to_groups -from simcore_service_webserver.security.api import clean_auth_policy_cache +from simcore_service_webserver.security import security_service def _assert_group(group: dict[str, str]): @@ -213,9 +214,11 @@ async def test_add_remove_users_from_group( expected_user, expected_access_rigths, user, - group_owner_id=the_owner["id"] - if expected_user.get("is_private", False) - else user["id"], + group_owner_id=( + the_owner["id"] + if expected_user.get("is_private", False) + else user["id"] + ), ) # PATCH the user and REMOVE them from the group @@ -456,7 +459,7 @@ async def test_add_user_gets_added_to_group( assert len(data["organizations"]) == (0 if "bad" in email else 1) # NOTE: here same email are used for different users! Therefore sessions get mixed! 
- await clean_auth_policy_cache(client.app) + await security_service.clean_auth_policy_cache(client.app) @pytest.fixture @@ -550,7 +553,7 @@ async def test_create_organization_and_add_users( ): assert client.app assert logged_user["id"] != 0 - assert logged_user["role"] == user_role.value + assert logged_user["role"] == user_role # CREATE GROUP url = client.app.router["create_group"].url_for() diff --git a/services/web/server/tests/unit/with_dbs/01/groups/test_groups_repository.py b/services/web/server/tests/unit/with_dbs/01/groups/test_groups_repository.py new file mode 100644 index 000000000000..91f1aa485116 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/01/groups/test_groups_repository.py @@ -0,0 +1,74 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +from collections.abc import AsyncGenerator, Callable, Coroutine +from typing import Any + +import pytest +from aiohttp.test_utils import TestClient +from models_library.groups import GroupMember, StandardGroupCreate +from pytest_simcore.helpers.webserver_users import UserInfoDict +from simcore_postgres_database.models.users import UserRole +from simcore_service_webserver.groups import _groups_repository + + +@pytest.fixture +async def create_test_group( + client: TestClient, logged_user: UserInfoDict +) -> AsyncGenerator[Callable[..., Coroutine[Any, Any, Any]], None]: + """Fixture that creates a standard group and ensures cleanup.""" + created_groups = [] + + async def _create_group( + name: str = "Test Group", + description: str = "A test group", + thumbnail: str | None = None, + ): + group, _ = await _groups_repository.create_standard_group( + app=client.app, + user_id=logged_user["id"], + create=StandardGroupCreate( + name=name, + description=description, + thumbnail=thumbnail, + ), + ) + created_groups.append(group) + return group + + yield _create_group + + # Cleanup all created groups + for group in created_groups: + await _groups_repository.delete_standard_group( + app=client.app, user_id=logged_user["id"], group_id=group.gid + ) + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_list_users_in_group_owner_only( + client: TestClient, + user_role: UserRole, + logged_user: UserInfoDict, + create_test_group: Callable[..., Coroutine[Any, Any, Any]], +): + """Test list_users_in_group returns only the owner for a new group.""" + assert client.app + + # Create a standard group + group = await create_test_group( + name="Test Owner Only Group", + description="A test group to check owner-only user list", + ) + + # List users in the group - should only contain the owner + users_in_group = await _groups_repository.list_users_in_group( + app=client.app, group_id=group.gid + ) + + # Should contain exactly one user (the owner) + assert len(users_in_group) == 1 + assert isinstance(users_in_group[0], GroupMember) + assert users_in_group[0].id == logged_user["id"] diff --git a/services/web/server/tests/unit/with_dbs/01/storage/conftest.py b/services/web/server/tests/unit/with_dbs/01/storage/conftest.py index 051a2d234237..c8dcb2aac7e1 100644 --- a/services/web/server/tests/unit/with_dbs/01/storage/conftest.py +++ b/services/web/server/tests/unit/with_dbs/01/storage/conftest.py @@ -16,7 +16,7 @@ from faker import Faker from fastapi import APIRouter, Depends, FastAPI, Request, status from fastapi_pagination import add_pagination, create_page -from fastapi_pagination.cursor import CursorPage, CursorParams +from 
fastapi_pagination.cursor import CursorParams from models_library.api_schemas_storage.storage_schemas import ( DatasetMetaDataGet, FileLocation, @@ -35,6 +35,7 @@ from pydantic import AnyUrl, TypeAdapter from pytest_simcore.helpers.logging_tools import log_context from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from servicelib.fastapi.rest_pagination import CustomizedPathsCursorPage from servicelib.utils import unused_port from yarl import URL @@ -74,7 +75,7 @@ async def _list_storage_locations(user_id: UserID, request: Request): @router.get( "/locations/{location_id}/paths", - response_model=CursorPage[PathMetaDataGet], + response_model=CustomizedPathsCursorPage[PathMetaDataGet], ) async def _list_paths( page_params: Annotated[CursorParams, Depends()], diff --git a/services/web/server/tests/unit/with_dbs/01/storage/test_storage.py b/services/web/server/tests/unit/with_dbs/01/storage/test_storage.py index dd59712ed7b6..835b38f3bdb1 100644 --- a/services/web/server/tests/unit/with_dbs/01/storage/test_storage.py +++ b/services/web/server/tests/unit/with_dbs/01/storage/test_storage.py @@ -11,7 +11,6 @@ import pytest from aiohttp.test_utils import TestClient from faker import Faker -from fastapi_pagination.cursor import CursorPage from models_library.api_schemas_long_running_tasks.tasks import ( TaskGet, TaskResult, @@ -54,8 +53,9 @@ from pydantic import TypeAdapter from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status +from servicelib.fastapi.rest_pagination import CustomizedPathsCursorPage from servicelib.rabbitmq.rpc_interfaces.async_jobs import async_jobs from servicelib.rabbitmq.rpc_interfaces.async_jobs.async_jobs import ( submit, @@ -138,7 +138,7 @@ async def test_list_storage_paths( resp = await client.get(f"{url}") data, error = await assert_status(resp, expected) if not error: - TypeAdapter(CursorPage[PathMetaDataGet]).validate_python(data) + TypeAdapter(CustomizedPathsCursorPage[PathMetaDataGet]).validate_python(data) _faker = Faker() @@ -485,7 +485,7 @@ async def test_export_data( ) _body = DataExportPost( - paths=[f"{faker.uuid4()}/{faker.uuid4()}/{faker.file_name()}"] + paths=[Path(f"{faker.uuid4()}/{faker.uuid4()}/{faker.file_name()}")] ) response = await client.post( f"/{API_VERSION}/storage/locations/0/export-data", data=_body.model_dump_json() diff --git a/services/web/server/tests/unit/with_dbs/01/storage/test_storage_handlers.py b/services/web/server/tests/unit/with_dbs/01/storage/test_storage_handlers.py index ef9724704c64..74772386de18 100644 --- a/services/web/server/tests/unit/with_dbs/01/storage/test_storage_handlers.py +++ b/services/web/server/tests/unit/with_dbs/01/storage/test_storage_handlers.py @@ -18,7 +18,7 @@ from pytest_mock import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp.rest_responses import wrap_as_envelope from simcore_postgres_database.models.users import UserRole diff --git a/services/web/server/tests/unit/with_dbs/01/test_api_keys.py b/services/web/server/tests/unit/with_dbs/01/test_api_keys.py index 0793762fada8..0f03fad59fb4 100644 --- 
a/services/web/server/tests/unit/with_dbs/01/test_api_keys.py +++ b/services/web/server/tests/unit/with_dbs/01/test_api_keys.py @@ -4,21 +4,24 @@ # pylint: disable=too-many-arguments import asyncio -from collections.abc import AsyncIterable +from collections.abc import AsyncIterable, Awaitable, Callable from datetime import timedelta from http import HTTPStatus -from http.client import HTTPException import pytest import tenacity from aiohttp.test_utils import TestClient from faker import Faker +from models_library.auth import ( + API_KEY_AUTOGENERATED_DISPLAY_NAME_PREFIX, + API_KEY_AUTOGENERATED_KEY_PREFIX, +) from models_library.products import ProductName from pytest_mock import MockerFixture, MockType from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver.api_keys import _repository, api_keys_service from simcore_service_webserver.api_keys.models import ApiKey @@ -35,35 +38,50 @@ @pytest.fixture -async def fake_user_api_keys( +async def fake_api_key_factory( client: TestClient, logged_user: UserInfoDict, osparc_product_name: ProductName, faker: Faker, -) -> AsyncIterable[list[ApiKey]]: +) -> AsyncIterable[Callable[..., Awaitable[ApiKey]]]: assert client.app - api_keys: list[ApiKey] = [ - await _repository.create_api_key( + created_keys: list[tuple[ApiKey, ProductName]] = [] + + async def _create( + *, + product_name: ProductName | None = None, + display_name: str | None = None, + expiration=None, + api_key: str | None = None, + api_secret: str | None = None, + ) -> ApiKey: + final_product_name = product_name or osparc_product_name + final_display_name = display_name or faker.pystr() + final_api_key = api_key or faker.pystr() + final_api_secret = api_secret or faker.pystr() + + created_key = await _repository.create_api_key( client.app, user_id=logged_user["id"], - product_name=osparc_product_name, - display_name=faker.pystr(), - expiration=None, - api_key=faker.pystr(), - api_secret=faker.pystr(), + product_name=final_product_name, + display_name=final_display_name, + expiration=expiration, + api_key=final_api_key, + api_secret=final_api_secret, ) - for _ in range(5) - ] - yield api_keys + created_keys.append((created_key, final_product_name)) + return created_key + + yield _create - for api_key in api_keys: + for api_key, product_name in created_keys: await _repository.delete_api_key( client.app, api_key_id=api_key.id, user_id=logged_user["id"], - product_name=osparc_product_name, + product_name=product_name, ) @@ -86,13 +104,54 @@ def _get_user_access_parametrizations(expected_authed_status_code): async def test_list_api_keys( disabled_setup_garbage_collector: MockType, client: TestClient, + fake_api_key_factory: Callable[..., Awaitable[ApiKey]], logged_user: UserInfoDict, user_role: UserRole, expected: HTTPStatus, ): + fake_api_keys = [await fake_api_key_factory() for _ in range(10)] + resp = await client.get("/v0/auth/api-keys") data, errors = await assert_status(resp, expected) + if not errors: + assert len(data) == len(fake_api_keys) + + +@pytest.mark.parametrize( + "user_role,expected", + _get_user_access_parametrizations(status.HTTP_200_OK), +) +async def test_list_auto_api_keys( + disabled_setup_garbage_collector: MockType, + client: TestClient, + 
fake_api_key_factory: Callable[..., Awaitable[ApiKey]], + logged_user: UserInfoDict, + user_role: UserRole, + expected: HTTPStatus, + faker: Faker, +): + fake_auto_api_keys = [ + await fake_api_key_factory( + api_key=API_KEY_AUTOGENERATED_KEY_PREFIX + faker.pystr(), + display_name=API_KEY_AUTOGENERATED_DISPLAY_NAME_PREFIX + faker.pystr(), + ) + for _ in range(10) + ] + + resp = await client.get( + "/v0/auth/api-keys", params={"includeAutogenerated": "true"} + ) + data, errors = await assert_status(resp, expected) + + if not errors: + assert len(data) == len(fake_auto_api_keys) + + resp = await client.get( + "/v0/auth/api-keys", params={"includeAutogenerated": "false"} + ) + data, errors = await assert_status(resp, expected) + if not errors: assert not data @@ -136,19 +195,44 @@ async def test_create_api_key( async def test_delete_api_keys( disabled_setup_garbage_collector: MockType, client: TestClient, - fake_user_api_keys: list[ApiKey], + fake_api_key_factory: Callable[..., Awaitable[ApiKey]], logged_user: UserInfoDict, user_role: UserRole, expected: HTTPStatus, ): + fake_api_keys = [await fake_api_key_factory() for _ in range(10)] + resp = await client.delete("/v0/auth/api-keys/0") await assert_status(resp, expected) - for api_key in fake_user_api_keys: + for api_key in fake_api_keys: resp = await client.delete(f"/v0/auth/api-keys/{api_key.id}") await assert_status(resp, expected) +@pytest.mark.parametrize( + "user_role,expected", + _get_user_access_parametrizations(status.HTTP_200_OK), +) +async def test_create_api_keys_same_display_name_different_products( + disabled_setup_garbage_collector: MockType, + client: TestClient, + fake_api_key_factory: Callable[..., Awaitable[ApiKey]], + logged_user: UserInfoDict, + app_products_names: list[str], + user_role: UserRole, + expected: HTTPStatus, +): + display_name = "foo" + + created_keys = [ + await fake_api_key_factory(display_name=display_name, product_name=product_name) + for product_name in app_products_names + ] + + assert len(created_keys) == len(app_products_names) + + EXPIRATION_WAIT_FACTOR = 1.2 @@ -181,7 +265,7 @@ async def test_create_api_key_with_expiration( "/v0/auth/api-keys", json={ "displayName": expected_api_key, - "expiration": expiration_interval.seconds, + "expiration": expiration_interval.total_seconds(), }, ) @@ -197,7 +281,9 @@ async def test_create_api_key_with_expiration( assert [d["displayName"] for d in data] == [expected_api_key] # wait for api-key for it to expire and force-run scheduled task - await asyncio.sleep(EXPIRATION_WAIT_FACTOR * expiration_interval.seconds) + await asyncio.sleep( + EXPIRATION_WAIT_FACTOR * expiration_interval.total_seconds() + ) deleted = await api_keys_service.prune_expired_api_keys(client.app) assert deleted == [expected_api_key] @@ -216,7 +302,7 @@ async def test_get_not_existing_api_key( client: TestClient, logged_user: UserInfoDict, user_role: UserRole, - expected: HTTPException, + expected: HTTPStatus, ): resp = await client.get("/v0/auth/api-keys/42") data, errors = await assert_status(resp, expected) diff --git a/services/web/server/tests/unit/with_dbs/01/test_api_keys_rpc.py b/services/web/server/tests/unit/with_dbs/01/test_api_keys_rpc.py index fca3bf0177ac..465cd1c74f24 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_api_keys_rpc.py +++ b/services/web/server/tests/unit/with_dbs/01/test_api_keys_rpc.py @@ -6,7 +6,7 @@ from collections.abc import AsyncIterable, Awaitable, Callable import pytest -from aiohttp.test_utils import TestServer +from aiohttp.test_utils 
import TestClient, TestServer from faker import Faker from models_library.basic_types import IDStr from models_library.products import ProductName @@ -14,8 +14,8 @@ from pytest_mock import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.webserver_login import UserInfoDict -from servicelib.rabbitmq import RabbitMQRPCClient +from pytest_simcore.helpers.webserver_users import UserInfoDict +from servicelib.rabbitmq import RabbitMQRPCClient, RPCNamespace from servicelib.rabbitmq.rpc_interfaces.webserver.auth.api_keys import ( create_api_key, delete_api_key_by_key, @@ -27,6 +27,7 @@ from simcore_service_webserver.api_keys.errors import ApiKeyNotFoundError from simcore_service_webserver.api_keys.models import ApiKey from simcore_service_webserver.application_settings import ApplicationSettings +from simcore_service_webserver.rabbitmq import get_rpc_namespace pytest_simcore_core_services_selection = [ "rabbit", @@ -104,15 +105,23 @@ async def rpc_client( return await rabbitmq_rpc_client("client") +@pytest.fixture +async def app_rpc_namespace(client: TestClient) -> RPCNamespace: + assert client.app is not None + return get_rpc_namespace(client.app) + + async def test_get_api_key( fake_user_api_keys: list[ApiKey], rpc_client: RabbitMQRPCClient, osparc_product_name: ProductName, logged_user: UserInfoDict, + app_rpc_namespace: RPCNamespace, ): for api_key in fake_user_api_keys: result = await get_api_key( rpc_client, + app_rpc_namespace, user_id=logged_user["id"], product_name=osparc_product_name, api_key_id=IDStr(api_key.id), @@ -121,10 +130,10 @@ async def test_get_api_key( async def test_api_keys_workflow( - web_server: TestServer, rpc_client: RabbitMQRPCClient, osparc_product_name: ProductName, logged_user: UserInfoDict, + app_rpc_namespace: RPCNamespace, faker: Faker, ): key_name = faker.pystr() @@ -132,6 +141,7 @@ async def test_api_keys_workflow( # creating a key created_api_key = await create_api_key( rpc_client, + app_rpc_namespace, user_id=logged_user["id"], product_name=osparc_product_name, api_key=ApiKeyCreate(display_name=key_name, expiration=None), @@ -141,6 +151,7 @@ async def test_api_keys_workflow( # query the key is still present queried_api_key = await get_api_key( rpc_client, + app_rpc_namespace, product_name=osparc_product_name, user_id=logged_user["id"], api_key_id=created_api_key.id, @@ -155,6 +166,7 @@ async def test_api_keys_workflow( # remove the key await delete_api_key_by_key( rpc_client, + app_rpc_namespace, user_id=logged_user["id"], product_name=osparc_product_name, api_key=created_api_key.api_key, @@ -164,6 +176,7 @@ async def test_api_keys_workflow( # key no longer present await get_api_key( rpc_client, + app_rpc_namespace, product_name=osparc_product_name, user_id=logged_user["id"], api_key_id=created_api_key.id, diff --git a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__pricing_plan.py b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__pricing_plan.py index 40440879da0b..7e9799bd77ce 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__pricing_plan.py +++ b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__pricing_plan.py @@ -15,7 +15,7 @@ from models_library.utils.fastapi_encoders import jsonable_encoder from pytest_simcore.aioresponses_mocker import AioResponsesMock from pytest_simcore.helpers.assert_checks import assert_status -from 
pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from settings_library.resource_usage_tracker import ResourceUsageTrackerSettings from simcore_service_webserver.db.models import UserRole diff --git a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services.py b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services.py index e456f9d86f91..13cc1a0234da 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services.py +++ b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services.py @@ -24,7 +24,7 @@ from pytest_simcore.helpers.faker_factories import random_icon_url from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver.catalog._controller_rest_schemas import ( ServiceInputGet, diff --git a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services_resources.py b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services_resources.py index 5c65109ef0a9..09dc81b486ba 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services_resources.py +++ b/services/web/server/tests/unit/with_dbs/01/test_catalog_handlers__services_resources.py @@ -16,7 +16,7 @@ from pydantic import TypeAdapter from pytest_simcore.aioresponses_mocker import AioResponsesMock from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from settings_library.catalog import CatalogSettings from simcore_service_webserver.catalog.settings import get_plugin_settings diff --git a/services/web/server/tests/unit/with_dbs/01/test_catalog_rest_client.py b/services/web/server/tests/unit/with_dbs/01/test_catalog_rest_client.py index 452ecfd76ec6..3635e2293ae2 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_catalog_rest_client.py +++ b/services/web/server/tests/unit/with_dbs/01/test_catalog_rest_client.py @@ -8,7 +8,9 @@ from models_library.api_schemas_catalog.service_access_rights import ( ServiceAccessRightsGet, ) -from pytest_simcore.helpers.webserver_login import UserInfoDict +from models_library.api_schemas_catalog.services import ServiceGet +from pydantic import TypeAdapter +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver.catalog._controller_rest_exceptions import ( DefaultPricingUnitForServiceNotFoundError, @@ -38,9 +40,9 @@ async def test_server_responsive( assert client.app is_responsive = await is_catalog_service_responsive(app=client.app) if backend_status_code == status.HTTP_200_OK: - assert is_responsive == True + assert is_responsive is True else: - assert is_responsive == False + assert is_responsive is False @pytest.mark.parametrize( @@ -56,17 +58,20 @@ async def test_get_services_for_user_in_product( aioresponses_mocker: AioResponsesMock, backend_status_code: int, ): + examples = ServiceGet.model_json_schema()["examples"] + url_pattern = re.compile(r"http://catalog:8000/.*") aioresponses_mocker.get( url_pattern, status=backend_status_code, + 
payload=TypeAdapter(list[ServiceGet]).dump_python(examples, mode="json"), ) assert client.app - _ = await get_services_for_user_in_product( + # tests it does not raise an exception + await get_services_for_user_in_product( app=client.app, user_id=logged_user["id"], product_name="osparc", - only_key_versions=False, ) diff --git a/services/web/server/tests/unit/with_dbs/01/test_director_v2_handlers.py b/services/web/server/tests/unit/with_dbs/01/test_director_v2_handlers.py index 4556878b0e05..e39c79d751ba 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_director_v2_handlers.py +++ b/services/web/server/tests/unit/with_dbs/01/test_director_v2_handlers.py @@ -4,15 +4,22 @@ import pytest +import sqlalchemy as sa from aiohttp.test_utils import TestClient from faker import Faker from models_library.api_schemas_directorv2.comp_runs import ( + ComputationCollectionRunRpcGet, + ComputationCollectionRunRpcGetPage, + ComputationCollectionRunTaskRpcGet, + ComputationCollectionRunTaskRpcGetPage, ComputationRunRpcGet, ComputationRunRpcGetPage, ComputationTaskRpcGet, ComputationTaskRpcGetPage, ) from models_library.api_schemas_webserver.computations import ( + ComputationCollectionRunRestGet, + ComputationCollectionRunTaskRestGet, ComputationRunRestGet, ComputationTaskRestGet, ) @@ -26,6 +33,8 @@ ) from pytest_simcore.services_api_mocks_for_aiohttp_clients import AioResponsesMock from servicelib.aiohttp import status +from simcore_postgres_database.models.comp_runs_collections import comp_runs_collections +from simcore_postgres_database.models.projects_metadata import projects_metadata from simcore_service_webserver.db.models import UserRole from simcore_service_webserver.projects.models import ProjectDict @@ -241,3 +250,233 @@ async def test_list_computations_latest_iteration( ) if user_role != UserRole.ANONYMOUS: assert ComputationTaskRestGet.model_validate(data[0]) + + +@pytest.fixture +def mock_rpc_list_computation_collection_runs_page( + mocker: MockerFixture, + user_project: ProjectDict, +) -> ComputationCollectionRunRpcGetPage: + project_uuid = user_project["uuid"] + example = ComputationCollectionRunRpcGet.model_config["json_schema_extra"][ + "examples" + ][0] + example["project_ids"] = [project_uuid] + example["info"]["project_metadata"]["root_parent_project_id"] = project_uuid + + return mocker.patch( + "simcore_service_webserver.director_v2._computations_service.computations.list_computation_collection_runs_page", + spec=True, + return_value=ComputationCollectionRunRpcGetPage( + items=[ComputationCollectionRunRpcGet.model_validate(example)], + total=1, + ), + ) + + +@pytest.fixture +def mock_rpc_list_computation_collection_run_tasks_page( + mocker: MockerFixture, + user_project: ProjectDict, +) -> str: + project_uuid = user_project["uuid"] + workbench_ids = list(user_project["workbench"].keys()) + example = ComputationCollectionRunTaskRpcGet.model_config["json_schema_extra"][ + "examples" + ][0] + example["node_id"] = workbench_ids[0] + example["project_uuid"] = project_uuid + + mocker.patch( + "simcore_service_webserver.director_v2._computations_service.computations.list_computation_collection_run_tasks_page", + spec=True, + return_value=ComputationCollectionRunTaskRpcGetPage( + items=[ComputationCollectionRunTaskRpcGet.model_validate(example)], + total=1, + ), + ) + + return workbench_ids[0] + + +@pytest.mark.parametrize(*standard_role_response(), ids=str) +async def test_list_computation_collection_runs_and_tasks( + director_v2_service_mock: AioResponsesMock, + user_project: 
ProjectDict, + client: TestClient, + logged_user: LoggedUser, + user_role: UserRole, + expected: ExpectedResponse, + mock_rpc_list_computation_collection_runs_page: None, + mock_rpc_list_computation_collection_run_tasks_page: str, + faker: Faker, +): + assert client.app + url = client.app.router["list_computation_collection_runs"].url_for() + resp = await client.get(f"{url}") + data, _ = await assert_status( + resp, status.HTTP_200_OK if user_role == UserRole.GUEST else expected.ok + ) + if user_role != UserRole.ANONYMOUS: + assert ComputationCollectionRunRestGet.model_validate(data[0]) + assert data[0]["name"] == user_project["name"] + + url = client.app.router["list_computation_collection_run_tasks"].url_for( + collection_run_id=faker.uuid4() + ) + resp = await client.get(f"{url}") + data, _ = await assert_status( + resp, status.HTTP_200_OK if user_role == UserRole.GUEST else expected.ok + ) + if user_role != UserRole.ANONYMOUS: + assert ComputationCollectionRunTaskRestGet.model_validate(data[0]) + assert len(data) == 1 + assert ( + data[0]["name"] + == user_project["workbench"][ + mock_rpc_list_computation_collection_run_tasks_page + ]["label"] + ) + + +@pytest.fixture +async def populated_comp_run_collection( + client: TestClient, + postgres_db: sa.engine.Engine, +): + assert client.app + example = ComputationCollectionRunRpcGet.model_config["json_schema_extra"][ + "examples" + ][0] + collection_run_id = example["collection_run_id"] + + with postgres_db.connect() as con: + con.execute( + comp_runs_collections.insert() + .values( + collection_run_id=collection_run_id, + client_or_system_generated_id=collection_run_id, + client_or_system_generated_display_name="My Collection Run", + is_generated_by_system=False, + created=sa.func.now(), + modified=sa.func.now(), + ) + .returning(comp_runs_collections.c.collection_run_id) + ) + yield + con.execute(comp_runs_collections.delete()) + + +@pytest.mark.parametrize(*standard_role_response(), ids=str) +async def test_list_computation_collection_runs_with_client_defined_name( + director_v2_service_mock: AioResponsesMock, + user_project: ProjectDict, + client: TestClient, + logged_user: LoggedUser, + user_role: UserRole, + expected: ExpectedResponse, + populated_comp_run_collection: None, + mock_rpc_list_computation_collection_runs_page: None, +): + assert client.app + url = client.app.router["list_computation_collection_runs"].url_for() + resp = await client.get(f"{url}") + data, _ = await assert_status( + resp, status.HTTP_200_OK if user_role == UserRole.GUEST else expected.ok + ) + if user_role != UserRole.ANONYMOUS: + assert ComputationCollectionRunRestGet.model_validate(data[0]) + assert data[0]["name"] == "My Collection Run" + + +@pytest.mark.parametrize(*standard_role_response(), ids=str) +async def test_list_computation_collection_runs_with_filter_only_running( + director_v2_service_mock: AioResponsesMock, + user_project: ProjectDict, + client: TestClient, + logged_user: LoggedUser, + user_role: UserRole, + expected: ExpectedResponse, + populated_comp_run_collection: None, + mock_rpc_list_computation_collection_runs_page: None, +): + assert client.app + url = client.app.router["list_computation_collection_runs"].url_for() + query_parameters = {"filter_only_running": "true"} + url_with_query = url.with_query(**query_parameters) + resp = await client.get(f"{url_with_query}") + data, _ = await assert_status( + resp, status.HTTP_200_OK if user_role == UserRole.GUEST else expected.ok + ) + if user_role != UserRole.ANONYMOUS: + assert 
ComputationCollectionRunRestGet.model_validate(data[0]) + + +@pytest.mark.parametrize(*standard_role_response(), ids=str) +async def test_list_computation_collection_runs_with_filter_root_project( + director_v2_service_mock: AioResponsesMock, + user_project: ProjectDict, + client: TestClient, + logged_user: LoggedUser, + user_role: UserRole, + expected: ExpectedResponse, + populated_comp_run_collection: None, + mock_rpc_list_computation_collection_runs_page: None, +): + assert client.app + url = client.app.router["list_computation_collection_runs"].url_for() + query_parameters = {"filter_by_root_project_id": user_project["uuid"]} + url_with_query = url.with_query(**query_parameters) + resp = await client.get(f"{url_with_query}") + data, _ = await assert_status( + resp, status.HTTP_200_OK if user_role == UserRole.GUEST else expected.ok + ) + if user_role != UserRole.ANONYMOUS: + assert ComputationCollectionRunRestGet.model_validate(data[0]) + + +@pytest.fixture +async def populated_project_metadata( + client: TestClient, + logged_user: LoggedUser, + user_project: ProjectDict, + postgres_db: sa.engine.Engine, +): + assert client.app + project_uuid = user_project["uuid"] + with postgres_db.connect() as con: + con.execute( + projects_metadata.insert().values( + **{ + "project_uuid": project_uuid, + "custom": {"job_name": "My Job Name"}, + } + ) + ) + yield + con.execute(projects_metadata.delete()) + + +@pytest.mark.parametrize(*standard_role_response(), ids=str) +async def test_list_computation_collection_runs_tasks_with_different_names( + director_v2_service_mock: AioResponsesMock, + user_project: ProjectDict, + client: TestClient, + logged_user: LoggedUser, + user_role: UserRole, + expected: ExpectedResponse, + populated_project_metadata: None, + mock_rpc_list_computation_collection_run_tasks_page: str, + faker: Faker, +): + assert client.app + url = client.app.router["list_computation_collection_run_tasks"].url_for( + collection_run_id=faker.uuid4() + ) + resp = await client.get(f"{url}") + data, _ = await assert_status( + resp, status.HTTP_200_OK if user_role == UserRole.GUEST else expected.ok + ) + if user_role != UserRole.ANONYMOUS: + assert ComputationCollectionRunTaskRestGet.model_validate(data[0]) + assert data[0]["name"] == "My Job Name" diff --git a/services/web/server/tests/unit/with_dbs/01/test_groups_handlers_classifers.py b/services/web/server/tests/unit/with_dbs/01/test_groups_handlers_classifers.py index 02a0ddf581ba..7fd721eb446f 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_groups_handlers_classifers.py +++ b/services/web/server/tests/unit/with_dbs/01/test_groups_handlers_classifers.py @@ -27,7 +27,6 @@ def app_environment( # exclude "WEBSERVER_ACTIVITY": "null", "WEBSERVER_CATALOG": "null", - "WEBSERVER_CLUSTERS": "null", "WEBSERVER_COMPUTATION": "null", "WEBSERVER_DIAGNOSTICS": "null", "WEBSERVER_EMAIL": "null", diff --git a/services/web/server/tests/unit/with_dbs/01/test_long_running_tasks.py b/services/web/server/tests/unit/with_dbs/01/test_long_running_tasks.py index c6f58f29ee1a..7a06060369e2 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_long_running_tasks.py +++ b/services/web/server/tests/unit/with_dbs/01/test_long_running_tasks.py @@ -6,6 +6,7 @@ # pylint: disable=no-self-argument from typing import Any +from unittest.mock import Mock import pytest from aiohttp.test_utils import TestClient @@ -26,7 +27,7 @@ ("GET", "list_tasks", {}), ("GET", "get_task_status", {"task_id": "some_fake_task_id"}), ("GET", "get_task_result", {"task_id": 
"some_fake_task_id"}), - ("DELETE", "cancel_and_delete_task", {"task_id": "some_fake_task_id"}), + ("DELETE", "remove_task", {"task_id": "some_fake_task_id"}), ], ) async def test_long_running_tasks_access_restricted_to_logged_users( @@ -76,12 +77,15 @@ async def test_listing_tasks_with_list_inprocess_tasks_error( assert client.app class _DummyTaskManager: - def list_tasks(self, *args, **kwargs): - raise Exception() # pylint: disable=broad-exception-raised + async def list_tasks(self, *args, **kwargs): + raise Exception # pylint: disable=broad-exception-raised # noqa: TRY002 + + mock = Mock() + mock.tasks_manager = _DummyTaskManager() mocker.patch( - "servicelib.aiohttp.long_running_tasks._routes.get_tasks_manager", - return_value=_DummyTaskManager(), + "servicelib.aiohttp.long_running_tasks._routes.get_long_running_manager", + return_value=mock, ) _async_jobs_listing_path = client.app.router["get_async_jobs"].url_for() diff --git a/services/web/server/tests/unit/with_dbs/02/conftest.py b/services/web/server/tests/unit/with_dbs/02/conftest.py index 339e154e95e5..714614c9d952 100644 --- a/services/web/server/tests/unit/with_dbs/02/conftest.py +++ b/services/web/server/tests/unit/with_dbs/02/conftest.py @@ -16,6 +16,7 @@ import pytest from aiohttp.test_utils import TestClient from aioresponses import aioresponses +from common_library.json_serialization import json_dumps from faker import Faker from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet from models_library.projects_nodes import Node, NodeID @@ -29,14 +30,23 @@ from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.helpers.webserver_projects import NewProject, delete_all_projects +from pytest_simcore.helpers.webserver_users import UserInfoDict from settings_library.catalog import CatalogSettings from simcore_service_webserver.application_settings import get_application_settings from simcore_service_webserver.catalog.settings import get_plugin_settings from simcore_service_webserver.projects.models import ProjectDict +@pytest.fixture +def app_environment( + app_environment: dict[str, str], monkeypatch: pytest.MonkeyPatch +) -> dict[str, str]: + # NOTE: overrides app_environment + monkeypatch.setenv("WEBSERVER_GARBAGE_COLLECTOR", "null") + return app_environment | {"WEBSERVER_GARBAGE_COLLECTOR": "null"} + + @pytest.fixture def mock_service_resources() -> ServiceResourcesDict: return TypeAdapter(ServiceResourcesDict).validate_python( @@ -244,17 +254,6 @@ async def _assert_it( return _assert_it -@pytest.fixture -def app_environment( - app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch -) -> EnvVarsDict: - envs_plugins = setenvs_from_dict( - monkeypatch, - {"WEBSERVER_DEV_FEATURES_ENABLED": "1"}, - ) - return app_environment | envs_plugins - - @pytest.fixture def disable_max_number_of_running_dynamic_nodes( app_environment: dict[str, str], monkeypatch: pytest.MonkeyPatch @@ -324,7 +323,7 @@ def mock_catalog_service_api_responses(client, aioresponses_mocker): aioresponses_mocker.get( url_pattern, - payload={"data": {}}, + payload={}, repeat=True, ) aioresponses_mocker.post( @@ -487,3 +486,50 @@ def workbench_db_column() -> dict[str, Any]: def workbench(workbench_db_column: dict[str, Any]) -> dict[NodeID, Node]: # convert to model return TypeAdapter(dict[NodeID, 
Node]).validate_python(workbench_db_column) + + +@pytest.fixture +def max_number_of_user_sessions(faker: Faker) -> int: + return faker.pyint(min_value=1, max_value=5) + + +@pytest.fixture +def with_disabled_rtc_collaboration( + app_environment: EnvVarsDict, # ensure pre-app startup envs + monkeypatch: pytest.MonkeyPatch, +) -> None: + setenvs_from_dict( + monkeypatch, + {"WEBSERVER_REALTIME_COLLABORATION": "null"}, + ) + + +@pytest.fixture +def with_enabled_rtc_collaboration( + app_environment: EnvVarsDict, # ensure pre-app startup envs + monkeypatch: pytest.MonkeyPatch, + max_number_of_user_sessions: int, +) -> None: + setenvs_from_dict( + monkeypatch, + { + "WEBSERVER_REALTIME_COLLABORATION": json_dumps( + {"RTC_MAX_NUMBER_OF_USERS": max_number_of_user_sessions} + ) + }, + ) + + +@pytest.fixture +def with_enabled_rtc_collaboration_limited_to_1_user( + app_environment: EnvVarsDict, # ensure pre-app startup envs + monkeypatch: pytest.MonkeyPatch, +) -> None: + setenvs_from_dict( + monkeypatch, + { + "WEBSERVER_REALTIME_COLLABORATION": json_dumps( + {"RTC_MAX_NUMBER_OF_USERS": 1} + ) + }, + ) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects__jobs_service.py b/services/web/server/tests/unit/with_dbs/02/test_projects__jobs_service.py index df62390bf37e..0e44cd5e9c63 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects__jobs_service.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects__jobs_service.py @@ -12,7 +12,7 @@ from models_library.products import ProductName from models_library.projects import ProjectID from models_library.users import UserID -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from simcore_service_webserver.projects._jobs_service import ( list_my_projects_marked_as_jobs, set_project_as_job, @@ -56,6 +56,7 @@ async def project_job_fixture( user_id=user_id, project_uuid=project_uuid, job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=False, ) return ProjectJobFixture( user_id=user_id, @@ -221,6 +222,7 @@ async def test_filter_projects_by_metadata( user_id=user_id, project_uuid=project_uuid, job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=False, ) # 2. 
Set custom metadata diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py b/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py index 07cb83015d0a..3e535cab5b5d 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py @@ -23,10 +23,12 @@ MockedStorageSubsystem, standard_role_response, ) -from servicelib.aiohttp.long_running_tasks.server import TaskGet +from servicelib.long_running_tasks.models import TaskGet from servicelib.rabbitmq.rpc_interfaces.async_jobs.async_jobs import ( AsyncJobComposedResult, ) +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings from simcore_postgres_database.models.users import UserRole from simcore_service_webserver._meta import api_version_prefix from simcore_service_webserver.application_settings import get_application_settings @@ -35,17 +37,21 @@ from tenacity.stop import stop_after_delay from tenacity.wait import wait_fixed +pytest_simcore_core_services_selection = [ + "rabbit", +] + API_PREFIX = "/" + api_version_prefix @pytest.fixture def app_environment( - app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch + use_in_memory_redis: RedisSettings, + rabbit_settings: RabbitSettings, + app_environment: EnvVarsDict, + monkeypatch: pytest.MonkeyPatch, ) -> EnvVarsDict: - envs_plugins = setenvs_from_dict( - monkeypatch, - {}, - ) + envs_plugins = setenvs_from_dict(monkeypatch, {}) return app_environment | envs_plugins @@ -91,19 +97,18 @@ def _standard_user_role_response() -> ( @pytest.mark.parametrize(*_standard_user_role_response()) async def test_copying_large_project_and_aborting_correctly_removes_new_project( + mock_dynamic_scheduler: None, client: TestClient, logged_user: dict[str, Any], primary_group: dict[str, str], standard_groups: list[dict[str, str]], user_project: dict[str, Any], expected: ExpectedResponse, - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], slow_storage_subsystem_mock: MockedStorageSubsystem, project_db_cleaner: None, mocked_dynamic_services_interface: dict[str, MagicMock], ): assert client.app - catalog_subsystem_mock([user_project]) # initiate a project copy that will last long (simulated by a long running storage) # POST /v0/projects create_url = client.app.router["create_project"].url_for() @@ -136,7 +141,7 @@ async def test_copying_large_project_and_aborting_correctly_removes_new_project( await assert_status(resp, expected.no_content) # wait to check that the call to storage is "done" async for attempt in AsyncRetrying( - reraise=True, stop=stop_after_delay(10), wait=wait_fixed(1) + reraise=True, stop=stop_after_delay(60), wait=wait_fixed(1) ): with attempt: slow_storage_subsystem_mock.delete_project.assert_called_once() @@ -144,19 +149,18 @@ async def test_copying_large_project_and_aborting_correctly_removes_new_project( @pytest.mark.parametrize(*_standard_user_role_response()) async def test_copying_large_project_and_retrieving_copy_task( + mock_dynamic_scheduler: None, client: TestClient, logged_user: dict[str, Any], primary_group: dict[str, str], standard_groups: list[dict[str, str]], user_project: dict[str, Any], expected: ExpectedResponse, - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], slow_storage_subsystem_mock: MockedStorageSubsystem, project_db_cleaner: None, mocked_dynamic_services_interface: dict[str, MagicMock], ): assert client.app - 
catalog_subsystem_mock([user_project]) # initiate a project copy that will last long (simulated by a long running storage) # POST /v0/projects @@ -192,19 +196,18 @@ async def test_copying_large_project_and_retrieving_copy_task( @pytest.mark.parametrize(*_standard_user_role_response()) async def test_creating_new_project_from_template_without_copying_data_creates_skeleton( mock_dynamic_scheduler: None, + mocked_dynamic_services_interface: dict[str, MagicMock], client: TestClient, logged_user: dict[str, Any], primary_group: dict[str, str], standard_groups: list[dict[str, str]], template_project: dict[str, Any], expected: ExpectedResponse, - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], slow_storage_subsystem_mock: MockedStorageSubsystem, project_db_cleaner: None, request_create_project: Callable[..., Awaitable[ProjectDict]], ): assert client.app - catalog_subsystem_mock([template_project]) # create a project from another without copying data shall not call in the storage API # POST /v0/projects await request_create_project( @@ -243,19 +246,18 @@ async def test_creating_new_project_from_template_without_copying_data_creates_s @pytest.mark.parametrize(*_standard_user_role_response()) async def test_creating_new_project_as_template_without_copying_data_creates_skeleton( mock_dynamic_scheduler: None, + mocked_dynamic_services_interface: dict[str, MagicMock], client: TestClient, logged_user: dict[str, Any], primary_group: dict[str, str], standard_groups: list[dict[str, str]], user_project: dict[str, Any], expected: ExpectedResponse, - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], slow_storage_subsystem_mock: MockedStorageSubsystem, project_db_cleaner: None, request_create_project: Callable[..., Awaitable[ProjectDict]], ): assert client.app - catalog_subsystem_mock([user_project]) # create a project from another without copying data shall not call in the storage API # POST /v0/projects await request_create_project( @@ -304,6 +306,7 @@ async def test_copying_too_large_project_returns_422( ): assert client.app app_settings = get_application_settings(client.app) + assert app_settings.WEBSERVER_PROJECTS large_project_total_size = ( app_settings.WEBSERVER_PROJECTS.PROJECTS_MAX_COPY_SIZE_BYTES + 1 ) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_conversations_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_conversations_handlers.py index 66e115f07407..665110aa29d4 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_conversations_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_conversations_handlers.py @@ -6,15 +6,20 @@ # pylint: disable=too-many-statements +from collections.abc import Callable, Iterable from http import HTTPStatus +from types import SimpleNamespace import pytest +import simcore_service_webserver.conversations._conversation_message_service as conversation_message_service +import simcore_service_webserver.conversations._conversation_service as conversation_service import sqlalchemy as sa from aiohttp.test_utils import TestClient -from models_library.api_schemas_webserver.projects_conversations import ( +from models_library.api_schemas_webserver.conversations import ( ConversationMessageRestGet, ConversationRestGet, ) +from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.webserver_login import LoggedUser, UserInfoDict from servicelib.aiohttp import status @@ -28,6 +33,21 @@ API_PREFIX = "/" + 
api_version_prefix +@pytest.fixture +def mock_functions_factory( + mocker: MockerFixture, +) -> Callable[[Iterable[tuple[object, str]]], SimpleNamespace]: + def _patch(targets_and_names: Iterable[tuple[object, str]]) -> SimpleNamespace: + return SimpleNamespace( + **{ + name: mocker.patch.object(target, name) + for target, name in targets_and_names + } + ) + + return _patch + + @pytest.mark.parametrize( "user_role,expected", [ @@ -66,7 +86,16 @@ async def test_project_conversations_full_workflow( logged_user: UserInfoDict, user_project: ProjectDict, expected: HTTPStatus, + mock_functions_factory: Callable[[Iterable[tuple[object, str]]], SimpleNamespace], ): + mocks = mock_functions_factory( + [ + (conversation_service, "notify_conversation_created"), + (conversation_service, "notify_conversation_updated"), + (conversation_service, "notify_conversation_deleted"), + ] + ) + base_url = client.app.router["list_project_conversations"].url_for( project_id=user_project["uuid"] ) @@ -91,6 +120,12 @@ async def test_project_conversations_full_workflow( assert ConversationRestGet.model_validate(data) _first_conversation_id = data["conversationId"] + assert mocks.notify_conversation_created.call_count == 1 + kwargs = mocks.notify_conversation_created.call_args.kwargs + + assert f"{kwargs['project_id']}" == user_project["uuid"] + assert kwargs["conversation"].name == "My conversation" + # Now we will create second conversation body = {"name": "My conversation", "type": "PROJECT_ANNOTATION"} resp = await client.post(f"{base_url}", json=body) @@ -100,6 +135,12 @@ async def test_project_conversations_full_workflow( ) assert ConversationRestGet.model_validate(data) + assert mocks.notify_conversation_created.call_count == 2 + kwargs = mocks.notify_conversation_created.call_args.kwargs + + assert f"{kwargs['project_id']}" == user_project["uuid"] + assert kwargs["conversation"].name == "My conversation" + # Now we will list all conversations for the project resp = await client.get(f"{base_url}") data, _, meta, links = await assert_status( @@ -130,6 +171,12 @@ async def test_project_conversations_full_workflow( ) assert data["name"] == updated_name + assert mocks.notify_conversation_updated.call_count == 1 + kwargs = mocks.notify_conversation_updated.call_args.kwargs + + assert f"{kwargs['project_id']}" == user_project["uuid"] + assert kwargs["conversation"].name == updated_name + # Now we will delete the first conversation resp = await client.delete(f"{base_url}/{_first_conversation_id}") data, _ = await assert_status( @@ -137,6 +184,11 @@ async def test_project_conversations_full_workflow( status.HTTP_204_NO_CONTENT, ) + assert mocks.notify_conversation_deleted.call_count == 1 + kwargs = mocks.notify_conversation_deleted.call_args.kwargs + + assert f"{kwargs['conversation_id']}" == _first_conversation_id + # Now we will list all conversations for the project resp = await client.get(f"{base_url}") data, _, meta = await assert_status( @@ -163,7 +215,16 @@ async def test_project_conversation_messages_full_workflow( user_project: ProjectDict, expected: HTTPStatus, postgres_db: sa.engine.Engine, + mock_functions_factory: Callable[[Iterable[tuple[object, str]]], SimpleNamespace], ): + mocks = mock_functions_factory( + [ + (conversation_message_service, "notify_conversation_message_created"), + (conversation_message_service, "notify_conversation_message_updated"), + (conversation_message_service, "notify_conversation_message_deleted"), + ] + ) + base_project_url = 
client.app.router["list_project_conversations"].url_for( project_id=user_project["uuid"] ) @@ -191,6 +252,12 @@ async def test_project_conversation_messages_full_workflow( assert ConversationMessageRestGet.model_validate(data) _first_message_id = data["messageId"] + assert mocks.notify_conversation_message_created.call_count == 1 + kwargs = mocks.notify_conversation_message_created.call_args.kwargs + + assert f"{kwargs['project_id']}" == user_project["uuid"] + assert kwargs["conversation_message"].content == "My first message" + # Now we will add second message body = {"content": "My second message", "type": "MESSAGE"} resp = await client.post(f"{base_project_conversation_url}", json=body) @@ -201,6 +268,12 @@ async def test_project_conversation_messages_full_workflow( assert ConversationMessageRestGet.model_validate(data) _second_message_id = data["messageId"] + assert mocks.notify_conversation_message_created.call_count == 2 + kwargs = mocks.notify_conversation_message_created.call_args.kwargs + + assert user_project["uuid"] == f"{kwargs['project_id']}" + assert kwargs["conversation_message"].content == "My second message" + # Now we will list all message for the project conversation resp = await client.get(f"{base_project_conversation_url}") data, _, meta, links = await assert_status( @@ -227,6 +300,12 @@ async def test_project_conversation_messages_full_workflow( expected, ) + assert mocks.notify_conversation_message_updated.call_count == 1 + kwargs = mocks.notify_conversation_message_updated.call_args.kwargs + + assert user_project["uuid"] == f"{kwargs['project_id']}" + assert kwargs["conversation_message"].content == updated_content + # Get the second message resp = await client.get(f"{base_project_conversation_url}/{_second_message_id}") data, _ = await assert_status( @@ -257,6 +336,13 @@ async def test_project_conversation_messages_full_workflow( status.HTTP_204_NO_CONTENT, ) + assert mocks.notify_conversation_message_deleted.call_count == 1 + kwargs = mocks.notify_conversation_message_deleted.call_args.kwargs + + assert f"{kwargs['project_id']}" == user_project["uuid"] + assert f"{kwargs['conversation_id']}" == _conversation_id + assert f"{kwargs['message_id']}" == _second_message_id + # Now we will list all message for the project conversation resp = await client.get(f"{base_project_conversation_url}") data, _, meta = await assert_status(resp, expected, include_meta=True) @@ -347,3 +433,10 @@ async def test_project_conversation_messages_full_workflow( resp, status.HTTP_204_NO_CONTENT, ) + + assert mocks.notify_conversation_message_deleted.call_count == 2 + kwargs = mocks.notify_conversation_message_deleted.call_args.kwargs + + assert f"{kwargs['project_id']}" == user_project["uuid"] + assert f"{kwargs['conversation_id']}" == _conversation_id + assert f"{kwargs['message_id']}" == _first_message_id diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py index bc5e5d747810..3fa5482fddfc 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py @@ -9,29 +9,35 @@ from http import HTTPStatus from math import ceil from typing import Any +from unittest import mock import pytest import sqlalchemy as sa from aiohttp.test_utils import TestClient from aioresponses import aioresponses +from deepdiff import DeepDiff from faker import Faker from 
models_library.api_schemas_directorv2.dynamic_services import ( GetProjectInactivityResponse, ) +from models_library.api_schemas_webserver.projects import ProjectStateOutputSchema from models_library.products import ProductName -from models_library.projects_state import ProjectState from pydantic import TypeAdapter from pytest_mock import MockerFixture -from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.assert_checks import ( + assert_equal_ignoring_none, + assert_status, +) from pytest_simcore.helpers.webserver_parametrizations import ( ExpectedResponse, MockedStorageSubsystem, standard_role_response, standard_user_role_response, ) +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from servicelib.rest_constants import X_PRODUCT_NAME_HEADER +from settings_library.rabbit import RabbitSettings from simcore_postgres_database.models.products import products from simcore_postgres_database.models.projects_to_products import projects_to_products from simcore_service_webserver._meta import api_version_prefix @@ -47,6 +53,10 @@ from simcore_service_webserver.utils import to_datetime from yarl import URL +pytest_simcore_core_services_selection = [ + "rabbit", +] + API_PREFIX = "/" + api_version_prefix @@ -168,10 +178,12 @@ async def _assert_get_same_project( project_permalink = data.pop("permalink", None) folder_id = data.pop("folderId", None) - assert data == {k: project[k] for k in data} + assert not DeepDiff( + data, {k: project[k] for k in data}, exclude_paths="root['lastChangeDate']" + ) if project_state: - assert ProjectState.model_validate(project_state) + assert ProjectStateOutputSchema.model_validate(project_state) if project_permalink: assert ProjectPermalink.model_validate(project_permalink) @@ -189,15 +201,15 @@ async def _assert_get_same_project( ], ) async def test_list_projects( + rabbit_settings: RabbitSettings, client: TestClient, + mocked_dynamic_services_interface: dict[str, mock.MagicMock], logged_user: dict[str, Any], user_project: dict[str, Any], template_project: dict[str, Any], expected: HTTPStatus, - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], director_v2_service_mock: aioresponses, ): - catalog_subsystem_mock([user_project, template_project]) data, *_ = await _list_and_assert_projects(client, expected) if data: @@ -209,11 +221,15 @@ async def test_list_projects( project_permalink = got.pop("permalink") folder_id = got.pop("folderId") - assert got == {k: template_project[k] for k in got} + assert not DeepDiff( + got, + {k: template_project[k] for k in got}, + exclude_paths="root['lastChangeDate']", + ) - assert not ProjectState( + assert not ProjectStateOutputSchema( **project_state - ).locked.value, "Templates are not locked" + ).share_state.locked, "Templates are not locked" assert ProjectPermalink.model_validate(project_permalink) # standard project @@ -222,9 +238,13 @@ async def test_list_projects( project_permalink = got.pop("permalink", None) folder_id = got.pop("folderId") - assert got == {k: user_project[k] for k in got} + assert not DeepDiff( + got, + {k: user_project[k] for k in got}, + exclude_paths="root['lastChangeDate']", + ) - assert ProjectState(**project_state) + assert ProjectStateOutputSchema(**project_state) assert project_permalink is None assert folder_id is None @@ -239,10 +259,15 @@ async def test_list_projects( project_permalink = got.pop("permalink", None) folder_id = 
got.pop("folderId") - assert got == {k: user_project[k] for k in got} - assert not ProjectState( + assert not DeepDiff( + got, + {k: user_project[k] for k in got}, + exclude_paths="root['lastChangeDate']", + ) + + assert not ProjectStateOutputSchema( **project_state - ).locked.value, "Single user does not lock" + ).share_state.locked, "Single user does not lock" assert project_permalink is None # GET /v0/projects?type=template @@ -257,10 +282,14 @@ async def test_list_projects( project_permalink = got.pop("permalink") folder_id = got.pop("folderId") - assert got == {k: template_project[k] for k in got} - assert not ProjectState( + assert not DeepDiff( + got, + {k: template_project[k] for k in got}, + exclude_paths="root['lastChangeDate']", + ) + assert not ProjectStateOutputSchema( **project_state - ).locked.value, "Templates are not locked" + ).share_state.locked, "Templates are not locked" assert ProjectPermalink.model_validate(project_permalink) @@ -340,11 +369,11 @@ async def logged_user_registed_in_two_products( async def test_list_projects_with_innaccessible_services( s4l_products_db_name: ProductName, client: TestClient, + mocked_dynamic_services_interface: dict[str, mock.MagicMock], logged_user_registed_in_two_products: UserInfoDict, user_project: dict[str, Any], template_project: dict[str, Any], expected: HTTPStatus, - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], director_v2_service_mock: aioresponses, postgres_db: sa.engine.Engine, s4l_product_headers: dict[str, Any], @@ -374,7 +403,6 @@ async def test_list_projects_with_innaccessible_services( # use-case 4: give user access to services # shall return the projects for any product - catalog_subsystem_mock([user_project, template_project]) data, *_ = await _list_and_assert_projects( client, expected, headers=s4l_product_headers ) @@ -399,14 +427,12 @@ async def test_list_projects_with_innaccessible_services( ) async def test_get_project( client: TestClient, + mocked_dynamic_services_interface: dict[str, mock.MagicMock], logged_user: UserInfoDict, user_project: ProjectDict, template_project: ProjectDict, expected, - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], ): - catalog_subsystem_mock([user_project, template_project]) - # standard project await _assert_get_same_project(client, user_project, expected) @@ -444,7 +470,6 @@ async def test_create_get_and_patch_project_ui_field( logged_user: UserInfoDict, primary_group: dict[str, str], request_create_project: Callable[..., Awaitable[ProjectDict]], - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], project_db_cleaner, ): assert client.app @@ -462,8 +487,6 @@ async def test_create_get_and_patch_project_ui_field( ) project_id = new_project["uuid"] - catalog_subsystem_mock([new_project]) - # Step 2: Get the project and check the ui.icon url = client.app.router["get_project"].url_for(project_id=project_id) resp = await client.get(f"{url}") @@ -489,6 +512,7 @@ async def test_create_get_and_patch_project_ui_field( @pytest.mark.parametrize(*standard_user_role_response()) async def test_new_project_from_template( mock_dynamic_scheduler: None, + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, primary_group: dict[str, str], @@ -516,17 +540,16 @@ async def test_new_project_from_template( @pytest.mark.parametrize(*standard_user_role_response()) async def test_new_project_from_other_study( mock_dynamic_scheduler: None, + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: 
TestClient, logged_user: UserInfoDict, primary_group: dict[str, str], user_project: ProjectDict, expected: ExpectedResponse, storage_subsystem_mock, - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], project_db_cleaner, request_create_project: Callable[..., Awaitable[ProjectDict]], ): - catalog_subsystem_mock([user_project]) new_project = await request_create_project( client, expected.accepted, @@ -546,6 +569,7 @@ async def test_new_project_from_other_study( @pytest.mark.parametrize(*standard_user_role_response()) async def test_new_project_from_template_with_body( mock_dynamic_scheduler: None, + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, primary_group: dict[str, str], @@ -601,6 +625,7 @@ async def test_new_project_from_template_with_body( @pytest.mark.parametrize(*standard_user_role_response()) async def test_new_template_from_project( mock_dynamic_scheduler: None, + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: dict[str, Any], primary_group: dict[str, str], @@ -608,7 +633,6 @@ async def test_new_template_from_project( user_project: dict[str, Any], expected: ExpectedResponse, storage_subsystem_mock: MockedStorageSubsystem, - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], project_db_cleaner: None, request_create_project: Callable[..., Awaitable[ProjectDict]], ): @@ -625,14 +649,13 @@ async def test_new_template_from_project( if new_template_prj: template_project = new_template_prj - catalog_subsystem_mock([template_project]) templates, *_ = await _list_and_assert_projects( client, status.HTTP_200_OK, {"type": "template"} ) assert len(templates) == 1 - assert templates[0] == template_project + assert_equal_ignoring_none(template_project, templates[0]) assert template_project["name"] == user_project["name"] assert template_project["description"] == user_project["description"] diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone.py index 5945d2907445..dd4dbda73ea0 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone.py @@ -15,11 +15,11 @@ from models_library.api_schemas_webserver.projects import ProjectGet from models_library.projects import ProjectID from pydantic import TypeAdapter -from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.helpers.webserver_parametrizations import ( MockedStorageSubsystem, standard_role_response, ) +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from servicelib.aiohttp.long_running_tasks.client import long_running_task_request from simcore_service_webserver.db.models import UserRole diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone_in_workspace_and_folder.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone_in_workspace_and_folder.py index fa7ed48abeb8..235a9ed886b7 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone_in_workspace_and_folder.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__clone_in_workspace_and_folder.py @@ -14,8 +14,8 @@ from models_library.folders import FolderID from models_library.projects import ProjectID from models_library.workspaces 
import WorkspaceID -from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.helpers.webserver_parametrizations import MockedStorageSubsystem +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp.long_running_tasks.client import long_running_task_request from simcore_postgres_database.models.folders_v2 import folders_v2 from simcore_postgres_database.models.workspaces import workspaces diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py index 6dbcbe488ac0..0b8fe41d43ec 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__delete.py @@ -11,6 +11,7 @@ from unittest.mock import MagicMock, call import pytest +import socketio import sqlalchemy as sa from aiohttp.test_utils import TestClient from faker import Faker @@ -22,12 +23,12 @@ from models_library.projects_access import Owner from models_library.projects_state import ProjectStatus from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.helpers.webserver_parametrizations import ( ExpectedResponse, MockedStorageSubsystem, standard_role_response, ) +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE from servicelib.redis import with_project_locked @@ -61,7 +62,6 @@ async def test_delete_project( expected: ExpectedResponse, storage_subsystem_mock: MockedStorageSubsystem, mocked_dynamic_services_interface: dict[str, MagicMock], - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], fake_services: Callable[..., Awaitable[list[DynamicServiceGet]]], assert_get_same_project_caller: Callable, mock_dynamic_scheduler_rabbitmq: None, @@ -143,8 +143,9 @@ async def test_delete_multiple_opened_project_forbidden( user_project: ProjectDict, mocked_dynamic_services_interface, create_dynamic_service_mock: Callable[..., Awaitable[DynamicServiceGet]], - socketio_client_factory: Callable, - client_session_id_factory: Callable, + create_socketio_connection: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] + ], user_role: UserRole, expected_ok: HTTPStatus, expected_forbidden: HTTPStatus, @@ -156,9 +157,10 @@ async def test_delete_multiple_opened_project_forbidden( user_id=logged_user["id"], project_id=user_project["uuid"] ) # open project in tab1 - client_session_id1 = client_session_id_factory() + client_session_id1 = None try: - await socketio_client_factory(client_session_id1) + sio, client_session_id1 = await create_socketio_connection(None, client) + assert sio except SocketConnectionError: if user_role != UserRole.ANONYMOUS: pytest.fail("socket io connection should not fail") @@ -175,9 +177,11 @@ async def test_delete_multiple_opened_project_forbidden( mocked_notifications_plugin["subscribe"].assert_not_called() # delete project in tab2 - client_session_id2 = client_session_id_factory() try: - await socketio_client_factory(client_session_id2) + sio_2, client_session_id2 = await create_socketio_connection(None, client) + assert sio_2 + if client_session_id1: + assert client_session_id2 != client_session_id1 except SocketConnectionError: if user_role != UserRole.ANONYMOUS: 
pytest.fail("socket io connection should not fail") @@ -233,6 +237,6 @@ async def test_delete_project_while_it_is_locked_raises_error( get_redis_lock_manager_client_sdk(client.app), project_uuid=project_uuid, status=ProjectStatus.CLOSING, - owner=Owner(user_id=user_id, first_name=faker.name(), last_name=faker.name()), + owner=Owner(user_id=user_id), notification_cb=None, )(_request_delete_project)(client, user_project, expected.conflict) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list.py index d16058179be6..5596fdfdf1ee 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list.py @@ -18,6 +18,7 @@ standard_role_response, ) from servicelib.aiohttp import status +from settings_library.redis import RedisSettings from simcore_service_webserver._meta import api_version_prefix from simcore_service_webserver.db.models import UserRole from simcore_service_webserver.projects.models import ProjectDict @@ -133,7 +134,6 @@ async def test_list_projects_with_invalid_pagination_parameters( primary_group: dict[str, str], expected: ExpectedResponse, storage_subsystem_mock, - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], director_v2_service_mock: aioresponses, project_db_cleaner, limit: int, @@ -152,13 +152,13 @@ async def test_list_projects_with_invalid_pagination_parameters( @pytest.mark.parametrize("limit", [7, 20, 43]) @pytest.mark.parametrize(*standard_user_role()) async def test_list_projects_with_pagination( + use_in_memory_redis: RedisSettings, mock_dynamic_scheduler: None, client: TestClient, logged_user: dict[str, Any], primary_group: dict[str, str], expected: ExpectedResponse, storage_subsystem_mock, - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], director_v2_service_mock: aioresponses, project_db_cleaner, limit: int, @@ -175,7 +175,6 @@ async def test_list_projects_with_pagination( ] ) if expected.created == status.HTTP_201_CREATED: - catalog_subsystem_mock(created_projects) assert len(created_projects) == NUM_PROJECTS NUMBER_OF_CALLS = ceil(NUM_PROJECTS / limit) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list_with_query_params.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list_with_query_params.py index 3e9b7ec0922b..d97ca5954089 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list_with_query_params.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__list_with_query_params.py @@ -11,22 +11,23 @@ from copy import deepcopy from pathlib import Path from typing import Any +from unittest import mock import pytest import sqlalchemy as sa +from _pytest.mark.structures import ParameterSet from aiohttp.test_utils import TestClient from models_library.folders import FolderID from models_library.projects import ProjectID, ProjectTemplateType from models_library.users import UserID from pydantic import BaseModel, PositiveInt -from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.helpers.webserver_parametrizations import ( ExpectedResponse, standard_role_response, ) from pytest_simcore.helpers.webserver_projects import create_project +from 
pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.folders_v2 import folders_v2 from simcore_postgres_database.models.projects_to_folders import projects_to_folders @@ -35,13 +36,21 @@ from simcore_service_webserver.projects.models import ProjectDict -def standard_user_role() -> tuple[str, tuple[UserRole, ExpectedResponse]]: - all_roles = standard_role_response() +def standard_user_role() -> tuple[str, list[ParameterSet]]: + parameters, all_roles_expected_responses = standard_role_response() + standard_user, standard_user_expected_response = all_roles_expected_responses[2] - return (all_roles[0], [pytest.param(*all_roles[1][2], id="standard_user_role")]) + return ( + parameters, + [ + pytest.param( + standard_user, standard_user_expected_response, id="standard_user_role" + ) + ], + ) -def standard_and_tester_user_roles() -> tuple[str, tuple[UserRole, ExpectedResponse]]: +def standard_and_tester_user_roles() -> tuple[str, list[ParameterSet]]: all_roles = standard_role_response() return ( @@ -53,21 +62,13 @@ def standard_and_tester_user_roles() -> tuple[str, tuple[UserRole, ExpectedRespo ) -@pytest.fixture -def mock_catalog_api_get_services_for_user_in_product(mocker: MockerFixture): - mocker.patch( - "simcore_service_webserver.projects._crud_api_read.catalog_service.get_services_for_user_in_product", - spec=True, - return_value=[], - ) - - async def _new_project( client: TestClient, user_id: UserID, product_name: str, tests_data_dir: Path, project_data: dict[str, Any], + *, as_template: bool = False, ): """returns a project for the given user""" @@ -99,7 +100,7 @@ def _assert_response_data( def _pick_random_substring(text, length): length = min(length, len(text)) - start_index = random.randint(0, len(text) - length) + start_index = random.randint(0, len(text) - length) # noqa: S311 end_index = start_index + length return text[start_index:end_index] @@ -111,7 +112,8 @@ class _ProjectInfo(BaseModel): @pytest.mark.parametrize(*standard_user_role()) -async def test_list_projects_with_search_parameter( +async def test_list_projects_with_search_parameter( # noqa: PLR0915 + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserDict, expected: ExpectedResponse, @@ -119,31 +121,30 @@ async def test_list_projects_with_search_parameter( tests_data_dir: Path, osparc_product_name: str, project_db_cleaner: None, - mock_catalog_api_get_services_for_user_in_product, ): projects_info = [ _ProjectInfo( - uuid="d4d0eca3-d210-4db6-84f9-63670b07176b", + uuid=ProjectID("d4d0eca3-d210-4db6-84f9-63670b07176b"), name="Name 1", description="Description 1", ), _ProjectInfo( - uuid="2f3ef868-fe1b-11ed-b038-cdb13a78a6f3", + uuid=ProjectID("2f3ef868-fe1b-11ed-b038-cdb13a78a6f3"), name="Name 2", description="Description 2", ), _ProjectInfo( - uuid="9cd66c12-fe1b-11ed-b038-cdb13a78a6f3", + uuid=ProjectID("9cd66c12-fe1b-11ed-b038-cdb13a78a6f3"), name="Name 3", description="Description 3", ), _ProjectInfo( - uuid="b9e32426-fe1b-11ed-b038-cdb13a78a6f3", + uuid=ProjectID("b9e32426-fe1b-11ed-b038-cdb13a78a6f3"), name="Yoda 4", description="Description 4", ), _ProjectInfo( - uuid="bc57aff6-fe1b-11ed-b038-cdb13a78a6f3", + uuid=ProjectID("bc57aff6-fe1b-11ed-b038-cdb13a78a6f3"), name="Name 5", description="Yoda 5", ), @@ -302,6 +303,7 @@ async def test_list_projects_with_search_parameter( @pytest.mark.parametrize(*standard_user_role()) async def test_list_projects_with_order_by_parameter( + 
mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserDict, expected: ExpectedResponse, @@ -309,31 +311,30 @@ async def test_list_projects_with_order_by_parameter( tests_data_dir: Path, osparc_product_name: str, project_db_cleaner: None, - mock_catalog_api_get_services_for_user_in_product: None, ): projects_info = [ _ProjectInfo( - uuid="aaa0eca3-d210-4db6-84f9-63670b07176b", + uuid=ProjectID("aaa0eca3-d210-4db6-84f9-63670b07176b"), name="d", description="c", ), _ProjectInfo( - uuid="cccef868-fe1b-11ed-b038-cdb13a78a6f3", + uuid=ProjectID("cccef868-fe1b-11ed-b038-cdb13a78a6f3"), name="b", description="e", ), _ProjectInfo( - uuid="eee66c12-fe1b-11ed-b038-cdb13a78a6f3", + uuid=ProjectID("eee66c12-fe1b-11ed-b038-cdb13a78a6f3"), name="a", description="a", ), _ProjectInfo( - uuid="ddd32426-fe1b-11ed-b038-cdb13a78a6f3", + uuid=ProjectID("ddd32426-fe1b-11ed-b038-cdb13a78a6f3"), name="c", description="b", ), _ProjectInfo( - uuid="bbb7aff6-fe1b-11ed-b038-cdb13a78a6f3", + uuid=ProjectID("bbb7aff6-fe1b-11ed-b038-cdb13a78a6f3"), name="e", description="d", ), @@ -424,7 +425,9 @@ def setup_folders_db( ) .returning(folders_v2.c.folder_id) ) - _folder_id = result.fetchone()[0] + row = result.fetchone() + assert row is not None + _folder_id = row[0] con.execute( projects_to_folders.insert().values( @@ -434,7 +437,7 @@ def setup_folders_db( ) ) - yield FolderID(_folder_id) + yield _folder_id con.execute(projects_to_folders.delete()) con.execute(folders_v2.delete()) @@ -442,6 +445,7 @@ def setup_folders_db( @pytest.mark.parametrize(*standard_user_role()) async def test_list_projects_for_specific_folder_id( + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserDict, expected: ExpectedResponse, @@ -449,22 +453,21 @@ async def test_list_projects_for_specific_folder_id( tests_data_dir: Path, osparc_product_name: str, project_db_cleaner: None, - mock_catalog_api_get_services_for_user_in_product: None, setup_folders_db: FolderID, ): projects_info = [ _ProjectInfo( - uuid="d4d0eca3-d210-4db6-84f9-63670b07176b", + uuid=ProjectID("d4d0eca3-d210-4db6-84f9-63670b07176b"), name="Name 1", description="Description 1", ), _ProjectInfo( - uuid="2f3ef868-fe1b-11ed-b038-cdb13a78a6f3", + uuid=ProjectID("2f3ef868-fe1b-11ed-b038-cdb13a78a6f3"), name="Name 2", description="Description 2", ), _ProjectInfo( - uuid="9cd66c12-fe1b-11ed-b038-cdb13a78a6f3", + uuid=ProjectID("9cd66c12-fe1b-11ed-b038-cdb13a78a6f3"), name="Name 3", description="Description 3", ), @@ -524,7 +527,8 @@ async def test_list_projects_for_specific_folder_id( @pytest.mark.parametrize(*standard_and_tester_user_roles()) -async def test_list_and_patch_projects_with_template_type( +async def test_list_and_patch_projects_with_template_type( # noqa: PLR0915 + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserDict, expected: ExpectedResponse, @@ -532,7 +536,6 @@ async def test_list_and_patch_projects_with_template_type( tests_data_dir: Path, osparc_product_name: str, project_db_cleaner: None, - mock_catalog_api_get_services_for_user_in_product, ): projects_type = [ "STANDARD", diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__patch.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__patch.py index dbb33ea5ecd6..cc991148c468 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__patch.py +++ 
b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers__patch.py @@ -7,13 +7,15 @@ import json +import uuid from http import HTTPStatus +from unittest import mock +from unittest.mock import patch import pytest from aiohttp.test_utils import TestClient -from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver._meta import api_version_prefix from simcore_service_webserver.db.models import UserRole @@ -22,24 +24,6 @@ API_PREFIX = "/" + api_version_prefix -@pytest.fixture -def mock_catalog_api_get_services_for_user_in_product(mocker: MockerFixture): - mocker.patch( - "simcore_service_webserver.projects._controller.projects_rest.catalog_service.get_services_for_user_in_product", - spec=True, - return_value=[], - ) - - -@pytest.fixture -def mock_project_uses_available_services(mocker: MockerFixture): - mocker.patch( - "simcore_service_webserver.projects._controller.projects_rest.project_uses_available_services", - spec=True, - return_value=True, - ) - - @pytest.mark.parametrize( "user_role,expected", [ @@ -77,12 +61,11 @@ async def test_patch_project_entrypoint_access( "user_role,expected", [(UserRole.USER, status.HTTP_204_NO_CONTENT)] ) async def test_patch_project( + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, expected: HTTPStatus, - mock_catalog_api_get_services_for_user_in_product, - mock_project_uses_available_services, ): assert client.app base_url = client.app.router["patch_project"].url_for( @@ -206,3 +189,150 @@ async def test_patch_project( assert data["ui"] == _patch_ui_2["ui"] assert data["quality"] == _patch_quality["quality"] assert data["dev"] == _patch_dev["dev"] + + +@pytest.mark.parametrize( + "user_role,expected", [(UserRole.USER, status.HTTP_204_NO_CONTENT)] +) +async def test_patch_project_with_client_session_header( + client: TestClient, + logged_user: UserInfoDict, + user_project: ProjectDict, + expected: HTTPStatus, +): + assert client.app + base_url = client.app.router["patch_project"].url_for( + project_id=user_project["uuid"] + ) + + # Generate a valid UUID for client session ID + client_session_id = str(uuid.uuid4()) + + # Test patch with X-Client-Session-Id header - should succeed + resp = await client.patch( + f"{base_url}", + data=json.dumps( + { + "name": "testing-name-with-session", + "description": "testing-description-with-session", + } + ), + headers={"X-Client-Session-Id": client_session_id}, + ) + await assert_status(resp, expected) + + # Test patch without X-Client-Session-Id header - should also succeed (header is optional) + resp = await client.patch( + f"{base_url}", + data=json.dumps( + { + "name": "testing-name-without-session", + "description": "testing-description-without-session", + } + ), + ) + await assert_status(resp, expected) + + # Test patch with invalid X-Client-Session-Id header - should fail with validation error + resp = await client.patch( + f"{base_url}", + data=json.dumps( + { + "name": "testing-name-invalid-session", + "description": "testing-description-invalid-session", + } + ), + headers={"X-Client-Session-Id": "invalid-uuid-format"}, + ) + # This should fail validation since it's not a proper UUID + await assert_status(resp, status.HTTP_422_UNPROCESSABLE_ENTITY) + + 
+@pytest.mark.parametrize( + "user_role,expected", [(UserRole.USER, status.HTTP_204_NO_CONTENT)] +) +async def test_patch_project_with_mocked_header_parsing( + client: TestClient, + logged_user: UserInfoDict, + user_project: ProjectDict, + expected: HTTPStatus, +): + """Test that header_params = parse_request_headers_as(ClientSessionHeaderParams, request) works correctly""" + assert client.app + base_url = client.app.router["patch_project"].url_for( + project_id=user_project["uuid"] + ) + + # Generate a valid client session ID + test_client_session_id = str(uuid.uuid4()) + + # Mock the _projects_service.patch_project_for_user to spy on the client_session_id parameter + with patch( + "simcore_service_webserver.projects._controller.projects_rest._projects_service.patch_project_for_user" + ) as mock_patch_project_service: + # Make the service call succeed + mock_patch_project_service.return_value = None + + # Make the PATCH request with client session header + resp = await client.patch( + f"{base_url}", + data=json.dumps( + { + "name": "testing-name-with-mocked-header", + "description": "testing-description-with-mocked-header", + } + ), + headers={"X-Client-Session-Id": test_client_session_id}, + ) + await assert_status(resp, expected) + + # Verify that patch_project_for_user was called with the correct client_session_id + mock_patch_project_service.assert_called_once() + call_args = mock_patch_project_service.call_args + + # Extract the client_session_id from the call arguments + assert "client_session_id" in call_args.kwargs + assert call_args.kwargs["client_session_id"] == test_client_session_id + + +@pytest.mark.parametrize( + "user_role,expected", [(UserRole.USER, status.HTTP_204_NO_CONTENT)] +) +async def test_patch_project_without_client_session_header( + client: TestClient, + logged_user: UserInfoDict, + user_project: ProjectDict, + expected: HTTPStatus, +): + """Test patch project works when X-Client-Session-Id header is not provided""" + assert client.app + base_url = client.app.router["patch_project"].url_for( + project_id=user_project["uuid"] + ) + + # Mock the _projects_service.patch_project_for_user to spy on the client_session_id parameter + with patch( + "simcore_service_webserver.projects._controller.projects_rest._projects_service.patch_project_for_user" + ) as mock_patch_project_service: + # Make the service call succeed + mock_patch_project_service.return_value = None + + # Make the PATCH request WITHOUT client session header + resp = await client.patch( + f"{base_url}", + data=json.dumps( + { + "name": "testing-name-without-header", + "description": "testing-description-without-header", + } + ), + ) + await assert_status(resp, expected) + + # Verify that patch_project_for_user was called with client_session_id=None + mock_patch_project_service.assert_called_once() + call_args = mock_patch_project_service.call_args + + # Extract the client_session_id from the call arguments + assert "client_session_id" in call_args.kwargs + assert call_args.kwargs["client_session_id"] is None diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_groups_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_groups_handlers.py index ecbbeffc3344..fa017bc4f3f8 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_groups_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_groups_handlers.py @@ -6,52 +6,30 @@ from http import HTTPStatus +from unittest import mock import pytest from aiohttp.test_utils import TestClient from 
models_library.api_schemas_webserver.projects_access_rights import ( ProjectShareAccepted, ) -from pytest_mock import MockType -from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict +from pytest_simcore.helpers.webserver_users import NewUser, UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver.db.models import UserRole from simcore_service_webserver.projects.models import ProjectDict -@pytest.fixture -def mock_catalog_api_get_services_for_user_in_product( - mocker: MockerFixture, -) -> MockType: - return mocker.patch( - "simcore_service_webserver.projects._controller.projects_rest.catalog_service.get_services_for_user_in_product", - spec=True, - return_value=[], - ) - - -@pytest.fixture -def mock_project_uses_available_services(mocker: MockerFixture) -> MockType: - return mocker.patch( - "simcore_service_webserver.projects._controller.projects_rest.project_uses_available_services", - spec=True, - return_value=True, - ) - - @pytest.mark.acceptance_test( "Driving test for https://github.com/ITISFoundation/osparc-issues/issues/1547" ) @pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)]) async def test_projects_groups_full_workflow( # noqa: PLR0915 + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, expected: HTTPStatus, - mock_catalog_api_get_services_for_user_in_product: MockType, - mock_project_uses_available_services: MockType, ): assert client.app # check the default project permissions @@ -261,11 +239,10 @@ async def test_projects_groups_full_workflow( # noqa: PLR0915 @pytest.mark.parametrize("user_role", [UserRole.USER]) async def test_share_project( + with_dev_features_enabled: None, client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, - mock_catalog_api_get_services_for_user_in_product: MockType, - mock_project_uses_available_services: MockType, ): assert client.app @@ -329,17 +306,16 @@ async def test_share_project( ], ) async def test_share_project_with_roles( + with_dev_features_enabled: None, client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, - mock_catalog_api_get_services_for_user_in_product: MockType, - mock_project_uses_available_services: MockType, user_role: UserRole, expected_status: HTTPStatus, ): assert client.app - assert logged_user["role"] == user_role.value + assert logged_user["role"] == user_role # Attempt to share the project url = client.app.router["share_project"].url_for( diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_metadata_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_metadata_handlers.py index dae450a88fe7..ee94a5640aab 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_metadata_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_metadata_handlers.py @@ -21,19 +21,24 @@ from models_library.projects_nodes_io import NodeID from pydantic import TypeAdapter from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.helpers.webserver_parametrizations import ( ExpectedResponse, MockedStorageSubsystem, standard_user_role_response, ) +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status +from settings_library.rabbit import 
RabbitSettings from simcore_postgres_database.utils_projects_metadata import ( get as get_db_project_metadata, ) from simcore_service_webserver.projects import _crud_api_delete from simcore_service_webserver.projects.models import ProjectDict +pytest_simcore_core_services_selection = [ + "rabbit", +] + @pytest.mark.acceptance_test( "For https://github.com/ITISFoundation/osparc-simcore/issues/4313" @@ -113,6 +118,7 @@ async def _wait_until_deleted(): @pytest.mark.parametrize(*standard_user_role_response()) async def test_new_project_with_parent_project_node( + rabbit_settings: RabbitSettings, mock_dynamic_scheduler: None, # for deletion mocked_dynamic_services_interface: dict[str, MagicMock], @@ -122,12 +128,10 @@ async def test_new_project_with_parent_project_node( primary_group: dict[str, str], user_project: ProjectDict, expected: ExpectedResponse, - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], request_create_project: Callable[..., Awaitable[ProjectDict]], aiopg_engine: aiopg.sa.Engine, ): """this is new way of setting parents by using request headers""" - catalog_subsystem_mock([user_project]) parent_project = await request_create_project( client, expected.accepted, @@ -199,13 +203,11 @@ async def test_new_project_with_invalid_parent_project_node( primary_group: dict[str, str], user_project: ProjectDict, expected: ExpectedResponse, - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], request_create_project: Callable[..., Awaitable[ProjectDict]], aiopg_engine: aiopg.sa.Engine, faker: Faker, ): """this is new way of setting parents by using request headers""" - catalog_subsystem_mock([user_project]) parent_project = await request_create_project( client, expected.accepted, @@ -273,6 +275,7 @@ async def test_new_project_with_invalid_parent_project_node( @pytest.mark.parametrize(*standard_user_role_response()) async def test_set_project_parent_backward_compatibility( + rabbit_settings: RabbitSettings, mock_dynamic_scheduler: None, # for deletion mocked_dynamic_services_interface: dict[str, MagicMock], diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py index 4db7e6f8a22a..ce031477e935 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py @@ -6,6 +6,7 @@ import asyncio import re from collections.abc import Awaitable, Callable +from contextlib import suppress from dataclasses import dataclass, field from datetime import datetime, timedelta from http import HTTPStatus @@ -16,6 +17,7 @@ from uuid import uuid4 import pytest +import socketio import sqlalchemy as sa from aiohttp.test_utils import TestClient from aioresponses import aioresponses @@ -28,7 +30,9 @@ FileMetaDataGet, PresignedLink, ) +from models_library.api_schemas_webserver.projects_nodes import NodeGetIdle from models_library.generics import Envelope +from models_library.projects_nodes import Node, NodeShareStatus from models_library.projects_nodes_io import NodeID from models_library.services_resources import ( DEFAULT_SINGLE_SERVICE_NAME, @@ -37,6 +41,7 @@ ) from models_library.utils.fastapi_encoders import jsonable_encoder from pydantic import NonNegativeFloat, NonNegativeInt, TypeAdapter +from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from 
pytest_simcore.helpers.webserver_parametrizations import ( @@ -46,12 +51,27 @@ ) from servicelib.aiohttp import status from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings from simcore_postgres_database.models.projects import projects as projects_db_model from simcore_service_webserver.db.models import UserRole from simcore_service_webserver.projects._controller.nodes_rest import ( _ProjectNodePreview, ) from simcore_service_webserver.projects.models import ProjectDict +from simcore_service_webserver.socketio.messages import SOCKET_IO_NODE_UPDATED_EVENT +from tenacity import ( + AsyncRetrying, + RetryError, + retry_if_exception_type, + retry_unless_exception_type, + stop_after_delay, + wait_fixed, +) + +pytest_simcore_core_services_selection = [ + "rabbit", +] @pytest.mark.parametrize( @@ -402,11 +422,13 @@ class _RunningServices: running_services_uuids: list[str] = field(default_factory=list) def num_services( - self, *args, **kwargs # noqa: ARG002 + self, + *args, + **kwargs, # noqa: ARG002 ) -> list[DynamicServiceGet]: return [ DynamicServiceGet.model_validate( - DynamicServiceGet.model_config["json_schema_extra"]["examples"][1] + DynamicServiceGet.model_json_schema()["examples"][1] | {"service_uuid": service_uuid, "project_id": user_project["uuid"]} ) for service_uuid in self.running_services_uuids @@ -440,7 +462,9 @@ def inc_running_services(self, *args, **kwargs): # noqa: ARG002 *(client.post(f"{url}", json=body) for _ in range(NUM_DY_SERVICES)) ) # all shall have worked - await asyncio.gather(*(assert_status(r, expected.created) for r in responses)) + data_errors_list = await asyncio.gather( + *(assert_status(r, expected.created) for r in responses) + ) # but only the allowed number of services should have started assert ( @@ -450,6 +474,7 @@ def inc_running_services(self, *args, **kwargs): # noqa: ARG002 == NUM_DY_SERVICES ) assert len(running_services.running_services_uuids) == NUM_DY_SERVICES + assert len(set(running_services.running_services_uuids)) == NUM_DY_SERVICES # check that we do have NUM_DY_SERVICES nodes in the project with postgres_db.connect() as conn: result = conn.execute( @@ -460,6 +485,8 @@ def inc_running_services(self, *args, **kwargs): # noqa: ARG002 assert result workbench = result.one()[projects_db_model.c.workbench] assert len(workbench) == NUM_DY_SERVICES + num_services_in_project + node_ids_in_db = set(workbench.keys()) + set(running_services.running_services_uuids).issubset(node_ids_in_db) print(f"--> {NUM_DY_SERVICES} nodes were created concurrently") # # delete now @@ -502,7 +529,7 @@ async def test_create_node_does_not_start_dynamic_node_if_there_are_already_too_ "service_version": faker.numerify("%.#.#"), "service_id": None, } - response = await client.post(f"{ url}", json=body) + response = await client.post(f"{url}", json=body) await assert_status(response, expected.created) mocked_dynamic_services_interface[ "dynamic_scheduler.api.run_dynamic_service" @@ -533,7 +560,9 @@ class _RunninServices: running_services_uuids: list[str] = field(default_factory=list) async def num_services( - self, *args, **kwargs # noqa: ARG002 + self, + *args, + **kwargs, # noqa: ARG002 ) -> list[dict[str, Any]]: return [ {"service_uuid": service_uuid} @@ -622,7 +651,7 @@ async def test_create_node_does_start_dynamic_node_if_max_num_set_to_0( "service_version": faker.numerify("%.#.#"), "service_id": None, } - response = await client.post(f"{ 
url}", json=body) + response = await client.post(f"{url}", json=body) await assert_status(response, expected.created) mocked_dynamic_services_interface[ "dynamic_scheduler.api.run_dynamic_service" @@ -755,6 +784,21 @@ async def test_delete_node( assert node_id not in workbench +@pytest.fixture +async def socket_io_node_updated_mock( + mocker: MockerFixture, + client: TestClient, + logged_user, + create_socketio_connection: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] + ], +) -> mock.Mock: + socket_io_conn, _ = await create_socketio_connection(None, client) + mock_node_updated_handler = mocker.MagicMock() + socket_io_conn.on(SOCKET_IO_NODE_UPDATED_EVENT, handler=mock_node_updated_handler) + return mock_node_updated_handler + + @pytest.mark.parametrize(*standard_role_response(), ids=str) async def test_start_node( client: TestClient, @@ -773,7 +817,8 @@ async def test_start_node( all_service_uuids = list(project["workbench"]) # start the node, shall work as expected url = client.app.router["start_node"].url_for( - project_id=project["uuid"], node_id=choice(all_service_uuids) # noqa: S311 + project_id=project["uuid"], + node_id=choice(all_service_uuids), # noqa: S311 ) response = await client.post(f"{url}") data, error = await assert_status( @@ -794,6 +839,107 @@ async def test_start_node( ].assert_not_called() +@pytest.mark.parametrize(*standard_user_role()) +async def test_start_stop_node_sends_node_updated_socketio_event( + client: TestClient, + logged_user: dict[str, Any], + socket_io_node_updated_mock: mock.Mock, + user_project_with_num_dynamic_services: Callable[[int], Awaitable[ProjectDict]], + expected: ExpectedResponse, + mocked_dynamic_services_interface: dict[str, mock.MagicMock], + mock_catalog_api: dict[str, mock.Mock], + faker: Faker, + max_amount_of_auto_started_dyn_services: int, + mocker: MockerFixture, +): + assert client.app + project = await user_project_with_num_dynamic_services( + max_amount_of_auto_started_dyn_services or faker.pyint(min_value=3) + ) + all_service_uuids = list(project["workbench"]) + # start the node, shall work as expected + chosen_node_id = choice(all_service_uuids) # noqa: S311 + url = client.app.router["start_node"].url_for( + project_id=project["uuid"], node_id=chosen_node_id + ) + + # simulate that the dynamic service is running + mocked_dynamic_services_interface[ + "dynamic_scheduler.api.get_dynamic_service" + ].return_value = DynamicServiceGet.model_validate( + DynamicServiceGet.model_json_schema()["examples"][0] + | { + "user_id": logged_user["id"], + "project_id": project["uuid"], + "node_uuid": chosen_node_id, + } + ) + + response = await client.post(f"{url}") + await assert_status(response, expected.no_content) + mocked_dynamic_services_interface[ + "dynamic_scheduler.api.run_dynamic_service" + ].assert_called_once() + + socket_io_node_updated_mock.assert_called_once() + message = socket_io_node_updated_mock.call_args[0][0] + assert "data" in message + assert "project_id" in message + assert "node_id" in message + assert message["project_id"] == project["uuid"] + assert message["node_id"] == chosen_node_id + received_node = Node.model_validate(message["data"]) + assert received_node.state + assert received_node.state.lock_state + assert received_node.state.lock_state.locked is True + assert received_node.state.lock_state.current_user_groupids == [ + logged_user["primary_gid"] + ] + assert received_node.state.lock_state.status is NodeShareStatus.OPENED + socket_io_node_updated_mock.reset_mock() + + 
# now stop the node + url = client.app.router["stop_node"].url_for( + project_id=project["uuid"], node_id=chosen_node_id + ) + # simulate that the dynamic service is idle + mocked_dynamic_services_interface[ + "dynamic_scheduler.api.get_dynamic_service" + ].return_value = NodeGetIdle.model_validate( + NodeGetIdle.model_json_schema()["examples"][0] + | { + "user_id": logged_user["id"], + "project_id": project["uuid"], + "node_uuid": chosen_node_id, + } + ) + response = await client.post(f"{url}") + await assert_status(response, expected.accepted) + async for attempt in AsyncRetrying( + retry=retry_if_exception_type(AssertionError), + stop=stop_after_delay(5), + wait=wait_fixed(0.1), + reraise=True, + ): + with attempt: + mocked_dynamic_services_interface[ + "dynamic_scheduler.api.stop_dynamic_service" + ].assert_called_once() + socket_io_node_updated_mock.assert_called_once() + message = socket_io_node_updated_mock.call_args[0][0] + assert "data" in message + assert "project_id" in message + assert "node_id" in message + assert message["project_id"] == project["uuid"] + assert message["node_id"] == chosen_node_id + received_node = Node.model_validate(message["data"]) + assert received_node.state + assert received_node.state.lock_state + assert received_node.state.lock_state.locked is False + assert received_node.state.lock_state.current_user_groupids is None + assert received_node.state.lock_state.status is None + + @pytest.mark.parametrize(*standard_user_role()) async def test_start_node_raises_if_dynamic_services_limit_attained( client: TestClient, @@ -817,7 +963,8 @@ async def test_start_node_raises_if_dynamic_services_limit_attained( ] # start the node, shall work as expected url = client.app.router["start_node"].url_for( - project_id=project["uuid"], node_id=choice(all_service_uuids) # noqa: S311 + project_id=project["uuid"], + node_id=choice(all_service_uuids), # noqa: S311 ) response = await client.post(f"{url}") data, error = await assert_status( @@ -852,7 +999,8 @@ async def test_start_node_starts_dynamic_service_if_max_number_of_services_set_t ] # start the node, shall work as expected url = client.app.router["start_node"].url_for( - project_id=project["uuid"], node_id=choice(all_service_uuids) # noqa: S311 + project_id=project["uuid"], + node_id=choice(all_service_uuids), # noqa: S311 ) response = await client.post(f"{url}") data, error = await assert_status( @@ -885,7 +1033,8 @@ async def test_start_node_raises_if_called_with_wrong_data( # start the node, with wrong project url = client.app.router["start_node"].url_for( - project_id=faker.uuid4(), node_id=choice(all_service_uuids) # noqa: S311 + project_id=faker.uuid4(), + node_id=choice(all_service_uuids), # noqa: S311 ) response = await client.post(f"{url}") data, error = await assert_status( @@ -916,6 +1065,8 @@ async def test_start_node_raises_if_called_with_wrong_data( @pytest.mark.parametrize(*standard_role_response(), ids=str) async def test_stop_node( + rabbit_settings: RabbitSettings, + use_in_memory_redis: RedisSettings, client: TestClient, user_project_with_num_dynamic_services: Callable[[int], Awaitable[ProjectDict]], user_role: UserRole, @@ -932,21 +1083,38 @@ async def test_stop_node( all_service_uuids = list(project["workbench"]) # start the node, shall work as expected url = client.app.router["stop_node"].url_for( - project_id=project["uuid"], node_id=choice(all_service_uuids) # noqa: S311 + project_id=project["uuid"], + node_id=choice(all_service_uuids), # noqa: S311 ) response = await client.post(f"{url}") - 
data, error = await assert_status( + _, error = await assert_status( response, status.HTTP_202_ACCEPTED if user_role == UserRole.GUEST else expected.accepted, ) + if error is None: - mocked_dynamic_services_interface[ - "dynamic_scheduler.api.stop_dynamic_service" - ].assert_called_once() + async for attempt in AsyncRetrying( + wait=wait_fixed(0.1), + stop=stop_after_delay(5), + retry=retry_if_exception_type(AssertionError), + reraise=True, + ): + with attempt: + mocked_dynamic_services_interface[ + "dynamic_scheduler.api.stop_dynamic_service" + ].assert_called_once() else: - mocked_dynamic_services_interface[ - "dynamic_scheduler.api.stop_dynamic_service" - ].assert_not_called() + with suppress(RetryError): + async for attempt in AsyncRetrying( + wait=wait_fixed(0.1), + stop=stop_after_delay(5), + retry=retry_unless_exception_type(AssertionError), + reraise=True, + ): + with attempt: + mocked_dynamic_services_interface[ + "dynamic_scheduler.api.stop_dynamic_service" + ].assert_not_called() @pytest.fixture diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py index 3bba1eaf1187..59958de8acf2 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__patch.py @@ -8,12 +8,14 @@ import json from http import HTTPStatus +from unittest import mock import pytest from aiohttp.test_utils import TestClient +from deepdiff import DeepDiff from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from servicelib.rabbitmq.rpc_interfaces.catalog.errors import ( CatalogForbiddenError, @@ -26,24 +28,6 @@ API_PREFIX = "/" + api_version_prefix -@pytest.fixture -def mock_catalog_api_get_services_for_user_in_product(mocker: MockerFixture): - mocker.patch( - "simcore_service_webserver.projects._controller.projects_rest.catalog_service.get_services_for_user_in_product", - spec=True, - return_value=[], - ) - - -@pytest.fixture -def mock_project_uses_available_services(mocker: MockerFixture): - mocker.patch( - "simcore_service_webserver.projects._controller.projects_rest.project_uses_available_services", - spec=True, - return_value=True, - ) - - @pytest.fixture def mock_catalog_rpc_check_for_service(mocker: MockerFixture): mocker.patch( @@ -73,6 +57,7 @@ def mocked_notify_project_node_update(mocker: MockerFixture): ) async def test_patch_project_node_entrypoint_access( mock_dynamic_scheduler: None, + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, @@ -95,12 +80,11 @@ async def test_patch_project_node_entrypoint_access( ) async def test_patch_project_node( mock_dynamic_scheduler: None, + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, expected: HTTPStatus, - mock_catalog_api_get_services_for_user_in_product: None, - mock_project_uses_available_services: None, mock_catalog_rpc_check_for_service: None, ): node_id = next(iter(user_project["workbench"])) @@ -185,7 +169,9 @@ async def test_patch_project_node( "output_1": { "store": 0, "path": 
"9934cba6-4b51-11ef-968a-02420a00f1c1/571ffc8d-fa6e-411f-afc8-9c62d08dd2fa/matus.txt", + "label": "matus.txt", "eTag": "d41d8cd98f00b204e9800998ecf8427e", + "dataset": None, } } } @@ -202,7 +188,6 @@ async def test_patch_project_node( _tested_node = data["workbench"][node_id] assert _tested_node["label"] == "testing-string" - assert _tested_node["progress"] is None assert _tested_node["key"] == _patch_key["key"] assert _tested_node["version"] == _patch_version["version"] assert _tested_node["inputs"] == _patch_inputs["inputs"] @@ -217,16 +202,14 @@ async def test_patch_project_node( ) async def test_patch_project_node_notifies( mocker: MockerFixture, + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, expected: HTTPStatus, - mock_catalog_api_get_services_for_user_in_product, - mock_project_uses_available_services, mock_catalog_rpc_check_for_service, mocked_notify_project_node_update, ): - node_id = next(iter(user_project["workbench"])) assert client.app base_url = client.app.router["patch_project_node"].url_for( @@ -252,13 +235,11 @@ async def test_patch_project_node_notifies( "user_role,expected", [(UserRole.USER, status.HTTP_204_NO_CONTENT)] ) async def test_patch_project_node_inputs_notifies( - mocker: MockerFixture, + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, expected: HTTPStatus, - mock_catalog_api_get_services_for_user_in_product, - mock_project_uses_available_services, mocked_notify_project_node_update, ): node_id = next(iter(user_project["workbench"])) @@ -283,22 +264,25 @@ async def test_patch_project_node_inputs_notifies( await assert_status(resp, expected) assert mocked_notify_project_node_update.call_count > 1 # 1 message per node updated - assert [ - call_args[0][2] - for call_args in mocked_notify_project_node_update.await_args_list - ] == list(user_project["workbench"].keys()) + assert not DeepDiff( + [ + call_args[0][2] + for call_args in mocked_notify_project_node_update.await_args_list + ], + list(user_project["workbench"].keys()), + ignore_order=True, + ) @pytest.mark.parametrize( "user_role,expected", [(UserRole.USER, status.HTTP_204_NO_CONTENT)] ) async def test_patch_project_node_inputs_with_data_type_change( + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, expected: HTTPStatus, - mock_catalog_api_get_services_for_user_in_product, - mock_project_uses_available_services, ): node_id = next(iter(user_project["workbench"])) assert client.app @@ -350,8 +334,6 @@ async def test_patch_project_node_service_key_with_error( logged_user: UserInfoDict, user_project: ProjectDict, expected: HTTPStatus, - mock_catalog_api_get_services_for_user_in_product, - mock_project_uses_available_services, mocker: MockerFixture, ): node_id = next(iter(user_project["workbench"])) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__services_access.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__services_access.py index 238cba620554..af61022e28b2 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__services_access.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handlers__services_access.py @@ -17,7 +17,7 @@ from models_library.services_history import ServiceRelease from pytest_mock import MockerFixture from 
pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from servicelib.rabbitmq import RPCServerError from simcore_service_webserver.db.models import UserRole diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_patch_project_and_notify_users_locking.py b/services/web/server/tests/unit/with_dbs/02/test_projects_patch_project_and_notify_users_locking.py new file mode 100644 index 000000000000..3d82099cd563 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_patch_project_and_notify_users_locking.py @@ -0,0 +1,215 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +""" +Tests for patch_project_and_notify_users function focusing on the Redis locking mechanism +and concurrent access patterns. +""" + +import asyncio +from http import HTTPStatus +from typing import Any +from uuid import uuid4 + +import pytest +from aiohttp.test_utils import TestClient +from faker import Faker +from models_library.projects import ProjectID +from pytest_simcore.helpers.webserver_users import UserInfoDict +from servicelib.aiohttp import status +from servicelib.redis import increment_and_return_project_document_version +from simcore_service_webserver.db.models import UserRole +from simcore_service_webserver.projects._projects_service import ( + patch_project_and_notify_users, +) +from simcore_service_webserver.projects.models import ProjectDict +from simcore_service_webserver.redis import get_redis_document_manager_client_sdk + + +@pytest.fixture +def concurrent_patch_data_list(faker: Faker) -> list[dict[str, Any]]: + """Generate multiple different patch data for concurrent testing""" + return [{"name": f"concurrent-test-{faker.word()}-{i}"} for i in range(10)] + + +@pytest.fixture +def user_primary_gid(logged_user: UserInfoDict) -> int: + """Extract user primary group ID from logged user""" + return int(logged_user["primary_gid"]) + + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.USER, status.HTTP_200_OK), + ], +) +async def test_patch_project_and_notify_users_sequential( + user_role: UserRole, + with_enabled_rtc_collaboration: None, + expected: HTTPStatus, + client: TestClient, + user_project: ProjectDict, + user_primary_gid: int, + faker: Faker, +): + """Test that patch_project_and_notify_users works correctly in sequential mode""" + assert client.app + project_uuid = ProjectID(user_project["uuid"]) + + # Perform sequential patches + patch_data_1 = {"name": f"sequential-test-{faker.word()}-1"} + patch_data_2 = {"name": f"sequential-test-{faker.word()}-2"} + + # First patch + await patch_project_and_notify_users( + app=client.app, + project_uuid=project_uuid, + patch_project_data=patch_data_1, + user_primary_gid=user_primary_gid, + client_session_id=None, + ) + + # Get version after first patch + redis_client = get_redis_document_manager_client_sdk(client.app) + version_1 = await increment_and_return_project_document_version( + redis_client=redis_client, project_uuid=project_uuid + ) + + # Second patch + await patch_project_and_notify_users( + app=client.app, + project_uuid=project_uuid, + patch_project_data=patch_data_2, + user_primary_gid=user_primary_gid, + client_session_id=None, + ) + + # Get version after second patch + version_2 = await 
increment_and_return_project_document_version( + redis_client=redis_client, project_uuid=project_uuid + ) + + # Verify versions are incrementing correctly + assert version_2 > version_1 + assert version_2 - version_1 == 2 # Two operations should increment by 2 + + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.USER, status.HTTP_200_OK), + ], +) +async def test_patch_project_and_notify_users_concurrent_locking( + user_role: UserRole, + with_enabled_rtc_collaboration: None, + expected: HTTPStatus, + client: TestClient, + user_project: ProjectDict, + user_primary_gid: int, + concurrent_patch_data_list: list[dict[str, Any]], +): + """Test that patch_project_and_notify_users handles concurrent access correctly with locking""" + assert client.app + project_uuid = ProjectID(user_project["uuid"]) + + # Get initial version + redis_client = get_redis_document_manager_client_sdk(client.app) + initial_version = await increment_and_return_project_document_version( + redis_client=redis_client, project_uuid=project_uuid + ) + + # Create concurrent patch tasks + tasks = [ + patch_project_and_notify_users( + app=client.app, + project_uuid=project_uuid, + patch_project_data=patch_data, + user_primary_gid=user_primary_gid, + client_session_id=None, + ) + for patch_data in concurrent_patch_data_list + ] + + # Execute all tasks concurrently + await asyncio.gather(*tasks) + + # Get final version + final_version = await increment_and_return_project_document_version( + redis_client=redis_client, project_uuid=project_uuid + ) + + # Verify that all concurrent operations were processed and version incremented correctly + # Each patch_project_and_notify_users call should increment version by 1 + expected_final_version = initial_version + len(concurrent_patch_data_list) + 1 + assert final_version == expected_final_version + + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.USER, status.HTTP_200_OK), + ], +) +async def test_patch_project_and_notify_users_concurrent_different_projects( + user_role: UserRole, + with_enabled_rtc_collaboration: None, + expected: HTTPStatus, + client: TestClient, + user_project: ProjectDict, + user_primary_gid: int, + faker: Faker, +): + """Test that concurrent patches to different projects don't interfere with each other""" + assert client.app + + # Use different project UUIDs to simulate different projects + project_uuid_1 = ProjectID(user_project["uuid"]) + project_uuid_2 = ProjectID(str(uuid4())) # Simulate second project + project_uuid_3 = ProjectID(str(uuid4())) # Simulate third project + + redis_client = get_redis_document_manager_client_sdk(client.app) + + # Get initial versions + initial_version_1 = await increment_and_return_project_document_version( + redis_client=redis_client, project_uuid=project_uuid_1 + ) + initial_version_2 = await increment_and_return_project_document_version( + redis_client=redis_client, project_uuid=project_uuid_2 + ) + initial_version_3 = await increment_and_return_project_document_version( + redis_client=redis_client, project_uuid=project_uuid_3 + ) + + # Note: For this test, we only test the locking mechanism for project_1 + # as we would need to create actual projects for the others + patch_data = {"name": f"concurrent-different-projects-{faker.word()}"} + + # Only test project_1 (real project) but verify version isolation + await patch_project_and_notify_users( + app=client.app, + project_uuid=project_uuid_1, + patch_project_data=patch_data, + user_primary_gid=user_primary_gid, + client_session_id=None, + ) + + 
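A minimal sketch of the version arithmetic these assertions rely on, using a plain integer in place of the Redis-backed counter; the assumption, implied by the surrounding checks, is that every call to increment_and_return_project_document_version bumps the version by exactly one, including the calls the tests themselves make just to read it:

version = 0

def increment_and_return() -> int:
    # stand-in for increment_and_return_project_document_version: bump, then return
    global version
    version += 1
    return version

initial = increment_and_return()   # first read                 -> 1
for _ in range(10):                # ten patches, one bump each -> 11
    increment_and_return()
final = increment_and_return()     # final read                  -> 12
assert final == initial + 10 + 1   # mirrors expected_final_version above and the +2/+1 checks below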
# Get final versions + final_version_1 = await increment_and_return_project_document_version( + redis_client=redis_client, project_uuid=project_uuid_1 + ) + final_version_2 = await increment_and_return_project_document_version( + redis_client=redis_client, project_uuid=project_uuid_2 + ) + final_version_3 = await increment_and_return_project_document_version( + redis_client=redis_client, project_uuid=project_uuid_3 + ) + + # Verify that only project_1 version changed + assert final_version_1 == initial_version_1 + 2 # One patch + one version check + assert final_version_2 == initial_version_2 + 1 # Only version check + assert final_version_3 == initial_version_3 + 1 # Only version check diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_ports_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_ports_handlers.py index 78e337acedcd..bd2b14a4ad00 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_ports_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_ports_handlers.py @@ -12,6 +12,7 @@ import pytest from aiohttp.test_utils import TestClient from aioresponses import aioresponses as AioResponsesMock # noqa: N812 +from deepdiff import DeepDiff from models_library.api_schemas_directorv2.computations import TasksOutputs from models_library.api_schemas_webserver.projects import ProjectGet from models_library.utils.fastapi_encoders import jsonable_encoder @@ -20,8 +21,8 @@ from pytest_simcore.helpers.webserver_fake_ports_data import ( PROJECTS_METADATA_PORTS_RESPONSE_BODY_DATA, ) -from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.helpers.webserver_parametrizations import MockedStorageSubsystem +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from servicelib.aiohttp.long_running_tasks.client import long_running_task_request from simcore_service_webserver.db.models import UserRole @@ -74,7 +75,7 @@ def mock_directorv2_service_api_responses( return aioresponses_mocker -@pytest.mark.acceptance_test() +@pytest.mark.acceptance_test @pytest.mark.parametrize( "user_role,expected", [ @@ -109,55 +110,62 @@ async def test_io_workflow( ports_meta, error = await assert_status(resp, expected_status_code=expected) if not error: - assert ports_meta == [ - { - "key": "38a0d401-af4b-4ea7-ab4c-5005c712a546", - "kind": "input", - "content_schema": { - "description": "Input integer value", - "title": "X", - "type": "integer", + diff = DeepDiff( + ports_meta, + [ + { + "key": "38a0d401-af4b-4ea7-ab4c-5005c712a546", + "kind": "input", + "content_schema": { + "description": "Input integer value", + "title": "X", + "type": "integer", + }, }, - }, - { - "key": "fc48252a-9dbb-4e07-bf9a-7af65a18f612", - "kind": "input", - "content_schema": { - "description": "Input integer value", - "title": "Z", - "type": "integer", + { + "key": "fc48252a-9dbb-4e07-bf9a-7af65a18f612", + "kind": "input", + "content_schema": { + "description": "Input integer value", + "title": "Z", + "type": "integer", + }, }, - }, - { - "key": "7bf0741f-bae4-410b-b662-fc34b47c27c9", - "kind": "input", - "content_schema": { - "description": "Input boolean value", - "title": "on", - "type": "boolean", + { + "key": "7bf0741f-bae4-410b-b662-fc34b47c27c9", + "kind": "input", + "content_schema": { + "description": "Input boolean value", + "title": "on", + "type": "boolean", + }, }, - }, - { - "key": "09fd512e-0768-44ca-81fa-0cecab74ec1a", - "kind": "output", - "content_schema": { - "description": 
"Output integer value", - "title": "Random sleep interval_2", - "type": "integer", + { + "key": "09fd512e-0768-44ca-81fa-0cecab74ec1a", + "kind": "output", + "content_schema": { + "description": "Output integer value", + "title": "Random sleep interval_2", + "type": "integer", + }, }, - }, - { - "key": "76f607b4-8761-4f96-824d-cab670bc45f5", - "kind": "output", - "content_schema": { - "description": "Output integer value", - "title": "Random sleep interval", - "type": "integer", + { + "key": "76f607b4-8761-4f96-824d-cab670bc45f5", + "kind": "output", + "content_schema": { + "description": "Output integer value", + "title": "Random sleep interval", + "type": "integer", + }, }, - }, - ] + ], + ignore_order=True, + ) - assert ports_meta == PROJECTS_METADATA_PORTS_RESPONSE_BODY_DATA + assert not diff + assert not DeepDiff( + ports_meta, PROJECTS_METADATA_PORTS_RESPONSE_BODY_DATA, ignore_order=True + ) # get_project_inputs expected_url = client.app.router["get_project_inputs"].url_for( diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_repository.py b/services/web/server/tests/unit/with_dbs/02/test_projects_repository.py index 378e4c9c0d34..146d1964f0db 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_repository.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_repository.py @@ -12,7 +12,7 @@ from common_library.users_enums import UserRole from models_library.basic_types import IDStr from models_library.rest_ordering import OrderBy, OrderDirection -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from simcore_service_webserver.projects import ( _projects_repository as projects_service_repository, ) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_rpc.py b/services/web/server/tests/unit/with_dbs/02/test_projects_rpc.py index ac38c6061d4c..a7ec233d29d5 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_rpc.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_rpc.py @@ -21,7 +21,7 @@ from pydantic import ValidationError from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict +from pytest_simcore.helpers.webserver_users import NewUser, UserInfoDict from servicelib.rabbitmq import RabbitMQRPCClient from servicelib.rabbitmq.rpc_interfaces.webserver import projects as projects_rpc from servicelib.rabbitmq.rpc_interfaces.webserver.errors import ( @@ -90,6 +90,7 @@ async def test_rpc_client_mark_project_as_job( user_id=user_id, project_uuid=project_uuid, job_parent_resource_name="solvers/solver123/version/1.2.3", + storage_assets_deleted=False, ) @@ -109,6 +110,7 @@ async def test_rpc_client_list_my_projects_marked_as_jobs( user_id=user_id, project_uuid=project_uuid, job_parent_resource_name="solvers/solver123/version/1.2.3", + storage_assets_deleted=False, ) # List projects marked as jobs @@ -170,6 +172,7 @@ async def test_errors_on_rpc_client_mark_project_as_job( user_id=other_user_id, # <-- no access project_uuid=project_uuid, job_parent_resource_name="solvers/solver123/version/1.2.3", + storage_assets_deleted=False, ) assert exc_info.value.error_context()["project_uuid"] == project_uuid @@ -181,6 +184,7 @@ async def test_errors_on_rpc_client_mark_project_as_job( user_id=logged_user["id"], project_uuid=UUID("00000000-0000-0000-0000-000000000000"), # <-- wont find 
job_parent_resource_name="solvers/solver123/version/1.2.3", + storage_assets_deleted=False, ) with pytest.raises(ValidationError, match="job_parent_resource_name") as exc_info: @@ -190,6 +194,7 @@ async def test_errors_on_rpc_client_mark_project_as_job( user_id=user_id, project_uuid=project_uuid, job_parent_resource_name="This is not a resource", # <-- wrong format + storage_assets_deleted=False, ) assert exc_info.value.error_count() == 1 @@ -216,6 +221,7 @@ async def test_rpc_client_list_projects_marked_as_jobs_with_metadata_filter( user_id=user_id, project_uuid=project_uuid, job_parent_resource_name="solvers/solver123/version/1.2.3", + storage_assets_deleted=False, ) # Set custom metadata on the project @@ -307,3 +313,147 @@ async def test_rpc_client_list_projects_marked_as_jobs_with_metadata_filter( assert page.meta.total == 0 assert len(page.data) == 0 + + +async def test_rpc_client_get_project_marked_as_job_found( + rpc_client: RabbitMQRPCClient, + product_name: ProductName, + logged_user: UserInfoDict, + user_project: ProjectDict, +): + project_uuid = ProjectID(user_project["uuid"]) + user_id = logged_user["id"] + job_parent_resource_name = "solvers/solver123/version/1.2.3" + + # Mark the project as a job first + await projects_rpc.mark_project_as_job( + rpc_client=rpc_client, + product_name=product_name, + user_id=user_id, + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=False, + ) + + # Should be able to retrieve it + project_job = await projects_rpc.get_project_marked_as_job( + rpc_client=rpc_client, + product_name=product_name, + user_id=user_id, + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + ) + assert project_job.uuid == project_uuid + assert project_job.job_parent_resource_name == job_parent_resource_name + assert project_job.name == user_project["name"] + + +async def test_rpc_client_get_project_marked_as_job_not_found( + rpc_client: RabbitMQRPCClient, + product_name: ProductName, + logged_user: UserInfoDict, + user_project: ProjectDict, +): + + project_uuid = ProjectID(user_project["uuid"]) + user_id = logged_user["id"] + job_parent_resource_name = "solvers/solver123/version/1.2.3" + + # Do NOT mark the project as a job, so it should not be found + with pytest.raises(ProjectNotFoundRpcError): + await projects_rpc.get_project_marked_as_job( + rpc_client=rpc_client, + product_name=product_name, + user_id=user_id, + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + ) + + +async def test_rpc_client_get_project_marked_as_job_forbidden( + rpc_client: RabbitMQRPCClient, + product_name: ProductName, + logged_user: UserInfoDict, + other_user: UserInfoDict, + user_project: ProjectDict, +): + """ + Ensures ProjectForbiddenRpcError is raised if the user does not have read access to the project. 
+ """ + project_uuid = ProjectID(user_project["uuid"]) + job_parent_resource_name = "solvers/solver123/version/1.2.3" + + # Mark the project as a job as the owner + await projects_rpc.mark_project_as_job( + rpc_client=rpc_client, + product_name=product_name, + user_id=logged_user["id"], + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=False, + ) + + # Try to get the project as another user (should not have access) + with pytest.raises(ProjectForbiddenRpcError): + await projects_rpc.get_project_marked_as_job( + rpc_client=rpc_client, + product_name=product_name, + user_id=other_user["id"], + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + ) + + +async def test_mark_and_get_project_job_storage_assets_deleted( + rpc_client: RabbitMQRPCClient, + product_name: ProductName, + logged_user: UserInfoDict, + user_project: ProjectDict, +): + """ + Marks a project as a job with storage_assets_deleted True, checks the value, + then marks it again with storage_assets_deleted False and checks the value again. + """ + project_uuid = ProjectID(user_project["uuid"]) + user_id = logged_user["id"] + job_parent_resource_name = "solvers/solver123/version/1.2.3" + + # First mark as job with storage_assets_deleted=True + await projects_rpc.mark_project_as_job( + rpc_client=rpc_client, + product_name=product_name, + user_id=user_id, + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=True, + ) + + # Retrieve and check + project_job = await projects_rpc.get_project_marked_as_job( + rpc_client=rpc_client, + product_name=product_name, + user_id=user_id, + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + ) + assert project_job.storage_assets_deleted is True + + # Mark again as job with storage_assets_deleted=False + await projects_rpc.mark_project_as_job( + rpc_client=rpc_client, + product_name=product_name, + user_id=user_id, + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + storage_assets_deleted=False, + ) + + # Retrieve and check again + project_job = await projects_rpc.get_project_marked_as_job( + rpc_client=rpc_client, + product_name=product_name, + user_id=user_id, + project_uuid=project_uuid, + job_parent_resource_name=job_parent_resource_name, + ) + assert project_job.storage_assets_deleted is False diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py index 68f0c0dfe6e5..cfa61a8e5df4 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py @@ -6,13 +6,14 @@ # pylint: disable=unused-variable import asyncio -import time +import contextlib +import logging from collections.abc import AsyncIterator, Awaitable, Callable, Iterator from copy import deepcopy from datetime import UTC, datetime, timedelta from decimal import Decimal from http import HTTPStatus -from typing import Any +from typing import Any, TypedDict from unittest import mock from unittest.mock import call @@ -21,6 +22,7 @@ import sqlalchemy as sa from aiohttp import ClientResponse from aiohttp.test_utils import TestClient, TestServer +from deepdiff import DeepDiff # type: ignore[attr-defined] from faker import Faker from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet from 
models_library.api_schemas_dynamic_scheduler.dynamic_services import ( @@ -30,13 +32,15 @@ from models_library.api_schemas_resource_usage_tracker.credit_transactions import ( WalletTotalCredits, ) +from models_library.api_schemas_webserver.projects import ( + ProjectShareStateOutputSchema, + ProjectStateOutputSchema, +) from models_library.api_schemas_webserver.projects_nodes import NodeGet, NodeGetIdle +from models_library.groups import GroupID from models_library.projects import ProjectID -from models_library.projects_access import Owner from models_library.projects_state import ( - ProjectLocked, ProjectRunningState, - ProjectState, ProjectStatus, RunningState, ) @@ -46,18 +50,21 @@ ServiceResourcesDictHelpers, ) from models_library.utils.fastapi_encoders import jsonable_encoder -from pydantic import PositiveInt +from pydantic import TypeAdapter from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.webserver_login import UserInfoDict, log_client_in +from pytest_simcore.helpers.logging_tools import log_context +from pytest_simcore.helpers.webserver_login import LoggedUser, log_client_in from pytest_simcore.helpers.webserver_parametrizations import ( ExpectedResponse, standard_role_response, + standard_user_role_response, ) from pytest_simcore.helpers.webserver_projects import assert_get_same_project +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE +from settings_library.rabbit import RabbitSettings from simcore_postgres_database.models.products import products from simcore_postgres_database.models.wallets import wallets from simcore_service_webserver._meta import API_VTAG @@ -66,32 +73,38 @@ from simcore_service_webserver.socketio.messages import SOCKET_IO_PROJECT_UPDATED_EVENT from simcore_service_webserver.utils import to_datetime from socketio.exceptions import ConnectionError as SocketConnectionError +from tenacity import ( + RetryError, + before_sleep_log, + retry, + retry_if_exception_type, + retry_unless_exception_type, + stop_after_delay, + wait_fixed, +) -RESOURCE_NAME = "projects" -API_PREFIX = f"/{API_VTAG}" +pytest_simcore_core_services_selection = [ + "rabbit", +] -@pytest.fixture -def app_environment( - app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch -) -> EnvVarsDict: - # disable the garbage collector - monkeypatch.setenv("WEBSERVER_GARBAGE_COLLECTOR", "null") - monkeypatch.setenv("WEBSERVER_DEV_FEATURES_ENABLED", "1") - return app_environment | { - "WEBSERVER_GARBAGE_COLLECTOR": "null", - "WEBSERVER_DEV_FEATURES_ENABLED": "1", - } +RESOURCE_NAME = "projects" +API_PREFIX = f"/{API_VTAG}" def assert_replaced(current_project, update_data): def _extract(dikt, keys): return {k: dikt[k] for k in keys} - modified = [ + skip = [ "lastChangeDate", + "templateType", + "trashedAt", + "trashedBy", + "workspaceId", + "folderId", ] - keep = [k for k in update_data if k not in modified] + keep = [k for k in update_data if k not in skip] assert _extract(current_project, keep) == _extract(update_data, keep) @@ -101,7 +114,7 @@ def _extract(dikt, keys): async def _list_projects( client: TestClient, - expected: HTTPStatus, + expected: int, query_parameters: dict | None = None, ) -> list[ProjectDict]: assert client.app @@ -114,11 +127,12 @@ async def _list_projects( resp = await client.get(f"{url}") data, _ = 
await assert_status(resp, expected) + assert isinstance(data, list) return data async def _replace_project( - client: TestClient, project_update: ProjectDict, expected: HTTPStatus + client: TestClient, project_update: ProjectDict, expected: int ) -> ProjectDict: assert client.app @@ -137,30 +151,44 @@ async def _replace_project( return data -async def _connect_websocket( - socketio_client_factory: Callable, - check_connection: bool, - client: TestClient, - client_id: str, - events: dict[str, Callable] | None = None, -) -> socketio.AsyncClient | None: - try: - sio = await socketio_client_factory(client_id, client) +class _SocketHandlers(TypedDict): + SOCKET_IO_PROJECT_UPDATED_EVENT: mock.Mock + + +@pytest.fixture +async def create_socketio_connection_with_handlers( + create_socketio_connection: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] + ], + mocker: MockerFixture, +) -> Callable[ + [str | None, TestClient], + Awaitable[tuple[socketio.AsyncClient, str, _SocketHandlers]], +]: + async def _( + client_session_id: str | None, client: TestClient + ) -> tuple[socketio.AsyncClient, str, _SocketHandlers]: + sio, received_client_id = await create_socketio_connection( + client_session_id, client + ) assert sio.sid - if events: - for event, handler in events.items(): - sio.on(event, handler=handler) - return sio - except SocketConnectionError: - if check_connection: - pytest.fail("socket io connection should not fail") + + event_handlers = _SocketHandlers( + **{SOCKET_IO_PROJECT_UPDATED_EVENT: mocker.Mock()} + ) + + for event, handler in event_handlers.items(): + sio.on(event, handler=handler) + return sio, received_client_id, event_handlers + + return _ async def _open_project( client: TestClient, client_id: str, project: ProjectDict, - expected: HTTPStatus | list[HTTPStatus], + expected: int | list[int], ) -> tuple[dict, dict]: assert client.app @@ -186,7 +214,7 @@ async def _open_project( async def _close_project( - client: TestClient, client_id: str, project: dict, expected: HTTPStatus + client: TestClient, client_id: str, project: dict, expected: int ): assert client.app @@ -198,8 +226,8 @@ async def _close_project( async def _state_project( client: TestClient, project: dict, - expected: HTTPStatus, - expected_project_state: ProjectState, + expected: int, + expected_project_state: ProjectStateOutputSchema, ): assert client.app @@ -207,44 +235,53 @@ async def _state_project( resp = await client.get(f"{url}") data, error = await assert_status(resp, expected) if not error: - # the project is locked - received_state = ProjectState(**data) + received_state = ProjectStateOutputSchema(**data) assert received_state == expected_project_state async def _assert_project_state_updated( handler: mock.Mock, shared_project: dict, - expected_project_state_updates: list[ProjectState], + expected_project_state_updates: list[ProjectStateOutputSchema], ) -> None: - if not expected_project_state_updates: - handler.assert_not_called() - else: - # wait for the calls - now = time.monotonic() - MAX_WAITING_TIME = 15 - while time.monotonic() - now < MAX_WAITING_TIME: - await asyncio.sleep(1) - if handler.call_count == len(expected_project_state_updates): - break - if time.monotonic() - now > MAX_WAITING_TIME: - pytest.fail( - f"waited more than {MAX_WAITING_TIME}s and got only {handler.call_count}/{len(expected_project_state_updates)} calls" - ) + with log_context(logging.INFO, "assert_project_state_updated") as ctx: + + @retry( + wait=wait_fixed(1), + 
stop=stop_after_delay(15), + retry=retry_if_exception_type(AssertionError), + reraise=True, + before_sleep=before_sleep_log(ctx.logger, logging.INFO), + ) + async def _received_project_update_event() -> None: + assert handler.call_count == len( + expected_project_state_updates + ), f"received {handler.call_count}:{handler.call_args_list} of {len(expected_project_state_updates)} expected calls" + if expected_project_state_updates: + calls = [ + call( + jsonable_encoder( + { + "project_uuid": shared_project["uuid"], + "data": p_state.model_dump( + by_alias=True, exclude_unset=True + ), + } + ) + ) + for p_state in expected_project_state_updates + ] + handler.assert_has_calls(calls) + handler.reset_mock() - calls = [ - call( - jsonable_encoder( - { - "project_uuid": shared_project["uuid"], - "data": p_state.model_dump(by_alias=True, exclude_unset=True), - } - ) - ) - for p_state in expected_project_state_updates - ] - handler.assert_has_calls(calls) - handler.reset_mock() + if not expected_project_state_updates: + with contextlib.suppress(RetryError): + await _received_project_update_event.retry_with( + stop=stop_after_delay(3), + retry=retry_unless_exception_type(AssertionError), + )() + else: + await _received_project_update_event() async def _delete_project(client: TestClient, project: dict) -> ClientResponse: @@ -256,6 +293,83 @@ async def _delete_project(client: TestClient, project: dict) -> ClientResponse: @pytest.mark.parametrize(*standard_role_response()) +async def test_share_project_user_roles( + rabbit_service: RabbitSettings, + mock_dynamic_scheduler: None, + client: TestClient, + logged_user: dict, + primary_group: dict[str, str], + standard_groups: list[dict[str, str]], + all_group: dict[str, str], + user_role: UserRole, + expected: ExpectedResponse, + storage_subsystem_mock, + mocked_dynamic_services_interface: dict[str, mock.Mock], + project_db_cleaner, + request_create_project: Callable[..., Awaitable[ProjectDict]], + exit_stack: contextlib.AsyncExitStack, +): + # Use-case: test how different user roles can access shared projects + # Test with full access rights for all roles + share_rights = {"read": True, "write": True, "delete": True} + + # create a project with full access rights for the all_group + new_project = await request_create_project( + client, + expected.accepted, + expected.created, + logged_user, + primary_group, + project={"accessRights": {str(all_group["gid"]): share_rights}}, + ) + if new_project: + assert new_project["accessRights"] == { + f"{primary_group['gid']}": {"read": True, "write": True, "delete": True}, + f"{(all_group['gid'])}": share_rights, + } + + # user 1 can always get to his project + await assert_get_same_project(client, new_project, expected.ok) + + # get another user logged in now + await log_client_in( + client, + {"role": user_role.name}, + enable_check=user_role != UserRole.ANONYMOUS, + exit_stack=exit_stack, + ) + if new_project: + # user 2 can get the project if they have proper role permissions + await assert_get_same_project( + client, + new_project, + expected.ok, + ) + + # user 2 can list projects if they have proper role permissions + list_projects = await _list_projects(client, expected.ok) + expected_project_count = 1 if user_role != UserRole.ANONYMOUS else 0 + assert len(list_projects) == expected_project_count + + # user 2 can update the project if they have proper role permissions + project_update = deepcopy(new_project) + project_update["name"] = "my super name" + project_update.pop("accessRights") + await 
_replace_project( + client, + project_update, + expected.no_content, + ) + + # user 2 can delete projects if they have proper role permissions + resp = await _delete_project(client, new_project) + await assert_status( + resp, + expected_status_code=expected.no_content, + ) + + +@pytest.mark.parametrize(*standard_user_role_response()) @pytest.mark.parametrize( "share_rights", [ @@ -264,9 +378,9 @@ async def _delete_project(client: TestClient, project: dict) -> ClientResponse: {"read": True, "write": False, "delete": False}, {"read": False, "write": False, "delete": False}, ], - ids=str, + ids=["full_access", "no_delete", "read_only", "no_access"], ) -async def test_share_project( +async def test_share_project_access_rights( mock_dynamic_scheduler: None, client: TestClient, logged_user: dict, @@ -277,14 +391,15 @@ async def test_share_project( expected: ExpectedResponse, storage_subsystem_mock, mocked_dynamic_services_interface: dict[str, mock.Mock], - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], share_rights: dict, project_db_cleaner, request_create_project: Callable[..., Awaitable[ProjectDict]], + exit_stack: contextlib.AsyncExitStack, ): - # Use-case: the user shares some projects with a group + # Use-case: test how different access rights affect project sharing + # Test with USER role only but different access rights - # create a few projects + # create a project with specific access rights new_project = await request_create_project( client, expected.accepted, @@ -304,7 +419,10 @@ async def test_share_project( # get another user logged in now await log_client_in( - client, {"role": user_role.name}, enable_check=user_role != UserRole.ANONYMOUS + client, + {"role": user_role.name}, + enable_check=user_role != UserRole.ANONYMOUS, + exit_stack=exit_stack, ) if new_project: # user 2 can get the project if user 2 has read access @@ -351,7 +469,10 @@ async def test_open_project( client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, - client_session_id_factory: Callable[[], str], + create_socketio_connection_with_handlers: Callable[ + [str | None, TestClient], + Awaitable[tuple[socketio.AsyncClient, str, _SocketHandlers]], + ], expected: HTTPStatus, save_state: bool, mocked_dynamic_services_interface: dict[str, mock.Mock], @@ -365,8 +486,14 @@ async def test_open_project( # POST /v0/projects/{project_id}:open # open project assert client.app + + # Only create socketio connection for non-anonymous users + client_id = None + if expected != status.HTTP_401_UNAUTHORIZED: + _, client_id, _ = await create_socketio_connection_with_handlers(None, client) + url = client.app.router["open_project"].url_for(project_id=user_project["uuid"]) - resp = await client.post(f"{url}", json=client_session_id_factory()) + resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected) @@ -431,10 +558,14 @@ def wallets_clean_db(postgres_db: sa.engine.Engine) -> Iterator[None]: ], ) async def test_open_project__in_debt( + with_dev_features_enabled: None, client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, - client_session_id_factory: Callable[[], str], + create_socketio_connection_with_handlers: Callable[ + [str | None, TestClient], + Awaitable[tuple[socketio.AsyncClient, str, _SocketHandlers]], + ], expected: HTTPStatus, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_service_resources: ServiceResourcesDict, @@ -472,8 +603,11 @@ async def test_open_project__in_debt( # POST /v0/projects/{project_id}:open assert client.app + + 
_, client_id, _ = await create_socketio_connection_with_handlers(None, client) + url = client.app.router["open_project"].url_for(project_id=user_project["uuid"]) - resp = await client.post(f"{url}", json=client_session_id_factory()) + resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected) assert mock_get_project_wallet_total_credits.assert_called_once @@ -492,7 +626,10 @@ async def test_open_template_project_for_edition( client: TestClient, logged_user: UserInfoDict, create_template_project: Callable[..., Awaitable[ProjectDict]], - client_session_id_factory: Callable[[], str], + create_socketio_connection_with_handlers: Callable[ + [str | None, TestClient], + Awaitable[tuple[socketio.AsyncClient, str, _SocketHandlers]], + ], expected: HTTPStatus, save_state: bool, mocked_dynamic_services_interface: dict[str, mock.Mock], @@ -512,8 +649,13 @@ async def test_open_template_project_for_edition( logged_user["primary_gid"]: {"read": True, "write": True, "delete": False} } ) + + # Only create socketio connection for non-anonymous users + client_id = None + if expected != status.HTTP_401_UNAUTHORIZED: + _, client_id, _ = await create_socketio_connection_with_handlers(None, client) url = client.app.router["open_project"].url_for(project_id=template_project["uuid"]) - resp = await client.post(f"{url}", json=client_session_id_factory()) + resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected) if resp.status == status.HTTP_200_OK: @@ -563,8 +705,6 @@ async def test_open_template_project_for_edition( @pytest.mark.parametrize( "user_role,expected", [ - (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), - (UserRole.GUEST, status.HTTP_403_FORBIDDEN), (UserRole.USER, status.HTTP_403_FORBIDDEN), (UserRole.TESTER, status.HTTP_403_FORBIDDEN), ], @@ -573,7 +713,10 @@ async def test_open_template_project_for_edition_with_missing_write_rights( client: TestClient, logged_user: UserInfoDict, create_template_project: Callable[..., Awaitable[ProjectDict]], - client_session_id_factory: Callable[[], str], + create_socketio_connection_with_handlers: Callable[ + [str | None, TestClient], + Awaitable[tuple[socketio.AsyncClient, str, _SocketHandlers]], + ], expected: HTTPStatus, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_service_resources: ServiceResourcesDict, @@ -589,23 +732,25 @@ async def test_open_template_project_for_edition_with_missing_write_rights( logged_user["primary_gid"]: {"read": True, "write": False, "delete": True} } ) + + # Only create socketio connection for non-anonymous users + client_id = None + if expected != status.HTTP_401_UNAUTHORIZED: + _, client_id, _ = await create_socketio_connection_with_handlers(None, client) url = client.app.router["open_project"].url_for(project_id=template_project["uuid"]) - resp = await client.post(f"{url}", json=client_session_id_factory()) + resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected) -def standard_user_role() -> tuple[str, tuple]: - all_roles = standard_role_response() - - return (all_roles[0], (pytest.param(*all_roles[1][2], id="standard user role"),)) - - -@pytest.mark.parametrize(*standard_user_role()) +@pytest.mark.parametrize(*standard_user_role_response()) async def test_open_project_with_small_amount_of_dynamic_services_starts_them_automatically( client: TestClient, logged_user: UserInfoDict, user_project_with_num_dynamic_services: Callable[[int], Awaitable[ProjectDict]], - client_session_id_factory: Callable, + 
create_socketio_connection_with_handlers: Callable[ + [str | None, TestClient], + Awaitable[tuple[socketio.AsyncClient, str, _SocketHandlers]], + ], expected: ExpectedResponse, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_catalog_api: dict[str, mock.Mock], @@ -631,8 +776,12 @@ async def test_open_project_with_small_amount_of_dynamic_services_starts_them_au for service_id in range(num_service_already_running) ] + # Only create socketio connection for non-anonymous users + client_id = "" + if expected.ok: + _, client_id, _ = await create_socketio_connection_with_handlers(None, client) url = client.app.router["open_project"].url_for(project_id=project["uuid"]) - resp = await client.post(f"{url}", json=client_session_id_factory()) + resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected.ok) mocked_notifications_plugin["subscribe"].assert_called_once_with( client.app, ProjectID(project["uuid"]) @@ -646,26 +795,27 @@ async def test_open_project_with_small_amount_of_dynamic_services_starts_them_au ].reset_mock() -@pytest.mark.parametrize(*standard_user_role()) +@pytest.mark.parametrize(*standard_user_role_response()) async def test_open_project_with_disable_service_auto_start_set_overrides_behavior( client: TestClient, logged_user: UserInfoDict, user_project_with_num_dynamic_services: Callable[[int], Awaitable[ProjectDict]], - client_session_id_factory: Callable, + create_socketio_connection_with_handlers: Callable[ + [str | None, TestClient], + Awaitable[tuple[socketio.AsyncClient, str, _SocketHandlers]], + ], expected: ExpectedResponse, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_catalog_api: dict[str, mock.Mock], max_amount_of_auto_started_dyn_services: int, - faker: Faker, mocked_notifications_plugin: dict[str, mock.Mock], ): assert client.app - num_of_dyn_services = max_amount_of_auto_started_dyn_services or faker.pyint( - min_value=3, max_value=250 + project = await user_project_with_num_dynamic_services( + max_amount_of_auto_started_dyn_services ) - project = await user_project_with_num_dynamic_services(num_of_dyn_services) all_service_uuids = list(project["workbench"]) - for num_service_already_running in range(num_of_dyn_services): + for num_service_already_running in range(max_amount_of_auto_started_dyn_services): mocked_dynamic_services_interface[ "dynamic_scheduler.api.list_dynamic_services" ].return_value = [ @@ -673,14 +823,22 @@ async def test_open_project_with_disable_service_auto_start_set_overrides_behavi for service_id in range(num_service_already_running) ] + # Only create socketio connection for non-anonymous users + client_id = "" + if expected.ok: + sio, client_id, *_ = await create_socketio_connection_with_handlers( + None, client + ) url = ( client.app.router["open_project"] .url_for(project_id=project["uuid"]) .with_query(disable_service_auto_start=f"{True}") ) - resp = await client.post(f"{url}", json=client_session_id_factory()) + resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected.ok) + if expected.ok: + await sio.disconnect() mocked_notifications_plugin["subscribe"].assert_called_once_with( client.app, ProjectID(project["uuid"]) ) @@ -690,12 +848,15 @@ async def test_open_project_with_disable_service_auto_start_set_overrides_behavi ].assert_not_called() -@pytest.mark.parametrize(*standard_user_role()) +@pytest.mark.parametrize(*standard_user_role_response()) async def test_open_project_with_large_amount_of_dynamic_services_does_not_start_them_automatically( client: 
TestClient, logged_user: UserInfoDict, user_project_with_num_dynamic_services: Callable[[int], Awaitable[ProjectDict]], - client_session_id_factory: Callable, + create_socketio_connection_with_handlers: Callable[ + [str | None, TestClient], + Awaitable[tuple[socketio.AsyncClient, str, _SocketHandlers]], + ], expected: ExpectedResponse, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_catalog_api: dict[str, mock.Mock], @@ -723,8 +884,12 @@ async def test_open_project_with_large_amount_of_dynamic_services_does_not_start for service_id in range(num_service_already_running) ] + # Only create socketio connection for non-anonymous users + client_id = "" + if expected.ok: + _, client_id, _ = await create_socketio_connection_with_handlers(None, client) url = client.app.router["open_project"].url_for(project_id=project["uuid"]) - resp = await client.post(f"{url}", json=client_session_id_factory()) + resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected.ok) mocked_notifications_plugin["subscribe"].assert_called_once_with( client.app, ProjectID(project["uuid"]) @@ -735,14 +900,17 @@ async def test_open_project_with_large_amount_of_dynamic_services_does_not_start ].assert_not_called() -@pytest.mark.parametrize(*standard_user_role()) +@pytest.mark.parametrize(*standard_user_role_response()) async def test_open_project_with_large_amount_of_dynamic_services_starts_them_if_setting_disabled( mock_get_total_project_dynamic_nodes_creation_interval: None, disable_max_number_of_running_dynamic_nodes: dict[str, str], client: TestClient, logged_user: UserInfoDict, user_project_with_num_dynamic_services: Callable[[int], Awaitable[ProjectDict]], - client_session_id_factory: Callable, + create_socketio_connection_with_handlers: Callable[ + [str | None, TestClient], + Awaitable[tuple[socketio.AsyncClient, str, _SocketHandlers]], + ], expected: ExpectedResponse, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_catalog_api: dict[str, mock.Mock], @@ -773,8 +941,12 @@ async def test_open_project_with_large_amount_of_dynamic_services_starts_them_if for service_id in range(num_service_already_running) ] + # Only create socketio connection for non-anonymous users + client_id = "" + if expected.ok: + _, client_id, _ = await create_socketio_connection_with_handlers(None, client) url = client.app.router["open_project"].url_for(project_id=project["uuid"]) - resp = await client.post(f"{url}", json=client_session_id_factory()) + resp = await client.post(f"{url}", json=client_id) await assert_status(resp, expected.ok) mocked_notifications_plugin["subscribe"].assert_called_once_with( client.app, ProjectID(project["uuid"]) @@ -785,12 +957,15 @@ async def test_open_project_with_large_amount_of_dynamic_services_starts_them_if ].assert_called() -@pytest.mark.parametrize(*standard_user_role()) +@pytest.mark.parametrize(*standard_user_role_response()) async def test_open_project_with_deprecated_services_ok_but_does_not_start_dynamic_services( client: TestClient, logged_user, user_project, - client_session_id_factory: Callable, + create_socketio_connection_with_handlers: Callable[ + [str | None, TestClient], + Awaitable[tuple[socketio.AsyncClient, str, _SocketHandlers]], + ], expected: ExpectedResponse, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_service_resources: ServiceResourcesDict, @@ -801,8 +976,12 @@ async def test_open_project_with_deprecated_services_ok_but_does_not_start_dynam mock_catalog_api["get_service"].return_value["deprecated"] = ( 
datetime.now(UTC) - timedelta(days=1) ).isoformat() + # Only create socketio connection for non-anonymous users + client_id = "" + if expected.ok: + _, client_id, _ = await create_socketio_connection_with_handlers(None, client) url = client.app.router["open_project"].url_for(project_id=user_project["uuid"]) - resp = await client.post(url, json=client_session_id_factory()) + resp = await client.post(url, json=client_id) await assert_status(resp, expected.ok) mocked_notifications_plugin["subscribe"].assert_called_once_with( client.app, ProjectID(user_project["uuid"]) @@ -837,12 +1016,15 @@ def one_max_open_studies_per_user( ) -@pytest.mark.parametrize(*standard_role_response()) +@pytest.mark.parametrize(*standard_user_role_response()) async def test_open_project_more_than_limitation_of_max_studies_open_per_user( one_max_open_studies_per_user: None, client: TestClient, logged_user, - client_session_id_factory: Callable, + create_socketio_connection_with_handlers: Callable[ + [str | None, TestClient], + Awaitable[tuple[socketio.AsyncClient, str, _SocketHandlers]], + ], user_project: ProjectDict, shared_project: ProjectDict, expected: ExpectedResponse, @@ -851,20 +1033,30 @@ async def test_open_project_more_than_limitation_of_max_studies_open_per_user( user_role: UserRole, mocked_notifications_plugin: dict[str, mock.Mock], ): - client_id_1 = client_session_id_factory() + # Only create socketio connection for non-anonymous users + client_id_1 = "" + if user_role != UserRole.ANONYMOUS: + _, client_id_1, _ = await create_socketio_connection_with_handlers(None, client) await _open_project( client, client_id_1, user_project, - expected.ok if user_role != UserRole.GUEST else status.HTTP_200_OK, + HTTPStatus(expected.ok) if user_role != UserRole.GUEST else HTTPStatus.OK, ) - client_id_2 = client_session_id_factory() + # Only create socketio connection for non-anonymous users + client_id_2 = "" + if user_role != UserRole.ANONYMOUS: + _, client_id_2, _ = await create_socketio_connection_with_handlers(None, client) await _open_project( client, client_id_2, shared_project, - expected.conflict if user_role != UserRole.GUEST else status.HTTP_409_CONFLICT, + ( + HTTPStatus(expected.conflict) + if user_role != UserRole.GUEST + else HTTPStatus.CONFLICT + ), ) @@ -873,7 +1065,7 @@ async def test_close_project( client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, - client_session_id_factory: Callable, + client_session_id_factory: Callable[[], str], expected, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_catalog_api: dict[str, mock.Mock], @@ -965,26 +1157,28 @@ async def test_close_project( ], ) async def test_get_active_project( + with_disabled_rtc_collaboration: None, client: TestClient, - logged_user, - user_project, - client_session_id_factory: Callable, - expected, - socketio_client_factory: Callable, + logged_user: UserInfoDict, + user_project: ProjectDict, + expected: int, + create_socketio_connection: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] + ], mocked_dynamic_services_interface: dict[str, mock.Mock], mock_catalog_api: dict[str, mock.Mock], mocked_notifications_plugin: dict[str, mock.Mock], ): # login with socket using client session id - client_id1 = client_session_id_factory() - sio = None + client_id1 = "" try: - sio = await socketio_client_factory(client_id1) + sio, client_id1 = await create_socketio_connection(None, client) assert sio.sid except SocketConnectionError: if expected == status.HTTP_200_OK: 
pytest.fail("socket io connection should not fail") assert client.app + # get active projects -> empty get_active_projects_url = ( client.app.router["get_active_project"] @@ -1011,8 +1205,8 @@ async def test_get_active_project( client.app, ProjectID(user_project["uuid"]) ) assert not error - assert ProjectState(**data.pop("state")).locked.value - data.pop("folderId") + assert ProjectStateOutputSchema(**data.pop("state")).share_state.locked + data.pop("folderId", None) user_project_last_change_date = user_project.pop("lastChangeDate") data_last_change_date = data.pop("lastChangeDate") @@ -1023,9 +1217,9 @@ async def test_get_active_project( mocked_notifications_plugin["subscribe"].assert_not_called() # login with socket using client session id2 - client_id2 = client_session_id_factory() + client_id2 = "" try: - sio = await socketio_client_factory(client_id2) + sio, client_id2 = await create_socketio_connection(None, client) assert sio.sid except SocketConnectionError: if expected == status.HTTP_200_OK: @@ -1139,6 +1333,8 @@ async def test_project_node_lifetime( # noqa: PLR0915 ) node_sample = deepcopy(NodeGet.model_config["json_schema_extra"]["examples"][1]) + assert node_sample + assert isinstance(node_sample, dict) mocked_dynamic_services_interface[ "dynamic_scheduler.api.get_dynamic_service" ].return_value = NodeGet.model_validate( @@ -1256,43 +1452,390 @@ def clean_redis_table(redis_client) -> None: """this just ensures the redis table is cleaned up between test runs""" -@pytest.mark.parametrize(*standard_role_response()) -async def test_open_shared_project_2_users_locked( +@pytest.mark.acceptance_test +@pytest.mark.parametrize(*standard_user_role_response()) +async def test_open_shared_project_multiple_users( + max_number_of_user_sessions: int, + with_enabled_rtc_collaboration: None, + client: TestClient, + client_on_running_server_factory: Callable[[], TestClient], + logged_user: dict, + shared_project: dict, + expected: ExpectedResponse, + exit_stack: contextlib.AsyncExitStack, + create_socketio_connection_with_handlers: Callable[ + [str | None, TestClient], + Awaitable[tuple[socketio.AsyncClient, str, _SocketHandlers]], + ], + mocked_dynamic_services_interface: dict[str, mock.Mock], + mock_catalog_api: dict[str, mock.Mock], +): + base_client = client + ( + sio_base, + base_client_tab_id, + sio_base_handlers, + ) = await create_socketio_connection_with_handlers(None, base_client) + + # current state is closed and unlocked + closed_project_state = ProjectStateOutputSchema( + share_state=ProjectShareStateOutputSchema( + locked=False, status=ProjectStatus.CLOSED, current_user_groupids=[] + ), + state=ProjectRunningState(value=RunningState.NOT_STARTED), + ) + await _state_project(base_client, shared_project, expected.ok, closed_project_state) + + # now user 1 opens the shared project + await _open_project(base_client, base_client_tab_id, shared_project, expected.ok) + opened_project_state = closed_project_state.model_copy( + update={ + "share_state": ProjectShareStateOutputSchema( + locked=False, + status=ProjectStatus.OPENED, + current_user_groupids=[logged_user["primary_gid"]], + ), + } + ) + await _assert_project_state_updated( + sio_base_handlers[SOCKET_IO_PROJECT_UPDATED_EVENT], + shared_project, + [opened_project_state] * 2, + ) + await _state_project(base_client, shared_project, expected.ok, opened_project_state) + + # now we create more users and open the same project until we reach the maximum number of user sessions + other_users: list[ + tuple[UserInfoDict, TestClient, str, 
socketio.AsyncClient, _SocketHandlers] + ] = [] + for user_session in range(1, max_number_of_user_sessions): + client_i = client_on_running_server_factory() + + # user i logs in + user_i = await exit_stack.enter_async_context( + LoggedUser(client_i, {"role": logged_user["role"]}) + ) + + ( + sio_i, + client_i_tab_id, + sio_i_handlers, + ) = await create_socketio_connection_with_handlers(None, client_i) + assert sio_i + + # user i opens the shared project + await _open_project(client_i, client_i_tab_id, shared_project, expected.ok) + opened_project_state = opened_project_state.model_copy( + update={ + "share_state": ProjectShareStateOutputSchema( + locked=(not user_session < max_number_of_user_sessions - 1), + status=ProjectStatus.OPENED, + current_user_groupids=[ + *opened_project_state.share_state.current_user_groupids, + TypeAdapter(GroupID).validate_python(user_i["primary_gid"]), + ], + ), + } + ) + await _assert_project_state_updated( + sio_i_handlers[SOCKET_IO_PROJECT_UPDATED_EVENT], + shared_project, + [opened_project_state] + * 1, # NOTE: only one call per user since they are part of the everyone group + ) + for _user_j, client_j, _, _sio_j, sio_j_handlers in other_users: + # check already opened by other users which should also notify + await _assert_project_state_updated( + sio_j_handlers[SOCKET_IO_PROJECT_UPDATED_EVENT], + shared_project, + [opened_project_state], + ) + await _state_project( + client_j, shared_project, expected.ok, opened_project_state + ) + + await _assert_project_state_updated( + sio_base_handlers[SOCKET_IO_PROJECT_UPDATED_EVENT], + shared_project, + [opened_project_state] + * 2, # NOTE: 2 calls since base user is part of the primary group and the all group + ) + await _state_project( + client_i, shared_project, expected.ok, opened_project_state + ) + await _state_project( + base_client, shared_project, expected.ok, opened_project_state + ) + other_users.append((user_i, client_i, client_i_tab_id, sio_i, sio_i_handlers)) + + # + # TEST more user sessions cannot be opened: create an additional user, opening the project again shall raise + client_n = client_on_running_server_factory() + + user_n = await exit_stack.enter_async_context( + LoggedUser(client_n, {"role": logged_user["role"]}) + ) + assert user_n + ( + sio_n, + client_n_tab_id, + sio_n_handlers, + ) = await create_socketio_connection_with_handlers(None, client_n) + assert sio_n + assert sio_n_handlers + + # user i opens the shared project --> no events since it's blocked + await _open_project(client_n, client_n_tab_id, shared_project, expected.conflict) + await _assert_project_state_updated( + sio_n_handlers[SOCKET_IO_PROJECT_UPDATED_EVENT], shared_project, [] + ) + + # close project from base user shall trigger an event for all the other users + await _close_project( + base_client, base_client_tab_id, shared_project, expected.no_content + ) + opened_project_state = opened_project_state.model_copy( + update={ + "share_state": ProjectShareStateOutputSchema( + locked=False, + status=ProjectStatus.OPENED, + current_user_groupids=[ + gid + for gid in opened_project_state.share_state.current_user_groupids + if gid + != TypeAdapter(GroupID).validate_python(logged_user["primary_gid"]) + ], + ), + } + ) + await _assert_project_state_updated( + sio_base_handlers[SOCKET_IO_PROJECT_UPDATED_EVENT], + shared_project, + [opened_project_state] * 2, + ) + # check all the other users + for _user_i, client_i, _, _sio_i, sio_i_handlers in other_users: + await _assert_project_state_updated( + 
sio_i_handlers[SOCKET_IO_PROJECT_UPDATED_EVENT], + shared_project, + [opened_project_state], + ) + await _state_project( + client_i, shared_project, expected.ok, opened_project_state + ) + + +@pytest.mark.parametrize(*standard_user_role_response()) +async def test_refreshing_tab_of_opened_project_multiple_users( + with_enabled_rtc_collaboration_limited_to_1_user: None, + client: TestClient, + client_on_running_server_factory: Callable[[], TestClient], + logged_user: dict, + shared_project: dict, + expected: ExpectedResponse, + create_socketio_connection_with_handlers: Callable[ + [str | None, TestClient], + Awaitable[tuple[socketio.AsyncClient, str, _SocketHandlers]], + ], + mocked_dynamic_services_interface: dict[str, mock.Mock], + mock_catalog_api: dict[str, mock.Mock], +): + # This test is a simplified version of the test_open_shared_project_multiple_users + # It only tests refreshing the tab of an already opened project + ( + original_socketio, + client_tab_id, + original_socket_handlers, + ) = await create_socketio_connection_with_handlers(None, client) + + # current state is closed and unlocked + closed_project_state = ProjectStateOutputSchema( + share_state=ProjectShareStateOutputSchema( + locked=False, status=ProjectStatus.CLOSED, current_user_groupids=[] + ), + state=ProjectRunningState(value=RunningState.NOT_STARTED), + ) + await _state_project(client, shared_project, expected.ok, closed_project_state) + + # now user opens the project + await _open_project(client, client_tab_id, shared_project, expected.ok) + opened_project_state = closed_project_state.model_copy( + update={ + "share_state": ProjectShareStateOutputSchema( + locked=True, + status=ProjectStatus.OPENED, + current_user_groupids=[logged_user["primary_gid"]], + ), + } + ) + await _assert_project_state_updated( + original_socket_handlers[SOCKET_IO_PROJECT_UPDATED_EVENT], + shared_project, + [opened_project_state] + * 2, # NOTE: 2 calls since base user is part of the primary group and the all group + ) + await _state_project(client, shared_project, expected.ok, opened_project_state) + # opening a second time should also work as this is a no-op + await _open_project(client, client_tab_id, shared_project, expected.ok) + await _assert_project_state_updated( + original_socket_handlers[SOCKET_IO_PROJECT_UPDATED_EVENT], + shared_project, + [opened_project_state] + * 2, # NOTE: 2 calls since base user is part of the primary group and the all group + ) + await _state_project(client, shared_project, expected.ok, opened_project_state) + + # now we simulate refreshing the tab of the base user (the client session id remains the same), by disconnecting and reconnecting the socket.io + await original_socketio.disconnect() + await _assert_project_state_updated( + original_socket_handlers[SOCKET_IO_PROJECT_UPDATED_EVENT], + shared_project, + [], + ) + + # now we connect again the socket.io with a new socket + ( + new_socketio, + new_client_tab_id, + new_socket_handlers, + ) = await create_socketio_connection_with_handlers(client_tab_id, client) + assert new_socketio + assert new_client_tab_id == client_tab_id, "refreshing changed the tab id!" 
+ + await _open_project(client, client_tab_id, shared_project, expected.ok) + + await _assert_project_state_updated( + new_socket_handlers[SOCKET_IO_PROJECT_UPDATED_EVENT], + shared_project, + [opened_project_state] + * 2, # NOTE: 2 calls since base user is part of the primary group and the all group + ) + # check old socket is silent + await _assert_project_state_updated( + original_socket_handlers[SOCKET_IO_PROJECT_UPDATED_EVENT], + shared_project, + [], + ) + + +@pytest.mark.parametrize(*standard_user_role_response()) +async def test_closing_and_reopening_tab_of_opened_project_multiple_users( + with_enabled_rtc_collaboration_limited_to_1_user: None, + client: TestClient, + client_on_running_server_factory: Callable[[], TestClient], + logged_user: dict, + shared_project: dict, + expected: ExpectedResponse, + exit_stack: contextlib.AsyncExitStack, + create_socketio_connection_with_handlers: Callable[ + [str | None, TestClient], + Awaitable[tuple[socketio.AsyncClient, str, _SocketHandlers]], + ], + mocked_dynamic_services_interface: dict[str, mock.Mock], + mock_catalog_api: dict[str, mock.Mock], +): + # This test is a simplified version of the test_open_shared_project_multiple_users + # It only tests closing and reopening the tab of an already opened project which should still open + ( + original_socketio, + original_client_tab_id, + original_socket_handlers, + ) = await create_socketio_connection_with_handlers(None, client) + + # current state is closed and unlocked + closed_project_state = ProjectStateOutputSchema( + share_state=ProjectShareStateOutputSchema( + locked=False, status=ProjectStatus.CLOSED, current_user_groupids=[] + ), + state=ProjectRunningState(value=RunningState.NOT_STARTED), + ) + await _state_project(client, shared_project, expected.ok, closed_project_state) + + # now user opens the project + await _open_project(client, original_client_tab_id, shared_project, expected.ok) + opened_project_state = closed_project_state.model_copy( + update={ + "share_state": ProjectShareStateOutputSchema( + locked=True, + status=ProjectStatus.OPENED, + current_user_groupids=[logged_user["primary_gid"]], + ), + } + ) + await _assert_project_state_updated( + original_socket_handlers[SOCKET_IO_PROJECT_UPDATED_EVENT], + shared_project, + [opened_project_state] + * 2, # NOTE: 2 calls since base user is part of the primary group and the all group + ) + await _state_project(client, shared_project, expected.ok, opened_project_state) + + # now we simulate refreshing the tab of the base user (the client session id DOES NOT remain the same) + await original_socketio.disconnect() + await asyncio.sleep(5) # wait for the disconnect to be processed + ( + new_socketio, + new_client_session_id, + new_socketio_handlers, + ) = await create_socketio_connection_with_handlers(None, client) + assert original_client_tab_id != new_client_session_id + await _assert_project_state_updated( + new_socketio_handlers[SOCKET_IO_PROJECT_UPDATED_EVENT], + shared_project, + [], + ) + # re-open the project with the new socket / tab id + await _open_project(client, new_client_session_id, shared_project, expected.ok) + + await _assert_project_state_updated( + new_socketio_handlers[SOCKET_IO_PROJECT_UPDATED_EVENT], + shared_project, + [opened_project_state] + * 2, # NOTE: 2 calls since base user is part of the primary group and the all group + ) + # check old socket is silent + await _assert_project_state_updated( + original_socket_handlers[SOCKET_IO_PROJECT_UPDATED_EVENT], + shared_project, + [], + ) + + 
+@pytest.mark.parametrize(*standard_user_role_response()) +async def test_open_shared_project_2_users_locked_remove_once_rtc_collaboration_is_defaulted( + with_disabled_rtc_collaboration: None, client: TestClient, client_on_running_server_factory: Callable[[], TestClient], logged_user: dict, shared_project: dict, - socketio_client_factory: Callable, - client_session_id_factory: Callable, user_role: UserRole, expected: ExpectedResponse, - mocker, mocked_dynamic_services_interface: dict[str, mock.Mock], mock_orphaned_services, mock_catalog_api: dict[str, mock.Mock], clean_redis_table: None, mock_dynamic_scheduler_rabbitmq: None, mocked_notifications_plugin: dict[str, mock.Mock], + exit_stack: contextlib.AsyncExitStack, + create_socketio_connection_with_handlers: Callable[ + [str | None, TestClient], + Awaitable[tuple[socketio.AsyncClient, str, _SocketHandlers]], + ], ): # Use-case: user 1 opens a shared project, user 2 tries to open it as well - mock_project_state_updated_handler = mocker.Mock() client_1 = client - client_id1 = client_session_id_factory() client_2 = client_on_running_server_factory() - client_id2 = client_session_id_factory() # 1. user 1 opens project - await _connect_websocket( - socketio_client_factory, - user_role != UserRole.ANONYMOUS, - client_1, - client_id1, - {SOCKET_IO_PROJECT_UPDATED_EVENT: mock_project_state_updated_handler}, + sio1, client_id1, sio1_handlers = await create_socketio_connection_with_handlers( + None, client_1 ) # expected is that the project is closed and unlocked - expected_project_state_client_1 = ProjectState( - locked=ProjectLocked(value=False, status=ProjectStatus.CLOSED), + expected_project_state_client_1 = ProjectStateOutputSchema( + share_state=ProjectShareStateOutputSchema( + locked=False, status=ProjectStatus.CLOSED, current_user_groupids=[] + ), state=ProjectRunningState(value=RunningState.NOT_STARTED), ) for _client_id in [client_id1, None]: @@ -1309,17 +1852,21 @@ async def test_open_shared_project_2_users_locked( expected.ok if user_role != UserRole.GUEST else status.HTTP_200_OK, ) # now the expected result is that the project is locked and opened by client 1 - owner1 = Owner( - user_id=logged_user["id"], - first_name=logged_user.get("first_name"), - last_name=logged_user.get("last_name"), + expected_project_state_client_1 = expected_project_state_client_1.model_copy( + update={ + "share_state": ProjectShareStateOutputSchema( + locked=True, + status=ProjectStatus.OPENED, + current_user_groupids=[ + logged_user["primary_gid"] + ], # this should be the group of that user + ), + } ) - expected_project_state_client_1.locked.value = True - expected_project_state_client_1.locked.status = ProjectStatus.OPENED - expected_project_state_client_1.locked.owner = owner1 + # NOTE: there are 2 calls since we are part of the primary group and the all group await _assert_project_state_updated( - mock_project_state_updated_handler, + sio1_handlers[SOCKET_IO_PROJECT_UPDATED_EVENT], shared_project, [expected_project_state_client_1] * (0 if user_role == UserRole.ANONYMOUS else 2), @@ -1333,14 +1880,13 @@ async def test_open_shared_project_2_users_locked( # 2. 
create a separate client now and log in user2, try to open the same shared project user_2 = await log_client_in( - client_2, {"role": user_role.name}, enable_check=user_role != UserRole.ANONYMOUS - ) - await _connect_websocket( - socketio_client_factory, - user_role != UserRole.ANONYMOUS, client_2, - client_id2, - {SOCKET_IO_PROJECT_UPDATED_EVENT: mock_project_state_updated_handler}, + {"role": user_role.name}, + enable_check=user_role != UserRole.ANONYMOUS, + exit_stack=exit_stack, + ) + sio2, client_id2, sio2_handlers = await create_socketio_connection_with_handlers( + None, client_2 ) await _open_project( client_2, @@ -1348,8 +1894,15 @@ async def test_open_shared_project_2_users_locked( shared_project, expected.locked if user_role != UserRole.GUEST else status.HTTP_423_LOCKED, ) - expected_project_state_client_2 = deepcopy(expected_project_state_client_1) - expected_project_state_client_2.locked.status = ProjectStatus.OPENED + expected_project_state_client_2 = expected_project_state_client_1.model_copy( + update={ + "share_state": ProjectShareStateOutputSchema( + locked=expected_project_state_client_1.share_state.locked, + status=ProjectStatus.OPENED, + current_user_groupids=expected_project_state_client_1.share_state.current_user_groupids, + ), + } + ) await _state_project( client_2, @@ -1362,22 +1915,54 @@ async def test_open_shared_project_2_users_locked( await _close_project(client_1, client_id1, shared_project, expected.no_content) if not any(user_role == role for role in [UserRole.ANONYMOUS, UserRole.GUEST]): # Guests cannot close projects - expected_project_state_client_1 = ProjectState( - locked=ProjectLocked(value=False, status=ProjectStatus.CLOSED), + expected_project_state_client_1 = ProjectStateOutputSchema( + share_state=ProjectShareStateOutputSchema( + locked=False, status=ProjectStatus.CLOSED, current_user_groupids=[] + ), state=ProjectRunningState(value=RunningState.NOT_STARTED), ) # we should receive an event that the project lock state changed - # NOTE: there are 2x3 calls since we are part of the primary group and the all group and user 2 is part of the all group - # first CLOSING, then CLOSED + # NOTE: user 1 is part of the primary group owning the project, and the all group + # there will be an event when the project is CLOSING, then another once the services are removed and the project is CLOSED + # user 2 is only part of the all group, therefore only receives 1 event + + await _assert_project_state_updated( + sio1_handlers[SOCKET_IO_PROJECT_UPDATED_EVENT], + shared_project, + [ + expected_project_state_client_1.model_copy( + update={ + "share_state": ProjectShareStateOutputSchema( + locked=True, + status=ProjectStatus.CLOSING, + current_user_groupids=[logged_user["primary_gid"]], + ) + } + ) + ] + * ( + 0 + if any(user_role == role for role in [UserRole.ANONYMOUS, UserRole.GUEST]) + else 2 + ) + + [expected_project_state_client_1] + * ( + 0 + if any(user_role == role for role in [UserRole.ANONYMOUS, UserRole.GUEST]) + else 2 + ), + ) await _assert_project_state_updated( - mock_project_state_updated_handler, + sio2_handlers[SOCKET_IO_PROJECT_UPDATED_EVENT], shared_project, [ expected_project_state_client_1.model_copy( update={ - "locked": ProjectLocked( - value=True, status=ProjectStatus.CLOSING, owner=owner1 + "share_state": ProjectShareStateOutputSchema( + locked=True, + status=ProjectStatus.CLOSING, + current_user_groupids=[logged_user["primary_gid"]], ) } ) @@ -1385,13 +1970,13 @@ async def test_open_shared_project_2_users_locked( * ( 0 if any(user_role == 
role for role in [UserRole.ANONYMOUS, UserRole.GUEST]) - else 3 + else 1 ) + [expected_project_state_client_1] * ( 0 if any(user_role == role for role in [UserRole.ANONYMOUS, UserRole.GUEST]) - else 3 + else 1 ), ) await _state_project( @@ -1409,26 +1994,44 @@ async def test_open_shared_project_2_users_locked( expected.ok if user_role != UserRole.GUEST else status.HTTP_423_LOCKED, ) if not any(user_role == role for role in [UserRole.ANONYMOUS, UserRole.GUEST]): - expected_project_state_client_2.locked.value = True - expected_project_state_client_2.locked.status = ProjectStatus.OPENED - owner2 = Owner( - user_id=PositiveInt(user_2["id"]), - first_name=user_2.get("first_name", None), - last_name=user_2.get("last_name", None), + expected_project_state_client_2 = expected_project_state_client_1.model_copy( + update={ + "share_state": ProjectShareStateOutputSchema( + locked=True, + status=ProjectStatus.OPENED, + current_user_groupids=[int(user_2["primary_gid"])], + ), + } ) - expected_project_state_client_2.locked.owner = owner2 - expected_project_state_client_1.locked.value = True - expected_project_state_client_1.locked.status = ProjectStatus.OPENED - expected_project_state_client_1.locked.owner = owner2 + expected_project_state_client_1 = expected_project_state_client_1.model_copy( + update={ + "share_state": ProjectShareStateOutputSchema( + locked=True, + status=ProjectStatus.OPENED, + current_user_groupids=[int(user_2["primary_gid"])], + ), + } + ) + # NOTE: there are 3 calls since we are part of the primary group and the all group await _assert_project_state_updated( - mock_project_state_updated_handler, + sio1_handlers[SOCKET_IO_PROJECT_UPDATED_EVENT], shared_project, [expected_project_state_client_1] * ( 0 if any(user_role == role for role in [UserRole.ANONYMOUS, UserRole.GUEST]) - else 3 + else 2 + ), + ) + await _assert_project_state_updated( + sio2_handlers[SOCKET_IO_PROJECT_UPDATED_EVENT], + shared_project, + [expected_project_state_client_1] + * ( + 0 + if any(user_role == role for role in [UserRole.ANONYMOUS, UserRole.GUEST]) + else 1 ), ) await _state_project( @@ -1439,14 +2042,13 @@ async def test_open_shared_project_2_users_locked( ) -@pytest.mark.parametrize(*standard_role_response()) +@pytest.mark.parametrize(*standard_user_role_response()) async def test_open_shared_project_at_same_time( + with_disabled_rtc_collaboration: None, client: TestClient, client_on_running_server_factory: Callable[[], TestClient], logged_user: dict, shared_project: ProjectDict, - socketio_client_factory: Callable, - client_session_id_factory: Callable, user_role: UserRole, expected: ExpectedResponse, mocked_dynamic_services_interface: dict[str, mock.Mock], @@ -1455,16 +2057,17 @@ async def test_open_shared_project_at_same_time( clean_redis_table, mock_dynamic_scheduler_rabbitmq: None, mocked_notifications_plugin: dict[str, mock.Mock], + exit_stack: contextlib.AsyncExitStack, + create_socketio_connection_with_handlers: Callable[ + [str | None, TestClient], + Awaitable[tuple[socketio.AsyncClient, str, _SocketHandlers]], + ], ): NUMBER_OF_ADDITIONAL_CLIENTS = 10 # log client 1 client_1 = client - client_id1 = client_session_id_factory() - sio_1 = await _connect_websocket( - socketio_client_factory, - user_role != UserRole.ANONYMOUS, - client_1, - client_id1, + sio_1, client_id1, _ = await create_socketio_connection_with_handlers( + None, client_1 ) clients = [ {"client": client_1, "user": logged_user, "client_id": client_id1, "sio": sio_1} @@ -1476,13 +2079,10 @@ async def 
test_open_shared_project_at_same_time( new_client, {"role": user_role.name}, enable_check=user_role != UserRole.ANONYMOUS, + exit_stack=exit_stack, ) - client_id = client_session_id_factory() - sio = await _connect_websocket( - socketio_client_factory, - user_role != UserRole.ANONYMOUS, - new_client, - client_id, + sio, client_id, _ = await create_socketio_connection_with_handlers( + None, new_client ) clients.append( {"client": new_client, "user": user, "client_id": client_id, "sio": sio} @@ -1509,7 +2109,7 @@ async def test_open_shared_project_at_same_time( *open_project_tasks, return_exceptions=True, ) - + assert isinstance(results, list) # one should be opened, the other locked if user_role != UserRole.ANONYMOUS: num_assertions = 0 @@ -1518,25 +2118,28 @@ async def test_open_shared_project_at_same_time( if error: num_assertions += 1 elif data: - project_status = ProjectState(**data.pop("state")) + project_status = ProjectStateOutputSchema(**data.pop("state")) data.pop("folderId") - assert data == {k: shared_project[k] for k in data} - assert project_status.locked.value - assert project_status.locked.owner - assert project_status.locked.owner.first_name in [ - c["user"]["first_name"] for c in clients + assert not DeepDiff( + data, + {k: shared_project[k] for k in data}, + exclude_paths=["root['lastChangeDate']"], + ) + assert project_status.share_state.locked + assert project_status.share_state.current_user_groupids + assert len(project_status.share_state.current_user_groupids) == 1 + assert project_status.share_state.current_user_groupids[0] in [ + c["user"]["primary_gid"] for c in clients ] assert num_assertions == NUMBER_OF_ADDITIONAL_CLIENTS -@pytest.mark.parametrize(*standard_role_response()) +@pytest.mark.parametrize(*standard_user_role_response()) async def test_opened_project_can_still_be_opened_after_refreshing_tab( client: TestClient, logged_user: dict[str, Any], user_project: dict[str, Any], - client_session_id_factory: Callable, - socketio_client_factory: Callable, user_role: UserRole, expected: ExpectedResponse, mocked_dynamic_services_interface: dict[str, mock.MagicMock], @@ -1544,6 +2147,9 @@ async def test_opened_project_can_still_be_opened_after_refreshing_tab( mock_catalog_api: dict[str, mock.Mock], clean_redis_table, mocked_notifications_plugin: dict[str, mock.Mock], + create_socketio_connection: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] + ], ): """Simulating a refresh goes as follows: The user opens a project, then hit the F5 refresh page. 
@@ -1551,13 +2157,7 @@ async def test_opened_project_can_still_be_opened_after_refreshing_tab( client_session_id remains the same """ - client_session_id = client_session_id_factory() - sio = await _connect_websocket( - socketio_client_factory, - user_role != UserRole.ANONYMOUS, - client, - client_session_id, - ) + sio, client_session_id = await create_socketio_connection(None, client) assert client.app url = client.app.router["open_project"].url_for(project_id=user_project["uuid"]) resp = await client.post(f"{url}", json=client_session_id) @@ -1573,13 +2173,11 @@ async def test_opened_project_can_still_be_opened_after_refreshing_tab( # give some time await asyncio.sleep(1) # re-connect using the same client session id - sio2 = await _connect_websocket( - socketio_client_factory, - user_role != UserRole.ANONYMOUS, - client, - client_session_id, + sio2, received_client_session_id = await create_socketio_connection( + client_session_id, client ) assert sio2 + assert received_client_session_id == client_session_id # re-open the project resp = await client.post(f"{url}", json=client_session_id) await assert_status( diff --git a/services/web/server/tests/unit/with_dbs/03/conftest.py b/services/web/server/tests/unit/with_dbs/03/conftest.py index 3d75d45f3dc3..c4e299493a4b 100644 --- a/services/web/server/tests/unit/with_dbs/03/conftest.py +++ b/services/web/server/tests/unit/with_dbs/03/conftest.py @@ -5,16 +5,77 @@ from collections.abc import AsyncIterator +from typing import Any -import aiopg.sa import pytest +import sqlalchemy as sa +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict from simcore_postgres_database.models.user_preferences import user_preferences_frontend +from sqlalchemy.ext.asyncio import AsyncEngine @pytest.fixture async def drop_all_preferences( - aiopg_engine: aiopg.sa.engine.Engine, + asyncpg_engine: AsyncEngine, ) -> AsyncIterator[None]: yield - async with aiopg_engine.acquire() as conn: + async with asyncpg_engine.connect() as conn: await conn.execute(user_preferences_frontend.delete()) + + +@pytest.fixture +def app_environment( + monkeypatch: pytest.MonkeyPatch, + app_environment: EnvVarsDict, +) -> EnvVarsDict: + return app_environment | setenvs_from_dict( + monkeypatch, + { + # disable tracing for tests + "TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT": "null", + "TRACING_OPENTELEMETRY_COLLECTOR_PORT": "null", + "WEBSERVER_TRACING": "null", + }, + ) + + +@pytest.fixture +async def support_group_before_app_starts( + asyncpg_engine: AsyncEngine, + product_name: str, +) -> AsyncIterator[dict[str, Any]]: + """Creates a standard support group and assigns it to the current product + + NOTE: this has to be added BEFORE any client fixture in the tests + """ + from pytest_simcore.helpers.postgres_tools import insert_and_get_row_lifespan + from simcore_postgres_database.models.groups import groups + from simcore_postgres_database.models.products import products + + # Create support group using direct database insertion + group_values = { + "name": "Support Group", + "description": "Support group for product", + "type": "STANDARD", + } + + # pylint: disable=contextmanager-generator-missing-cleanup + async with insert_and_get_row_lifespan( + asyncpg_engine, + table=groups, + values=group_values, + pk_col=groups.c.gid, + ) as group_row: + group_id = group_row["gid"] + + # Update product to set support_standard_group_id + async with asyncpg_engine.begin() as conn: + await conn.execute( + sa.update(products) + 
            .where(products.c.name == product_name)
+            .values(support_standard_group_id=group_id)
+        )
+
+    yield group_row
+    # group will be deleted after test
diff --git a/services/web/server/tests/unit/with_dbs/03/invitations/test_invitations.py b/services/web/server/tests/unit/with_dbs/03/invitations/test_invitations.py
index 030a88e55cc5..0e1de44dddda 100644
--- a/services/web/server/tests/unit/with_dbs/03/invitations/test_invitations.py
+++ b/services/web/server/tests/unit/with_dbs/03/invitations/test_invitations.py
@@ -95,7 +95,7 @@ async def test_invalid_invitation_if_guest_is_already_registered_in_product(

     # user exists, and we skip product registration to do this test
     mocker.patch(
-        "pytest_simcore.helpers.webserver_login.auto_add_user_to_product_group",
+        "pytest_simcore.helpers.webserver_users.groups_service.auto_add_user_to_product_group",
         return_value=f"Mocked in {__file__}. SKIPPED auto_add_user_to_product_group",
         autospec=True,
     )
diff --git a/services/web/server/tests/unit/with_dbs/03/invitations/test_login_handlers_registration_invitations.py b/services/web/server/tests/unit/with_dbs/03/invitations/test_login_handlers_registration_invitations.py
index 8bb512d5d4c0..f16c6bb9fb57 100644
--- a/services/web/server/tests/unit/with_dbs/03/invitations/test_login_handlers_registration_invitations.py
+++ b/services/web/server/tests/unit/with_dbs/03/invitations/test_login_handlers_registration_invitations.py
@@ -78,7 +78,7 @@ async def test_check_registration_invitations_with_old_code(
     assert invitation.email is None


-@pytest.mark.acceptance_test()
+@pytest.mark.acceptance_test
 async def test_check_registration_invitation_and_get_email(
     client: TestClient,
     mocker: MockerFixture,
@@ -153,18 +153,22 @@ async def _register_account(invitation_url: HttpUrl, product_deployed: ProductNa
     invitation_product_a = await generate_invitation(
         client.app,
         ApiInvitationInputs(issuer="PO", guest=guest_email, product=product_a),
+        product_origin_url=URL("http://product_a.com/some/path").origin(),
     )

     # 2. 
PO creates invitation for product B invitation_product_b = await generate_invitation( client.app, ApiInvitationInputs(issuer="PO", guest=guest_email, product=product_b), + product_origin_url=URL("http://product_b.com/some/path").origin(), ) # CAN register for product A in deploy of product A + assert invitation_product_a.invitation_url.host == "product_a.com" response = await _register_account(invitation_product_a.invitation_url, product_a) await assert_status(response, status.HTTP_200_OK) # CANNOT register in product B in deploy of product A + assert invitation_product_b.invitation_url.host == "product_b.com" response = await _register_account(invitation_product_b.invitation_url, product_a) await assert_status(response, status.HTTP_409_CONFLICT) diff --git a/services/web/server/tests/unit/with_dbs/03/invitations/test_products_rest_invitations.py b/services/web/server/tests/unit/with_dbs/03/invitations/test_products_rest_invitations.py index 6b267396786e..3e2f08e68b50 100644 --- a/services/web/server/tests/unit/with_dbs/03/invitations/test_products_rest_invitations.py +++ b/services/web/server/tests/unit/with_dbs/03/invitations/test_products_rest_invitations.py @@ -20,9 +20,10 @@ from pytest_simcore.aioresponses_mocker import AioResponsesMock from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.faker_factories import DEFAULT_TEST_PASSWORD -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.users import UserRole +from simcore_service_webserver.models import PhoneNumberStr @pytest.mark.parametrize( @@ -33,7 +34,7 @@ (UserRole.USER, status.HTTP_403_FORBIDDEN), (UserRole.TESTER, status.HTTP_403_FORBIDDEN), (UserRole.PRODUCT_OWNER, status.HTTP_200_OK), - (UserRole.ADMIN, status.HTTP_403_FORBIDDEN), + (UserRole.ADMIN, status.HTTP_200_OK), ], ) async def test_role_access_to_generate_invitation( @@ -131,13 +132,14 @@ async def test_pre_registration_and_invitation_workflow( expected_status: HTTPStatus, guest_email: str, faker: Faker, + user_phone_number: PhoneNumberStr, ): requester_info = { "firstName": faker.first_name(), "lastName": faker.last_name(), "email": guest_email, "companyName": faker.company(), - "phone": faker.phone_number(), + "phone": user_phone_number, # billing info "address": faker.address().replace("\n", ", "), "city": faker.city(), diff --git a/services/web/server/tests/unit/with_dbs/03/test_users_rest_registration.py b/services/web/server/tests/unit/with_dbs/03/invitations/test_users_accounts_rest_registration.py similarity index 53% rename from services/web/server/tests/unit/with_dbs/03/test_users_rest_registration.py rename to services/web/server/tests/unit/with_dbs/03/invitations/test_users_accounts_rest_registration.py index ed41138d5b80..e44ef69a2daa 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users_rest_registration.py +++ b/services/web/server/tests/unit/with_dbs/03/invitations/test_users_accounts_rest_registration.py @@ -6,9 +6,11 @@ # pylint: disable=unused-variable -from collections.abc import AsyncGenerator +import asyncio +from collections.abc import AsyncGenerator, AsyncIterator from http import HTTPStatus from typing import Any +from unittest.mock import AsyncMock import pytest import simcore_service_webserver.login._auth_service @@ -21,8 +23,11 @@ from models_library.api_schemas_webserver.users import ( UserAccountGet, ) +from models_library.groups import 
AccessRightsDict from models_library.products import ProductName from models_library.rest_pagination import Page +from pytest_mock import MockerFixture +from pytest_simcore.aioresponses_mocker import AioResponsesMock from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.faker_factories import ( DEFAULT_TEST_PASSWORD, @@ -32,12 +37,14 @@ from pytest_simcore.helpers.webserver_login import ( UserInfoDict, ) +from pytest_simcore.helpers.webserver_users import NewUser from servicelib.aiohttp import status from servicelib.rest_constants import X_PRODUCT_NAME_HEADER from simcore_postgres_database.models.users_details import ( users_pre_registration_details, ) from simcore_service_webserver.db.plugin import get_asyncpg_engine +from simcore_service_webserver.models import PhoneNumberStr @pytest.fixture @@ -54,6 +61,65 @@ def app_environment( ) +@pytest.fixture +def mock_email_session(mocker: MockerFixture) -> AsyncMock: + """Mock the email session and capture sent messages""" + # Create a mock email session + mock_session = AsyncMock() + + # List to store sent messages + sent_messages = [] + + async def mock_send_message(msg): + """Mock send_message method to capture messages""" + sent_messages.append(msg) + + mock_session.send_message = mock_send_message + mock_session.sent_messages = sent_messages + + # Mock the context manager behavior + mock_session.__aenter__ = AsyncMock(return_value=mock_session) + mock_session.__aexit__ = AsyncMock(return_value=None) + + # Use mocker to patch the create_email_session function + mocker.patch( + "simcore_service_webserver.users._accounts_service.create_email_session", + return_value=mock_session, + ) + + return mock_session + + +@pytest.fixture +async def support_user( + support_group_before_app_starts: dict, + client: TestClient, +) -> AsyncIterator[UserInfoDict]: + """Creates an active user that belongs to the product's support group.""" + async with NewUser( + user_data={ + "name": "support-user", + "status": UserStatus.ACTIVE.name, + "role": UserRole.USER.name, + }, + app=client.app, + ) as user_info: + # Add the user to the support group + assert client.app + + from simcore_service_webserver.groups import _groups_repository + + # Now add user to support group with read-only access + await _groups_repository.add_new_user_in_group( + client.app, + group_id=support_group_before_app_starts["gid"], + new_user_id=user_info["id"], + access_rights=AccessRightsDict(read=True, write=False, delete=False), + ) + + yield user_info + + @pytest.mark.parametrize( "user_role,expected", [ @@ -61,9 +127,10 @@ def app_environment( *( (role, status.HTTP_403_FORBIDDEN) for role in UserRole - if role not in {UserRole.PRODUCT_OWNER, UserRole.ANONYMOUS} + if UserRole.ANONYMOUS < role < UserRole.PRODUCT_OWNER ), (UserRole.PRODUCT_OWNER, status.HTTP_200_OK), + (UserRole.ADMIN, status.HTTP_200_OK), ], ) async def test_access_rights_on_search_users_only_product_owners_can_access( @@ -81,14 +148,37 @@ async def test_access_rights_on_search_users_only_product_owners_can_access( await assert_status(resp, expected) +async def test_access_rights_on_search_users_support_user_can_access_when_above_guest( + support_user: UserInfoDict, + # keep support_user first since it has to be created before the app starts + client: TestClient, + pre_registration_details_db_cleanup: None, +): + """Test that support users with role > GUEST can access the search endpoint.""" + assert client.app + + from pytest_simcore.helpers.webserver_login import 
switch_client_session_to + + # Switch client session to the support user + async with switch_client_session_to(client, support_user): + url = client.app.router["search_user_accounts"].url_for() + assert url.path == "/v0/admin/user-accounts:search" + + resp = await client.get(url.path, params={"email": "do-not-exists@foo.com"}) + await assert_status(resp, status.HTTP_200_OK) + + @pytest.fixture -def account_request_form(faker: Faker) -> dict[str, Any]: +def account_request_form( + faker: Faker, + user_phone_number: PhoneNumberStr, +) -> dict[str, Any]: # This is AccountRequestInfo.form form = { "firstName": faker.first_name(), "lastName": faker.last_name(), "email": faker.email(), - "phone": faker.phone_number(), + "phone": user_phone_number, "company": faker.company(), # billing info "address": faker.address().replace("\n", ", "), @@ -141,6 +231,14 @@ async def test_search_and_pre_registration( ): assert client.app + # NOTE: listing of user accounts drops nullable fields to avoid lengthy responses (even if they have no defaults) + # therefore they are reconstructed here from http response payloads + nullable_fields = { + name: None + for name, field in UserAccountGet.model_fields.items() + if is_nullable(field) + } + # ONLY in `users` and NOT `users_pre_registration_details` resp = await client.get( "/v0/admin/user-accounts:search", params={"email": logged_user["email"]} @@ -150,12 +248,6 @@ async def test_search_and_pre_registration( found, _ = await assert_status(resp, status.HTTP_200_OK) assert len(found) == 1 - nullable_fields = { - name: None - for name, field in UserAccountGet.model_fields.items() - if is_nullable(field) - } - got = UserAccountGet.model_validate({**nullable_fields, **found[0]}) expected = { "first_name": logged_user.get("first_name"), @@ -171,6 +263,9 @@ async def test_search_and_pre_registration( "extras": {}, "registered": True, "status": UserStatus.ACTIVE, + "user_id": logged_user["id"], + "user_name": logged_user["name"], + "user_primary_group_id": logged_user.get("primary_gid"), } assert got.model_dump(include=set(expected)) == expected @@ -188,8 +283,8 @@ async def test_search_and_pre_registration( ) found, _ = await assert_status(resp, status.HTTP_200_OK) assert len(found) == 1 - got = UserAccountGet(**found[0], state=None, status=None) + got = UserAccountGet.model_validate({**nullable_fields, **found[0]}) assert got.model_dump(include={"registered", "status"}) == { "registered": False, "status": None, @@ -212,10 +307,11 @@ async def test_search_and_pre_registration( ) found, _ = await assert_status(resp, status.HTTP_200_OK) assert len(found) == 1 - got = UserAccountGet(**found[0], state=None) + + got = UserAccountGet.model_validate({**nullable_fields, **found[0]}) assert got.model_dump(include={"registered", "status"}) == { "registered": True, - "status": new_user["status"].name, + "status": new_user["status"], } @@ -290,6 +386,7 @@ async def test_list_users_accounts( status_upon_creation=UserStatus.ACTIVE, expires_at=None, ) + assert new_user["status"] == UserStatus.ACTIVE # 3. Test filtering by status # a. Check PENDING filter (should exclude the registered user) @@ -387,6 +484,7 @@ async def test_reject_user_account( faker: Faker, product_name: ProductName, pre_registration_details_db_cleanup: None, + mock_email_session: AsyncMock, ): assert client.app @@ -423,7 +521,19 @@ async def test_reject_user_account( ) await assert_status(resp, status.HTTP_204_NO_CONTENT) - # 4. Verify the user is no longer in PENDING status + # 4. 
Verify rejection email was sent + # Wait a bit for fire-and-forget task to complete + + await asyncio.sleep(0.1) + + assert len(mock_email_session.sent_messages) == 1 + rejection_msg = mock_email_session.sent_messages[0] + + # Verify email recipients and content + assert pre_registered_email in rejection_msg["To"] + assert "denied" in rejection_msg["Subject"].lower() + + # 5. Verify the user is no longer in PENDING status url = client.app.router["list_users_accounts"].url_for() resp = await client.get( f"{url}?review_status=PENDING", headers={X_PRODUCT_NAME_HEADER: product_name} @@ -432,7 +542,7 @@ async def test_reject_user_account( pending_emails = [user["email"] for user in pending_data] assert pre_registered_email not in pending_emails - # 5. Verify the user is now in REJECTED status + # 6. Verify the user is now in REJECTED status # First get user details to check status resp = await client.get( "/v0/admin/user-accounts:search", @@ -445,10 +555,10 @@ async def test_reject_user_account( # Check that account_request_status is REJECTED user_data = found[0] assert user_data["accountRequestStatus"] == "REJECTED" - assert user_data["accountRequestReviewedBy"] == logged_user["id"] + assert user_data["accountRequestReviewedBy"] == logged_user["name"] assert user_data["accountRequestReviewedAt"] is not None - # 6. Verify that a rejected user cannot be approved + # 7. Verify that a rejected user cannot be approved url = client.app.router["approve_user_account"].url_for() resp = await client.post( f"{url}", @@ -457,3 +567,277 @@ async def test_reject_user_account( ) # Should fail as the account is already reviewed assert resp.status == status.HTTP_400_BAD_REQUEST + + +@pytest.mark.parametrize( + "user_role", + [ + UserRole.PRODUCT_OWNER, + ], +) +async def test_approve_user_account_with_full_invitation_details( + client: TestClient, + logged_user: UserInfoDict, + account_request_form: dict[str, Any], + faker: Faker, + product_name: ProductName, + pre_registration_details_db_cleanup: None, + mock_invitations_service_http_api: AioResponsesMock, + mock_email_session: AsyncMock, +): + """Test approving user account with complete invitation details (trial days + credits)""" + assert client.app + + test_email = faker.email() + + # 1. Create a pre-registered user + form_data = account_request_form.copy() + form_data["firstName"] = faker.first_name() + form_data["lastName"] = faker.last_name() + form_data["email"] = test_email + + resp = await client.post( + "/v0/admin/user-accounts:pre-register", + json=form_data, + headers={X_PRODUCT_NAME_HEADER: product_name}, + ) + await assert_status(resp, status.HTTP_200_OK) + + # 2. Approve the user with full invitation details + approval_payload = { + "email": test_email, + "invitation": { + "trialAccountDays": 30, + "extraCreditsInUsd": 100.0, + }, + } + + url = client.app.router["approve_user_account"].url_for() + resp = await client.post( + f"{url}", + headers={X_PRODUCT_NAME_HEADER: product_name}, + json=approval_payload, + ) + await assert_status(resp, status.HTTP_204_NO_CONTENT) + + # 3. Verify approval email was sent + # Wait a bit for fire-and-forget task to complete + + await asyncio.sleep(0.1) + + assert len(mock_email_session.sent_messages) == 1 + approval_msg = mock_email_session.sent_messages[0] + + # Verify email recipients and content + assert test_email in approval_msg["To"] + assert "accepted" in approval_msg["Subject"].lower() + + # 4. 
Verify the user account status and invitation data in extras + resp = await client.get( + "/v0/admin/user-accounts:search", + params={"email": test_email}, + headers={X_PRODUCT_NAME_HEADER: product_name}, + ) + found, _ = await assert_status(resp, status.HTTP_200_OK) + assert len(found) == 1 + + user_data = found[0] + assert user_data["accountRequestStatus"] == "APPROVED" + assert user_data["accountRequestReviewedBy"] == logged_user["name"] + assert user_data["accountRequestReviewedAt"] is not None + + # 5. Verify invitation data is stored in extras + assert "invitation" in user_data["extras"] + invitation_data = user_data["extras"]["invitation"] + assert invitation_data["guest"] == test_email + assert invitation_data["issuer"] == str(logged_user["id"]) + assert invitation_data["trial_account_days"] == 30 + assert invitation_data["extra_credits_in_usd"] == 100.0 + assert invitation_data["product"] == product_name + assert "invitation_url" in invitation_data + + +@pytest.mark.parametrize( + "user_role", + [UserRole.PRODUCT_OWNER], +) +async def test_approve_user_account_with_trial_days_only( + client: TestClient, + logged_user: UserInfoDict, + account_request_form: dict[str, Any], + faker: Faker, + product_name: ProductName, + pre_registration_details_db_cleanup: None, + mock_invitations_service_http_api: AioResponsesMock, +): + """Test approving user account with only trial days""" + assert client.app + + test_email = faker.email() + + # 1. Create a pre-registered user + form_data = account_request_form.copy() + form_data["firstName"] = faker.first_name() + form_data["lastName"] = faker.last_name() + form_data["email"] = test_email + + resp = await client.post( + "/v0/admin/user-accounts:pre-register", + json=form_data, + headers={X_PRODUCT_NAME_HEADER: product_name}, + ) + await assert_status(resp, status.HTTP_200_OK) + + # 2. Approve the user with only trial days + approval_payload = { + "email": test_email, + "invitation": { + "trialAccountDays": 15, + # No extra_credits_in_usd + }, + } + + url = client.app.router["approve_user_account"].url_for() + resp = await client.post( + f"{url}", + headers={X_PRODUCT_NAME_HEADER: product_name}, + json=approval_payload, + ) + await assert_status(resp, status.HTTP_204_NO_CONTENT) + + # 3. Verify invitation data in extras + resp = await client.get( + "/v0/admin/user-accounts:search", + params={"email": test_email}, + headers={X_PRODUCT_NAME_HEADER: product_name}, + ) + found, _ = await assert_status(resp, status.HTTP_200_OK) + user_data = found[0] + + assert "invitation" in user_data["extras"] + invitation_data = user_data["extras"]["invitation"] + assert invitation_data["trial_account_days"] == 15 + assert invitation_data["extra_credits_in_usd"] is None + + +@pytest.mark.parametrize( + "user_role", + [UserRole.PRODUCT_OWNER], +) +async def test_approve_user_account_with_credits_only( + client: TestClient, + logged_user: UserInfoDict, + account_request_form: dict[str, Any], + faker: Faker, + product_name: ProductName, + pre_registration_details_db_cleanup: None, + mock_invitations_service_http_api: AioResponsesMock, +): + """Test approving user account with only extra credits""" + assert client.app + + test_email = faker.email() + + # 1. 
Create a pre-registered user
+    form_data = account_request_form.copy()
+    form_data["firstName"] = faker.first_name()
+    form_data["lastName"] = faker.last_name()
+    form_data["email"] = test_email
+
+    resp = await client.post(
+        "/v0/admin/user-accounts:pre-register",
+        json=form_data,
+        headers={X_PRODUCT_NAME_HEADER: product_name},
+    )
+    await assert_status(resp, status.HTTP_200_OK)
+
+    # 2. Approve the user with only extra credits
+    approval_payload = {
+        "email": test_email,
+        "invitation": {
+            # No trial_account_days
+            "extraCreditsInUsd": 50.0,
+        },
+    }
+
+    url = client.app.router["approve_user_account"].url_for()
+    resp = await client.post(
+        f"{url}",
+        headers={X_PRODUCT_NAME_HEADER: product_name},
+        json=approval_payload,
+    )
+    await assert_status(resp, status.HTTP_204_NO_CONTENT)
+
+    # 3. Verify invitation data in extras
+    resp = await client.get(
+        "/v0/admin/user-accounts:search",
+        params={"email": test_email},
+        headers={X_PRODUCT_NAME_HEADER: product_name},
+    )
+    found, _ = await assert_status(resp, status.HTTP_200_OK)
+    user_data = found[0]
+
+    assert "invitation" in user_data["extras"]
+    invitation_data = user_data["extras"]["invitation"]
+    assert invitation_data["trial_account_days"] is None
+    assert invitation_data["extra_credits_in_usd"] == 50.0
+
+
+@pytest.mark.parametrize(
+    "user_role",
+    [
+        UserRole.PRODUCT_OWNER,
+    ],
+)
+async def test_approve_user_account_without_invitation(
+    client: TestClient,
+    logged_user: UserInfoDict,
+    account_request_form: dict[str, Any],
+    faker: Faker,
+    product_name: ProductName,
+    pre_registration_details_db_cleanup: None,
+):
+    """Test approving user account without any invitation details"""
+    assert client.app
+
+    test_email = faker.email()
+
+    # 1. Create a pre-registered user
+    form_data = account_request_form.copy()
+    form_data["firstName"] = faker.first_name()
+    form_data["lastName"] = faker.last_name()
+    form_data["email"] = test_email
+
+    resp = await client.post(
+        "/v0/admin/user-accounts:pre-register",
+        json=form_data,
+        headers={X_PRODUCT_NAME_HEADER: product_name},
+    )
+    await assert_status(resp, status.HTTP_200_OK)
+
+    # 2. Approve the user without invitation
+    approval_payload = {
+        "email": test_email,
+        # No invitation field
+    }
+
+    url = client.app.router["approve_user_account"].url_for()
+    resp = await client.post(
+        f"{url}",
+        headers={X_PRODUCT_NAME_HEADER: product_name},
+        json=approval_payload,
+    )
+    await assert_status(resp, status.HTTP_204_NO_CONTENT)
+
+    # 3. 
Verify no invitation data in extras + resp = await client.get( + "/v0/admin/user-accounts:search", + params={"email": test_email}, + headers={X_PRODUCT_NAME_HEADER: product_name}, + ) + found, _ = await assert_status(resp, status.HTTP_200_OK) + user_data = found[0] + + assert user_data["accountRequestStatus"] == "APPROVED" + # Verify no invitation data stored + assert "invitation" not in user_data["extras"] diff --git a/services/web/server/tests/unit/with_dbs/03/login/conftest.py b/services/web/server/tests/unit/with_dbs/03/login/conftest.py index 7268bee5d2ab..a6b1dd9d5969 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/conftest.py +++ b/services/web/server/tests/unit/with_dbs/03/login/conftest.py @@ -14,7 +14,7 @@ from models_library.basic_types import IDStr from pytest_mock import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict +from pytest_simcore.helpers.webserver_users import NewUser, UserInfoDict from simcore_postgres_database.models.users import users from simcore_postgres_database.models.wallets import wallets from simcore_service_webserver.login._login_repository_legacy import ( @@ -76,11 +76,6 @@ def app_environment( return {**app_environment, **envs_plugins, **envs_login, **envs_twilio} -@pytest.fixture -def user_phone_number(faker: Faker) -> str: - return faker.phone_number() - - @pytest.fixture def fake_weak_password(faker: Faker) -> str: return faker.password( diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_preregistration.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_accounts_preregistration.py similarity index 80% rename from services/web/server/tests/unit/with_dbs/03/login/test_login_preregistration.py rename to services/web/server/tests/unit/with_dbs/03/login/test_login_accounts_preregistration.py index 565df3495bcf..bf4e2d894ce7 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/test_login_preregistration.py +++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_accounts_preregistration.py @@ -10,14 +10,17 @@ import pytest from aiohttp import ClientResponseError from aiohttp.test_utils import TestClient +from common_library.users_enums import AccountRequestStatus from faker import Faker from models_library.api_schemas_webserver.auth import AccountRequestInfo +from models_library.api_schemas_webserver.users import UserAccountGet from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict +from pytest_simcore.helpers.webserver_login import switch_client_session_to +from pytest_simcore.helpers.webserver_users import NewUser, UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.users import UserRole -from simcore_service_webserver.login._constants import MSG_USER_DELETED +from simcore_service_webserver.login.constants import MSG_USER_DELETED from simcore_service_webserver.products.products_service import get_product @@ -57,28 +60,12 @@ def mocked_send_email(mocker: MockerFixture) -> MagicMock: @pytest.fixture def mocked_captcha_session(mocker: MockerFixture) -> MagicMock: return mocker.patch( - "simcore_service_webserver.login._controller.rest.preregistration.get_session", + "simcore_service_webserver.login_accounts._controller_rest.session_service.get_session", spec=True, return_value={"captcha": "123456"}, ) 
-@pytest.mark.parametrize( - "user_role", [role for role in UserRole if role >= UserRole.USER] -) -async def test_check_auth(client: TestClient, logged_user: UserInfoDict): - assert client.app - - response = await client.get("/v0/auth:check") - await assert_status(response, status.HTTP_204_NO_CONTENT) - - response = await client.post("/v0/auth/logout") - await assert_status(response, status.HTTP_200_OK) - - response = await client.get("/v0/auth:check") - await assert_status(response, status.HTTP_401_UNAUTHORIZED) - - @pytest.mark.parametrize( "user_role", [role for role in UserRole if role >= UserRole.USER] ) @@ -184,7 +171,7 @@ async def test_request_an_account( assert client.app # A form similar to the one in https://github.com/ITISFoundation/osparc-simcore/pull/5378 user_data = { - **AccountRequestInfo.model_config["json_schema_extra"]["example"]["form"], + **AccountRequestInfo.model_json_schema()["example"]["form"], # fields required in the form "firstName": faker.first_name(), "lastName": faker.last_name(), @@ -204,8 +191,33 @@ async def test_request_an_account( product = get_product(client.app, product_name="osparc") - # sent email? + # check email was sent mimetext = mocked_send_email.call_args[1]["message"] assert "account" in mimetext["Subject"].lower() assert mimetext["From"] == product.support_email assert mimetext["To"] == product.product_owners_email or product.support_email + + # check it appears in PO center + async with NewUser( + user_data={ + "email": "po-user@email.com", + "name": "po-user-fixture", + "role": UserRole.PRODUCT_OWNER, + }, + app=client.app, + ) as product_owner_user, switch_client_session_to(client, product_owner_user): + + response = await client.get( + "v0/admin/user-accounts?limit=20&offset=0&review_status=PENDING" + ) + + data, _ = await assert_status(response, status.HTTP_200_OK) + + assert len(data) == 1 + user = UserAccountGet.model_validate(data[0]) + assert user.first_name == user_data["firstName"] + assert not user.registered + assert user.status is None + assert user.account_request_status == AccountRequestStatus.PENDING + + # TODO add a test for reregistration `AlreadyPreRegisteredError` diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_auth.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_auth.py index 97042d6ed1c3..f9246da56ec4 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/test_login_auth.py +++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_auth.py @@ -1,22 +1,29 @@ +# pylint: disable=protected-access # pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments # pylint: disable=unused-argument # pylint: disable=unused-variable + +import asyncio import json import time +from collections.abc import AsyncIterator, Callable +from contextlib import AsyncExitStack from http import HTTPStatus import pytest -from aiohttp.test_utils import TestClient +from aiohttp.test_utils import TestClient, TestServer from cryptography import fernet from faker import Faker from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import NewUser +from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict from servicelib.aiohttp import status from settings_library.utils_session import DEFAULT_SESSION_COOKIE_NAME +from simcore_postgres_database.models.users import UserRole from simcore_service_webserver.constants import APP_SETTINGS_KEY from simcore_service_webserver.db.models import UserStatus -from 
simcore_service_webserver.login._constants import ( +from simcore_service_webserver.login.constants import ( MSG_ACTIVATION_REQUIRED, MSG_LOGGED_IN, MSG_UNKNOWN_EMAIL, @@ -28,6 +35,25 @@ from simcore_service_webserver.session.settings import get_plugin_settings +@pytest.mark.parametrize( + "user_role", [role for role in UserRole if role >= UserRole.USER] +) +async def test_check_auth(client: TestClient, logged_user: UserInfoDict): + assert client.app + + url = client.app.router["check_auth"].url_for() + assert url.path == "/v0/auth:check" + + response = await client.get("/v0/auth:check") + await assert_status(response, status.HTTP_204_NO_CONTENT) + + response = await client.post("/v0/auth/logout") + await assert_status(response, status.HTTP_200_OK) + + response = await client.get("/v0/auth:check") + await assert_status(response, status.HTTP_401_UNAUTHORIZED) + + def test_login_plugin_setup_succeeded(client: TestClient): assert client.app print(client.app[APP_SETTINGS_KEY].model_dump_json(indent=1)) @@ -198,3 +224,73 @@ def _build_proxy_session_cookie(identity: str): if not error: assert data["login"] == user["email"] + + +@pytest.fixture +async def multiple_users( + client: TestClient, num_users: int = 5 +) -> AsyncIterator[list[dict[str, str]]]: + """Fixture that creates multiple test users with an AsyncExitStack for cleanup.""" + async with AsyncExitStack() as exit_stack: + users = [] + for _ in range(num_users): + # Use enter_async_context to properly register each NewUser context manager + user_ctx = await exit_stack.enter_async_context(NewUser(app=client.app)) + users.append( + { + "email": user_ctx["email"], + "password": user_ctx["raw_password"], + } + ) + + yield users + # AsyncExitStack will automatically clean up all users when exiting + + +async def test_multiple_users_login_logout_concurrently( + web_server: TestServer, + client: TestClient, + multiple_users: list[dict[str, str]], + aiohttp_client: Callable, +): + """Test multiple users can login concurrently and properly get logged out.""" + assert client.app + + # URLs + login_url = client.app.router["auth_login"].url_for().path + profile_url = client.app.router["get_my_profile"].url_for().path + logout_url = client.app.router["auth_logout"].url_for().path + + async def user_session_flow(user_creds): + # Create a new client for each user to ensure isolated sessions + user_client = await aiohttp_client(web_server) + + # Login + login_resp = await user_client.post( + login_url, + json={"email": user_creds["email"], "password": user_creds["password"]}, + ) + login_data, _ = await assert_status(login_resp, status.HTTP_200_OK) + assert MSG_LOGGED_IN in login_data["message"] + + # Access profile (cookies are automatically sent by the client) + profile_resp = await user_client.get(profile_url) + profile_data, _ = await assert_status(profile_resp, status.HTTP_200_OK) + assert profile_data["login"] == user_creds["email"] + + # Logout + logout_resp = await user_client.post(logout_url) + await assert_status(logout_resp, status.HTTP_200_OK) + + # Try to access profile after logout + profile_after_logout_resp = await user_client.get(profile_url) + _, error = await assert_status( + profile_after_logout_resp, status.HTTP_401_UNAUTHORIZED + ) + + # No need to manually close the client as aiohttp_client fixture handles cleanup + + await user_session_flow(multiple_users[0]) + + # Run all user flows concurrently + await asyncio.gather(*(user_session_flow(user) for user in multiple_users)) diff --git 
a/services/web/server/tests/unit/with_dbs/03/login/test_login_change_email.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_change_email.py index ce08c455852a..c0257fa5e20d 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/test_login_change_email.py +++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_change_email.py @@ -8,7 +8,7 @@ from pytest_simcore.helpers.webserver_login import LoggedUser, NewUser, parse_link from servicelib.aiohttp import status from simcore_service_webserver.constants import INDEX_RESOURCE_NAME -from simcore_service_webserver.login._constants import ( +from simcore_service_webserver.login.constants import ( MSG_CHANGE_EMAIL_REQUESTED, MSG_LOGGED_IN, MSG_LOGGED_OUT, diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_change_password.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_change_password.py index a9b5308a8b48..9a3f422037be 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/test_login_change_password.py +++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_change_password.py @@ -9,7 +9,7 @@ from pytest_simcore.helpers.webserver_login import LoggedUser from servicelib.aiohttp import status from servicelib.rest_responses import unwrap_envelope -from simcore_service_webserver.login._constants import ( +from simcore_service_webserver.login.constants import ( MSG_LOGGED_IN, MSG_PASSWORD_CHANGED, MSG_PASSWORD_MISMATCH, @@ -34,7 +34,6 @@ async def test_unauthorized_to_change_password(client: TestClient, new_password: "confirm": new_password, }, ) - assert response.status == 401 await assert_status(response, status.HTTP_401_UNAUTHORIZED) @@ -54,7 +53,7 @@ async def test_wrong_current_password( }, ) assert response.url.path == url.path - assert response.status == 422 + assert response.status == status.HTTP_422_UNPROCESSABLE_ENTITY assert MSG_WRONG_PASSWORD in await response.text() await assert_status( response, status.HTTP_422_UNPROCESSABLE_ENTITY, MSG_WRONG_PASSWORD @@ -90,6 +89,7 @@ async def test_wrong_confirm_pass(client: TestClient, new_password: str): "field": "confirm", } ], + "message": "Invalid field/s 'confirm' in request body", } @@ -110,13 +110,13 @@ async def test_success(client: TestClient, new_password: str): }, ) assert response.url.path == url_change_password.path - assert response.status == 200 + assert response.status == status.HTTP_200_OK assert MSG_PASSWORD_CHANGED in await response.text() await assert_status(response, status.HTTP_200_OK, MSG_PASSWORD_CHANGED) # logout response = await client.post(f"{url_logout}") - assert response.status == 200 + assert response.status == status.HTTP_200_OK assert response.url.path == url_logout.path # login with new password @@ -127,6 +127,6 @@ async def test_success(client: TestClient, new_password: str): "password": new_password, }, ) - assert response.status == 200 + assert response.status == status.HTTP_200_OK assert response.url.path == url_login.path await assert_status(response, status.HTTP_200_OK, MSG_LOGGED_IN) diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_confirmation_service.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_confirmation_service.py index cb4d599f8190..30f06a474fac 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/test_login_confirmation_service.py +++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_confirmation_service.py @@ -2,7 +2,7 @@ from aiohttp.test_utils import make_mocked_request from aiohttp.web import 
Application -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from simcore_service_webserver.login import _confirmation_service, _confirmation_web from simcore_service_webserver.login._login_repository_legacy import AsyncpgStorage from simcore_service_webserver.login.settings import LoginOptions diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_controller_confirmation_rest.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_controller_confirmation_rest.py index 826f98498c5c..ce12c1244f6f 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/test_login_controller_confirmation_rest.py +++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_controller_confirmation_rest.py @@ -11,7 +11,7 @@ from aiohttp.test_utils import TestClient from common_library.users_enums import UserStatus from models_library.products import ProductName -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver.login._login_repository_legacy import ( ActionLiteralStr, diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_logout.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_logout.py index 13aa95c32e41..52ab61ed19ed 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/test_login_logout.py +++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_logout.py @@ -15,8 +15,7 @@ async def test_logout(client: TestClient, db: AsyncpgStorage): logout_url = client.app.router["auth_logout"].url_for() protected_url = client.app.router["get_my_profile"].url_for() - async with LoggedUser(client) as user: - + async with LoggedUser(client): # try to access protected page response = await client.get(f"{protected_url}") assert response.url.path == protected_url.path @@ -31,5 +30,3 @@ async def test_logout(client: TestClient, db: AsyncpgStorage): response = await client.get(f"{protected_url}") assert response.url.path == protected_url.path await assert_status(response, status.HTTP_401_UNAUTHORIZED) - - await db.delete_user(user) diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_registration.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_registration.py index cc8e68689ab3..d54213cb29b2 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/test_login_registration.py +++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_registration.py @@ -9,7 +9,7 @@ import pytest from aiohttp.test_utils import TestClient from faker import Faker -from models_library.api_schemas_webserver.users import MyProfileGet +from models_library.api_schemas_webserver.users import MyProfileRestGet from models_library.products import ProductName from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_error, assert_status @@ -19,14 +19,15 @@ from servicelib.rest_responses import unwrap_envelope from simcore_service_webserver.db.models import UserStatus from simcore_service_webserver.groups.api import auto_add_user_to_product_group +from simcore_service_webserver.login import _auth_service from simcore_service_webserver.login._confirmation_web import _url_for_confirmation -from simcore_service_webserver.login._constants import ( +from simcore_service_webserver.login._login_repository_legacy import AsyncpgStorage +from 
simcore_service_webserver.login.constants import ( MSG_EMAIL_ALREADY_REGISTERED, MSG_LOGGED_IN, MSG_PASSWORD_MISMATCH, MSG_WEAK_PASSWORD, ) -from simcore_service_webserver.login._login_repository_legacy import AsyncpgStorage from simcore_service_webserver.login.settings import ( LoginOptions, LoginSettingsForProduct, @@ -107,6 +108,7 @@ async def test_register_body_validation( "field": "confirm", }, ], + "message": "Invalid field/s 'email, confirm' in request body", } @@ -262,7 +264,6 @@ async def test_registration_with_invalid_confirmation_code( async def test_registration_without_confirmation( client: TestClient, - db: AsyncpgStorage, mocker: MockerFixture, user_email: str, user_password: str, @@ -292,13 +293,12 @@ async def test_registration_without_confirmation( data, _ = await assert_status(response, status.HTTP_200_OK) assert MSG_LOGGED_IN in data["message"] - user = await db.get_user({"email": user_email}) + user = await _auth_service.get_user_or_none(client.app, email=user_email) assert user async def test_registration_with_confirmation( client: TestClient, - db: AsyncpgStorage, capsys: pytest.CaptureFixture, mocker: MockerFixture, user_email: str, @@ -330,7 +330,8 @@ async def test_registration_with_confirmation( data, error = unwrap_envelope(await response.json()) assert response.status == 200, (data, error) - user = await db.get_user({"email": user_email}) + user = await _auth_service.get_user_or_none(client.app, email=user_email) + assert user assert user["status"] == UserStatus.CONFIRMATION_PENDING.name assert "verification link" in data["message"] @@ -349,7 +350,8 @@ async def test_registration_with_confirmation( assert response.status == 200 # user is active - user = await db.get_user({"email": user_email}) + user = await _auth_service.get_user_or_none(client.app, email=user_email) + assert user assert user["status"] == UserStatus.ACTIVE.name @@ -494,7 +496,7 @@ async def test_registraton_with_invitation_for_trial_account( url = client.app.router["get_my_profile"].url_for() response = await client.get(url.path) data, _ = await assert_status(response, status.HTTP_200_OK) - profile = MyProfileGet.model_validate(data) + profile = MyProfileRestGet.model_validate(data) expected = invitation.user["created_at"] + timedelta(days=TRIAL_DAYS) assert profile.expiration_date diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_reset_password.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_reset_password.py index 2451e0adee7c..000694d6fffb 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/test_login_reset_password.py +++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_reset_password.py @@ -18,7 +18,11 @@ from servicelib.utils_secrets import generate_password from simcore_service_webserver.db.models import ConfirmationAction, UserStatus from simcore_service_webserver.groups import api as groups_service -from simcore_service_webserver.login._constants import ( +from simcore_service_webserver.login._login_repository_legacy import ( + AsyncpgStorage, + ConfirmationTokenDict, +) +from simcore_service_webserver.login.constants import ( MSG_ACTIVATION_REQUIRED, MSG_EMAIL_SENT, MSG_LOGGED_IN, @@ -26,12 +30,8 @@ MSG_USER_BANNED, MSG_USER_EXPIRED, ) -from simcore_service_webserver.login._login_repository_legacy import ( - AsyncpgStorage, - ConfirmationTokenDict, -) from simcore_service_webserver.login.settings import LoginOptions -from simcore_service_webserver.users import api as users_service +from simcore_service_webserver.users 
import users_service as users_service from yarl import URL # diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_twofa.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_twofa.py index 3bd849f086bd..b31b1fae3fcb 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/test_login_twofa.py +++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_twofa.py @@ -21,10 +21,7 @@ from simcore_postgres_database.models.products import ProductLoginSettingsDict, products from simcore_service_webserver.application_settings import ApplicationSettings from simcore_service_webserver.db.models import UserStatus -from simcore_service_webserver.login._constants import ( - CODE_2FA_SMS_CODE_REQUIRED, - MSG_2FA_UNAVAILABLE, -) +from simcore_service_webserver.login import _auth_service from simcore_service_webserver.login._login_repository_legacy import AsyncpgStorage from simcore_service_webserver.login._twofa_service import ( _do_create_2fa_code, @@ -34,10 +31,15 @@ get_redis_validation_code_client, send_email_code, ) +from simcore_service_webserver.login.constants import ( + CODE_2FA_SMS_CODE_REQUIRED, + MSG_2FA_UNAVAILABLE, + MSG_LOGGED_IN, +) from simcore_service_webserver.products import products_web from simcore_service_webserver.products.errors import UnknownProductError from simcore_service_webserver.products.models import Product -from simcore_service_webserver.users import preferences_api as user_preferences_api +from simcore_service_webserver.user_preferences import user_preferences_service from twilio.base.exceptions import TwilioRestException @@ -160,7 +162,8 @@ def _get_confirmation_link_from_email(): assert response.status == status.HTTP_200_OK # check email+password registered - user = await db.get_user({"email": user_email}) + user = await _auth_service.get_user_or_none(client.app, email=user_email) + assert user assert user["status"] == UserStatus.ACTIVE.name assert user["phone"] is None @@ -194,7 +197,8 @@ def _get_confirmation_link_from_email(): assert phone == user_phone_number # check phone still NOT in db (TODO: should be in database and unconfirmed) - user = await db.get_user({"email": user_email}) + user = await _auth_service.get_user_or_none(client.app, email=user_email) + assert user assert user["status"] == UserStatus.ACTIVE.name assert user["phone"] is None @@ -210,7 +214,8 @@ def _get_confirmation_link_from_email(): ) await assert_status(response, status.HTTP_200_OK) # check user has phone confirmed - user = await db.get_user({"email": user_email}) + user = await _auth_service.get_user_or_none(client.app, email=user_email) + assert user assert user["status"] == UserStatus.ACTIVE.name assert user["phone"] == user_phone_number @@ -251,7 +256,8 @@ def _get_confirmation_link_from_email(): await assert_status(response, status.HTTP_200_OK) # assert users is successfully registered - user = await db.get_user({"email": user_email}) + user = await _auth_service.get_user_or_none(client.app, email=user_email) + assert user assert user["email"] == user_email assert user["phone"] == user_phone_number assert user["status"] == UserStatus.ACTIVE.value @@ -259,9 +265,9 @@ def _get_confirmation_link_from_email(): # login (via EMAIL) --------------------------------------------------------- # Change 2fa user preference _preference_id = ( - user_preferences_api.TwoFAFrontendUserPreference().preference_identifier + user_preferences_service.TwoFAFrontendUserPreference().preference_identifier ) - await user_preferences_api.set_frontend_user_preference( + 
await user_preferences_service.set_frontend_user_preference( client.app, user_id=user["id"], product_name="osparc", @@ -290,7 +296,7 @@ def _get_confirmation_link_from_email(): assert "support" in parsed_context["support_email"] # login (2FA Disabled) --------------------------------------------------------- - await user_preferences_api.set_frontend_user_preference( + await user_preferences_service.set_frontend_user_preference( client.app, user_id=user["id"], product_name="osparc", @@ -307,7 +313,7 @@ def _get_confirmation_link_from_email(): }, ) data, _ = await assert_status(response, status.HTTP_200_OK) - assert "logged in" in data["message"] + assert MSG_LOGGED_IN in data["message"] async def test_can_register_same_phone_in_different_accounts( @@ -462,7 +468,7 @@ async def test_2fa_sms_failure_during_login( response, status.HTTP_503_SERVICE_UNAVAILABLE ) assert not data - assert error["errors"][0]["message"].startswith(MSG_2FA_UNAVAILABLE[:10]) + assert error["message"].startswith(MSG_2FA_UNAVAILABLE[:10]) # Expects logs like 'Failed while setting up 2FA code and sending SMS to 157XXXXXXXX3 [OEC:140392495277888]' assert f"{user_phone_number[:3]}" in caplog.text diff --git a/services/web/server/tests/unit/with_dbs/03/login/test_login_twofa_resend.py b/services/web/server/tests/unit/with_dbs/03/login/test_login_twofa_resend.py index 4f413eb35005..9ab57d5d6390 100644 --- a/services/web/server/tests/unit/with_dbs/03/login/test_login_twofa_resend.py +++ b/services/web/server/tests/unit/with_dbs/03/login/test_login_twofa_resend.py @@ -9,15 +9,15 @@ from pytest_mock import MockFixture from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.products import ProductLoginSettingsDict, products from simcore_service_webserver.application_settings import ApplicationSettings -from simcore_service_webserver.login._constants import CODE_2FA_SMS_CODE_REQUIRED -from simcore_service_webserver.login._controller.rest.auth import ( +from simcore_service_webserver.login._controller.rest.auth_schemas import ( CodePageParams, NextPage, ) +from simcore_service_webserver.login.constants import CODE_2FA_SMS_CODE_REQUIRED @pytest.fixture diff --git a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_admin_pricing_plans.py b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_admin_pricing_plans.py index 35f6255a5e1c..d5fffc76ef4a 100644 --- a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_admin_pricing_plans.py +++ b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_admin_pricing_plans.py @@ -14,7 +14,7 @@ from models_library.resource_tracker import PricingPlanClassification from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver.db.models import UserRole diff --git a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_list_osparc_credits_aggregated_usages.py b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_list_osparc_credits_aggregated_usages.py index 08b00cefd1b1..168b0779ae51 100644 --- 
a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_list_osparc_credits_aggregated_usages.py +++ b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_list_osparc_credits_aggregated_usages.py @@ -19,7 +19,7 @@ OsparcCreditsAggregatedUsagesPage, ) from pytest_mock.plugin import MockerFixture -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.wallets import wallets from simcore_service_webserver.db.models import UserRole diff --git a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_pricing_plans.py b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_pricing_plans.py index 046bf286d48e..3a4196903540 100644 --- a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_pricing_plans.py +++ b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_pricing_plans.py @@ -18,7 +18,7 @@ from models_library.utils.fastapi_encoders import jsonable_encoder from pytest_simcore.aioresponses_mocker import AioResponsesMock from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from settings_library.resource_usage_tracker import ResourceUsageTrackerSettings from simcore_service_webserver.db.models import UserRole @@ -77,7 +77,7 @@ async def test_get_pricing_plan_user_role_access( await assert_status(resp, expected) -@pytest.mark.parametrize("user_role", [(UserRole.USER)]) +@pytest.mark.parametrize("user_role", [UserRole.USER]) async def test_get_pricing_plan( client: TestClient, logged_user: UserInfoDict, @@ -104,7 +104,7 @@ async def test_get_pricing_plan( assert len(data["pricingUnits"]) == 1 -@pytest.mark.parametrize("user_role", [(UserRole.USER)]) +@pytest.mark.parametrize("user_role", [UserRole.USER]) async def test_list_pricing_plans( client: TestClient, logged_user: UserInfoDict, diff --git a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__export.py b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__export.py index ce24dfb9c439..7d6312724ade 100644 --- a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__export.py +++ b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__export.py @@ -18,7 +18,7 @@ from models_library.rest_ordering import OrderBy from pydantic import AnyUrl, TypeAdapter from pytest_mock.plugin import MockerFixture -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.wallets import wallets from simcore_service_webserver.db.models import UserRole diff --git a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__list.py b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__list.py index 1370507e491e..5b201e5cf0a5 100644 --- a/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__list.py +++ b/services/web/server/tests/unit/with_dbs/03/resource_usage/test_usage_services__list.py @@ -19,7 +19,7 @@ ) from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import 
UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.wallets import wallets from simcore_service_webserver.db.models import UserRole @@ -112,7 +112,7 @@ async def test_list_service_usage_user_role_access( await assert_status(resp, expected) -@pytest.mark.parametrize("user_role", [(UserRole.USER)]) +@pytest.mark.parametrize("user_role", [UserRole.USER]) async def test_list_service_usage( client: TestClient, logged_user: UserInfoDict, @@ -159,7 +159,7 @@ async def test_list_service_usage( assert mock_list_usage_services.call_args[1]["access_all_wallet_usage"] is False -@pytest.mark.parametrize("user_role", [(UserRole.USER)]) +@pytest.mark.parametrize("user_role", [UserRole.USER]) async def test_list_service_usage_with_order_by_query_param( client: TestClient, logged_user: UserInfoDict, @@ -269,7 +269,7 @@ async def test_list_service_usage_with_order_by_query_param( assert error["errors"][0]["field"] == "order_by.field" -@pytest.mark.parametrize("user_role", [(UserRole.USER)]) +@pytest.mark.parametrize("user_role", [UserRole.USER]) async def test_list_service_usage_with_filters_query_param( client: TestClient, logged_user: UserInfoDict, diff --git a/services/web/server/tests/unit/with_dbs/03/tags/conftest.py b/services/web/server/tests/unit/with_dbs/03/tags/conftest.py index 03a8f66e9a20..cdf12044e6cc 100644 --- a/services/web/server/tests/unit/with_dbs/03/tags/conftest.py +++ b/services/web/server/tests/unit/with_dbs/03/tags/conftest.py @@ -10,8 +10,8 @@ from aioresponses import aioresponses from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.helpers.webserver_projects import NewProject, delete_all_projects +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp.application import create_safe_application from simcore_service_webserver.application_settings import setup_settings from simcore_service_webserver.db.plugin import setup_db diff --git a/services/web/server/tests/unit/with_dbs/03/tags/test_tags.py b/services/web/server/tests/unit/with_dbs/03/tags/test_tags.py index 323bd0e33755..51a446d39439 100644 --- a/services/web/server/tests/unit/with_dbs/03/tags/test_tags.py +++ b/services/web/server/tests/unit/with_dbs/03/tags/test_tags.py @@ -4,32 +4,34 @@ # pylint: disable=too-many-arguments -from collections.abc import AsyncIterator, Callable, Iterator +from collections.abc import AsyncIterator, Iterator from typing import Any import pytest import sqlalchemy as sa from aiohttp.test_utils import TestClient from faker import Faker +from models_library.api_schemas_webserver.projects import ( + ProjectShareStateOutputSchema, + ProjectStateOutputSchema, +) from models_library.basic_types import IdInt from models_library.groups import EVERYONE_GROUP_ID from models_library.products import ProductName from models_library.projects_state import ( - ProjectLocked, ProjectRunningState, - ProjectState, ProjectStatus, RunningState, ) from models_library.utils.fastapi_encoders import jsonable_encoder from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.postgres_tags import create_tag, delete_tag -from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict from pytest_simcore.helpers.webserver_projects import assert_get_same_project +from 
pytest_simcore.helpers.webserver_users import NewUser, UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.tags import tags from simcore_service_webserver.db.models import UserRole -from simcore_service_webserver.db.plugin import get_database_engine +from simcore_service_webserver.db.plugin import get_database_engine_legacy from simcore_service_webserver.products._service import get_product from simcore_service_webserver.projects.models import ProjectDict @@ -52,9 +54,7 @@ async def test_tags_to_studies( client: TestClient, faker: Faker, user_project: ProjectDict, - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], ): - catalog_subsystem_mock([user_project]) assert client.app # Add test tags @@ -82,9 +82,11 @@ async def test_tags_to_studies( # check the tags are in user_project["tags"] = [tag["id"] for tag in added_tags] user_project["state"] = jsonable_encoder( - ProjectState( - locked=ProjectLocked(value=False, status=ProjectStatus.CLOSED), - state=ProjectRunningState(value=RunningState.UNKNOWN), + ProjectStateOutputSchema( + share_state=ProjectShareStateOutputSchema( + locked=False, status=ProjectStatus.CLOSED, current_user_groupids=[] + ), + state=ProjectRunningState(value=RunningState.NOT_STARTED), ), exclude_unset=True, ) @@ -123,7 +125,7 @@ async def test_tags_to_studies( @pytest.fixture async def everybody_tag_id(client: TestClient) -> AsyncIterator[int]: assert client.app - engine = get_database_engine(client.app) + engine = get_database_engine_legacy(client.app) assert engine async with engine.acquire() as conn: diff --git a/services/web/server/tests/unit/with_dbs/03/test__openapi_specs.py b/services/web/server/tests/unit/with_dbs/03/test__openapi_specs.py index 886817bbf263..d3b561cae319 100644 --- a/services/web/server/tests/unit/with_dbs/03/test__openapi_specs.py +++ b/services/web/server/tests/unit/with_dbs/03/test__openapi_specs.py @@ -41,7 +41,6 @@ def app_environment( "WEBSERVER_GARBAGE_COLLECTOR": "null", # enable plugins that by default are disabled "WEBSERVER_DEV_FEATURES_ENABLED": "1", - "WEBSERVER_CLUSTERS": "1", # enables activity WEBSERVER_ACTIVITY "PROMETHEUS_URL": f"https://{faker.domain_name()}", "PROMETHEUS_USERNAME": faker.user_name(), diff --git a/services/web/server/tests/unit/with_dbs/03/test_email.py b/services/web/server/tests/unit/with_dbs/03/test_email.py index f31c54f259ed..5cf04129d014 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_email.py +++ b/services/web/server/tests/unit/with_dbs/03/test_email.py @@ -15,13 +15,14 @@ import pytest from aiohttp import web from aiohttp.test_utils import TestClient, make_mocked_request +from common_library.users_enums import UserRole from faker import Faker from pydantic import ValidationError from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.webserver_login import UserInfoDict, UserRole +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from settings_library.email import EmailProtocol, SMTPSettings from simcore_service_webserver._meta import API_VTAG @@ -29,10 +30,10 @@ from simcore_service_webserver.email._core import _remove_comments, _render_template from simcore_service_webserver.email._handlers import EmailTestFailed, EmailTestPassed from simcore_service_webserver.email.plugin import setup_email 
-from simcore_service_webserver.login._controller.rest.preregistration import ( +from simcore_service_webserver.login_accounts._controller_rest import ( _get_ipinfo, ) -from simcore_service_webserver.login._preregistration_service import ( +from simcore_service_webserver.login_accounts._service import ( _json_encoder_and_dumps, ) @@ -115,7 +116,7 @@ async def test_email_handlers( mocked_aiosmtplib: MagicMock, mocked_send_email: MagicMock, ): - assert logged_user["role"] == user_role.name + assert logged_user["role"] == user_role destination_email = faker.email() response = await client.post( @@ -213,6 +214,7 @@ def test_render_templates(template_path: Path, faker: Faker): "dumps": functools.partial(_json_encoder_and_dumps, indent=1), "request_form": fake_request_form, "ipinfo": _get_ipinfo(request), + "extra_context": {"extra": "information"}, }, ) diff --git a/services/web/server/tests/unit/with_dbs/03/test_login_auth_app.py b/services/web/server/tests/unit/with_dbs/03/test_login_auth_app.py new file mode 100644 index 000000000000..8f80e463063c --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/03/test_login_auth_app.py @@ -0,0 +1,243 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +import logging +from collections.abc import Callable +from pathlib import Path + +import pytest +import pytest_asyncio +import sqlalchemy as sa +import yaml +from aiohttp import web +from aiohttp.test_utils import TestClient, TestServer +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict +from pytest_simcore.helpers.webserver_login import UserInfoDict +from servicelib.aiohttp import status +from simcore_service_webserver.application import create_application_auth +from simcore_service_webserver.application_settings import ( + ApplicationSettings, + get_application_settings, +) +from simcore_service_webserver.application_settings_utils import AppConfigDict +from simcore_service_webserver.security import security_web + + +@pytest.fixture +def service_name() -> str: + return "wb-auth" + + +@pytest.fixture +def app_environment_for_wb_authz_service_dict( + docker_compose_service_environment_dict: EnvVarsDict, + docker_compose_service_hostname: str, + default_app_cfg: AppConfigDict, +) -> EnvVarsDict: + + postgres_cfg = default_app_cfg["db"]["postgres"] + + # Checks that docker-compose service environment is correct + assert ( + docker_compose_service_environment_dict["WEBSERVER_APP_FACTORY_NAME"] + == "WEBSERVER_AUTHZ_APP_FACTORY" + ) + # expected tracing in the docker-environ BUT we will disable it for tests + assert "WEBSERVER_TRACING" in docker_compose_service_environment_dict + assert ( + "TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT" + in docker_compose_service_environment_dict + ) + assert "WEBSERVER_DIAGNOSTICS" in docker_compose_service_environment_dict + assert "WEBSERVER_PROFILING" in docker_compose_service_environment_dict + + return { + **docker_compose_service_environment_dict, + # NOTE: TEST-stack uses different env-vars + # this is temporary here until we get rid of config files + # SEE https://github.com/ITISFoundation/osparc-simcore/issues/8129 + "POSTGRES_DB": postgres_cfg["database"], + "POSTGRES_HOST": postgres_cfg["host"], + "POSTGRES_PORT": postgres_cfg["port"], + "POSTGRES_USER": postgres_cfg["user"], + 
"POSTGRES_PASSWORD": postgres_cfg["password"], + "HOSTNAME": docker_compose_service_hostname, + "WEBSERVER_TRACING": "null", # BUT we will disable it for tests + } + + +@pytest.fixture +def app_environment_for_wb_authz_service( + monkeypatch: pytest.MonkeyPatch, + app_environment_for_wb_authz_service_dict: EnvVarsDict, + service_name: str, +) -> EnvVarsDict: + """Mocks the environment variables for the auth app service (considering docker-compose's environment).""" + + mocked_envs = setenvs_from_dict( + monkeypatch, {**app_environment_for_wb_authz_service_dict} + ) + + # test how service will load + settings = ApplicationSettings.create_from_envs() + + logging.info( + "Application settings:\n%s", + settings.model_dump_json(indent=2), + ) + + assert service_name == settings.WEBSERVER_HOST + assert settings.WEBSERVER_DB is not None + assert settings.WEBSERVER_SESSION is not None + assert settings.WEBSERVER_SECURITY is not None + assert settings.WEBSERVER_TRACING is None, "No tracing for tests" + + return mocked_envs + + +@pytest.fixture +async def wb_auth_app( + app_environment_for_wb_authz_service: EnvVarsDict, +) -> web.Application: + assert app_environment_for_wb_authz_service + + # creates auth application instead + app = create_application_auth() + + settings = get_application_settings(app) + assert settings.WEBSERVER_APP_FACTORY_NAME == "WEBSERVER_AUTHZ_APP_FACTORY" + assert ( + settings.APP_NAME == "simcore_service_wb_auth" + ), "APP_NAME in docker-compose for wb-auth is not set correctly" + + # checks endpoint exposed + url = app.router["check_auth"].url_for() + assert url.path == "/v0/auth:check" + + return app + + +@pytest_asyncio.fixture(loop_scope="function", scope="function") +async def web_server( + postgres_db: sa.engine.Engine, # sets up postgres database + wb_auth_app: web.Application, + webserver_test_server_port: int, + # tools + aiohttp_server: Callable, +) -> TestServer: + # Overrides tests/unit/with_dbs/context.py:web_server fixture + + # Add test routes for login/logout + async def test_login(request: web.Request) -> web.Response: + data = await request.json() + response = web.Response(status=200) + return await security_web.remember_identity( + request, response, user_email=data["email"] + ) + + async def test_logout(request: web.Request) -> web.Response: + response = web.Response(status=200) + await security_web.forget_identity(request, response) + return response + + wb_auth_app.router.add_post("/v0/test/login", test_login) + wb_auth_app.router.add_post("/v0/test/logout", test_logout) + + return await aiohttp_server(wb_auth_app, port=webserver_test_server_port) + + +# @pytest.mark.parametrize( +# "user_role", [role for role in UserRole if role > UserRole.ANONYMOUS] +# ) +async def test_check_endpoint_in_auth_app(client: TestClient, user: UserInfoDict): + assert client.app + + # user is not signed it (ANONYMOUS) + response = await client.get("/v0/auth:check") + await assert_status(response, status.HTTP_401_UNAUTHORIZED) + + # Sign in using test login route + await client.post("/v0/test/login", json={"email": user["email"]}) + + # Now user should be authorized + response = await client.get("/v0/auth:check") + await assert_status(response, status.HTTP_204_NO_CONTENT) + + await client.post("/v0/test/logout") + + response = await client.get("/v0/auth:check") + await assert_status(response, status.HTTP_401_UNAUTHORIZED) + + +def test_docker_compose_dev_vendors_forwardauth_configuration( + services_docker_compose_dev_vendors_file: Path, + env_devel_dict: EnvVarsDict, +): + 
"""Test that manual service forwardauth.address points to correct WB_AUTH_WEBSERVER_HOST and port. + + NOTE: traefik's `forwardauth` labels are also used in + `services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/proxy.py` + """ + + # Load docker-compose file + compose_config = yaml.safe_load( + services_docker_compose_dev_vendors_file.read_text() + ) + + # Get the manual service configuration + manual_service = compose_config.get("services", {}).get("manual") + assert ( + manual_service is not None + ), "Manual service not found in docker-compose-dev-vendors.yml" + + # Extract forwardauth.address from deploy labels + deploy_labels = manual_service.get("deploy", {}).get("labels", []) + forwardauth_address_label = None + + for label in deploy_labels: + if "forwardauth.address=" in label: + forwardauth_address_label = label + break + + assert ( + forwardauth_address_label is not None + ), "forwardauth.address label not found in manual service" + + # Parse the forwardauth address + # Expected format: traefik.http.middlewares.${SWARM_STACK_NAME}_manual-auth.forwardauth.address=http://${WB_AUTH_WEBSERVER_HOST}:${WB_AUTH_WEBSERVER_PORT}/v0/auth:check + address_part = forwardauth_address_label.split("forwardauth.address=")[1] + + # Verify it contains the expected pattern + assert ( + "${WB_AUTH_WEBSERVER_HOST}" in address_part + ), "forwardauth.address should reference WB_AUTH_WEBSERVER_HOST" + assert ( + "${WB_AUTH_WEBSERVER_PORT}" in address_part + ), "forwardauth.address should reference WB_AUTH_WEBSERVER_PORT" + assert ( + "/v0/auth:check" in address_part + ), "forwardauth.address should point to /v0/auth:check endpoint" + + # Verify the full expected pattern + expected_pattern = ( + "http://${WB_AUTH_WEBSERVER_HOST}:${WB_AUTH_WEBSERVER_PORT}/v0/auth:check" + ) + assert ( + address_part == expected_pattern + ), f"forwardauth.address should be '{expected_pattern}', got '{address_part}'" + + # Verify that WB_AUTH_WEBSERVER_HOST and WB_AUTH_WEBSERVER_PORT are configured in the .env-devel file! 
+ wb_auth_host = env_devel_dict.get("WB_AUTH_WEBSERVER_HOST") + wb_auth_port = env_devel_dict.get("WB_AUTH_WEBSERVER_PORT") + + assert ( + wb_auth_host is not None + ), "WB_AUTH_WEBSERVER_HOST should be configured in test environment" + assert ( + wb_auth_port is not None + ), "WB_AUTH_WEBSERVER_PORT should be configured in test environment" diff --git a/services/web/server/tests/unit/with_dbs/03/test_project_db.py b/services/web/server/tests/unit/with_dbs/03/test_project_db.py index 390650bd48ca..e8731a32fb0f 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_project_db.py +++ b/services/web/server/tests/unit/with_dbs/03/test_project_db.py @@ -5,6 +5,7 @@ # pylint: disable=unused-variable import asyncio +import contextlib from collections.abc import AsyncIterator, Awaitable, Callable, Iterator from copy import deepcopy from random import randint @@ -16,14 +17,14 @@ import pytest import sqlalchemy as sa from aiohttp.test_utils import TestClient -from common_library.dict_tools import copy_from_dict_ex, remap_keys from faker import Faker from models_library.projects import ProjectID, ProjectTemplateType from models_library.projects_nodes_io import NodeID, NodeIDStr from psycopg2.errors import UniqueViolation from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.webserver_login import UserInfoDict, log_client_in +from pytest_simcore.helpers.webserver_login import log_client_in +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.utils import logged_gather from simcore_postgres_database.models.projects import ProjectType, projects from simcore_postgres_database.models.projects_to_products import projects_to_products @@ -48,7 +49,6 @@ ProjectNodeRequiredInputsNotSetError, ProjectNotFoundError, ) -from simcore_service_webserver.projects.models import ProjectDict from simcore_service_webserver.users.exceptions import UserNotFoundError from simcore_service_webserver.utils import to_datetime from sqlalchemy.engine.result import Row @@ -710,89 +710,6 @@ async def test_get_node_ids_from_project( assert node_ids_inside_project == set(some_projects_and_nodes[project_id]) -@pytest.mark.parametrize( - "user_role", - [UserRole.USER], -) -async def test_replace_user_project( - db_api: ProjectDBAPI, - user_project: ProjectDict, - logged_user: UserInfoDict, - osparc_product_name: str, - postgres_db: sa.engine.Engine, - aiopg_engine: aiopg.sa.engine.Engine, -): - PROJECT_DICT_IGNORE_FIELDS = {"lastChangeDate"} - original_project = remap_keys( - user_project, - rename={"trashedAt": "trashed"}, - ) - - # replace the project with the same should do nothing - working_project = await db_api.replace_project( - original_project, - user_id=logged_user["id"], - product_name=osparc_product_name, - project_uuid=original_project["uuid"], - ) - assert copy_from_dict_ex( - original_project, PROJECT_DICT_IGNORE_FIELDS - ) == copy_from_dict_ex(working_project, PROJECT_DICT_IGNORE_FIELDS) - _assert_projects_to_product_db_row( - postgres_db, working_project, osparc_product_name - ) - await _assert_projects_nodes_db_rows(aiopg_engine, working_project) - - # now let's create some outputs (similar to what happens when running services) - NODE_INDEX = 1 # this is not the file-picker - node_id = tuple(working_project["workbench"].keys())[NODE_INDEX] - node_data = working_project["workbench"][node_id] - node_data["progress"] = 100 - node_data["outputs"] = { - "output_1": { - "store": 0, - 
"path": "687b8dc2-fea2-11ec-b7fd-02420a6e3a4d/d61a2ec8-19b4-4375-adcb-fdd22f850333/single_number.txt", - "eTag": "c4ca4238a0b923820dcc509a6f75849b", - }, - "output_2": 5, - } - node_data["runHash"] = ( - "5b0583fa546ac82f0e41cef9705175b7187ce3928ba42892e842add912c16676" - ) - # replacing with the new entries shall return the very same data - replaced_project = await db_api.replace_project( - working_project, - user_id=logged_user["id"], - product_name=osparc_product_name, - project_uuid=working_project["uuid"], - ) - assert copy_from_dict_ex( - working_project, PROJECT_DICT_IGNORE_FIELDS - ) == copy_from_dict_ex(replaced_project, PROJECT_DICT_IGNORE_FIELDS) - _assert_projects_to_product_db_row( - postgres_db, replaced_project, osparc_product_name - ) - await _assert_projects_nodes_db_rows(aiopg_engine, replaced_project) - - # the frontend sends project without some fields, but for FRONTEND type of nodes - # replacing should keep the values - FRONTEND_EXCLUDED_FIELDS = ["outputs", "progress", "runHash"] - incoming_frontend_project = deepcopy(original_project) - for node_data in incoming_frontend_project["workbench"].values(): - if "frontend" not in node_data["key"]: - for field in FRONTEND_EXCLUDED_FIELDS: - node_data.pop(field, None) - replaced_project = await db_api.replace_project( - incoming_frontend_project, - user_id=logged_user["id"], - product_name=osparc_product_name, - project_uuid=incoming_frontend_project["uuid"], - ) - assert copy_from_dict_ex( - working_project, PROJECT_DICT_IGNORE_FIELDS - ) == copy_from_dict_ex(replaced_project, PROJECT_DICT_IGNORE_FIELDS) - - @pytest.mark.parametrize("user_role", [UserRole.ANONYMOUS]) # worst case @pytest.mark.parametrize("access_rights", [x.value for x in ProjectAccessRights]) async def test_has_permission( @@ -805,12 +722,13 @@ async def test_has_permission( client: TestClient, aiopg_engine: aiopg.sa.engine.Engine, insert_project_in_db: Callable[..., Awaitable[dict[str, Any]]], + exit_stack: contextlib.AsyncExitStack, ): project_id = faker.uuid4(cast_to=None) owner_id = logged_user["id"] second_user: UserInfoDict = await log_client_in( - client=client, user_data={"role": UserRole.USER.name} + client=client, user_data={"role": UserRole.USER.name}, exit_stack=exit_stack ) new_project = deepcopy(fake_project) diff --git a/services/web/server/tests/unit/with_dbs/03/test_session_access_policies.py b/services/web/server/tests/unit/with_dbs/03/test_session_access_policies.py index 9a20ee62e4c1..bdfe1c7dffe4 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_session_access_policies.py +++ b/services/web/server/tests/unit/with_dbs/03/test_session_access_policies.py @@ -14,7 +14,7 @@ from servicelib.aiohttp import status from simcore_service_webserver.application_settings import ApplicationSettings from simcore_service_webserver.constants import APP_SETTINGS_KEY -from simcore_service_webserver.login._constants import ( +from simcore_service_webserver.login.constants import ( MAX_2FA_CODE_RESEND, MAX_2FA_CODE_TRIALS, MSG_UNAUTHORIZED_LOGIN_2FA, diff --git a/services/web/server/tests/unit/with_dbs/03/test_socketio.py b/services/web/server/tests/unit/with_dbs/03/test_socketio.py index deca5e69c4e2..53fec6171766 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_socketio.py +++ b/services/web/server/tests/unit/with_dbs/03/test_socketio.py @@ -10,7 +10,7 @@ from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import 
EnvVarsDict -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.users import UserRole from simcore_service_webserver.application_settings import ApplicationSettings diff --git a/services/web/server/tests/unit/with_dbs/03/test_users__notifications.py b/services/web/server/tests/unit/with_dbs/03/test_user_notifications_rest.py similarity index 96% rename from services/web/server/tests/unit/with_dbs/03/test_users__notifications.py rename to services/web/server/tests/unit/with_dbs/03/test_user_notifications_rest.py index a1d99e48268f..6cc78e61b449 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users__notifications.py +++ b/services/web/server/tests/unit/with_dbs/03/test_user_notifications_rest.py @@ -22,11 +22,11 @@ from pydantic import TypeAdapter from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.users import UserRole from simcore_service_webserver.redis import get_redis_user_notifications_client -from simcore_service_webserver.users._notifications import ( +from simcore_service_webserver.user_notifications._models import ( MAX_NOTIFICATIONS_FOR_USER_TO_KEEP, MAX_NOTIFICATIONS_FOR_USER_TO_SHOW, NotificationCategory, @@ -34,7 +34,18 @@ UserNotificationCreate, get_notification_key, ) -from simcore_service_webserver.users._notifications_rest import _get_user_notifications +from simcore_service_webserver.user_notifications._repository import ( + UserNotificationsRepository, +) + + +async def _get_user_notifications( + redis_client, + user_id, + product_name, +): + repo = UserNotificationsRepository(redis_client=redis_client) + return await repo.list_notifications(user_id=user_id, product_name=product_name) @pytest.fixture @@ -234,7 +245,7 @@ async def test_create_user_notification( assert error is not None -@pytest.mark.parametrize("user_role", [(UserRole.USER)]) +@pytest.mark.parametrize("user_role", [UserRole.USER]) @pytest.mark.parametrize( "notification_count", [ @@ -290,7 +301,7 @@ async def test_create_user_notification_capped_list_length( assert len(user_notifications) <= MAX_NOTIFICATIONS_FOR_USER_TO_KEEP -@pytest.mark.parametrize("user_role", [(UserRole.USER)]) +@pytest.mark.parametrize("user_role", [UserRole.USER]) async def test_create_user_notification_per_product( logged_user: UserInfoDict, notification_redis_client: aioredis.Redis, diff --git a/services/web/server/tests/unit/with_dbs/03/test_users__preferences_models.py b/services/web/server/tests/unit/with_dbs/03/test_user_preferences_models.py similarity index 98% rename from services/web/server/tests/unit/with_dbs/03/test_users__preferences_models.py rename to services/web/server/tests/unit/with_dbs/03/test_user_preferences_models.py index 0a33d8f8921e..db9527c62f74 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users__preferences_models.py +++ b/services/web/server/tests/unit/with_dbs/03/test_user_preferences_models.py @@ -16,7 +16,7 @@ from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from simcore_service_webserver.application_settings import ApplicationSettings from simcore_service_webserver.constants import 
APP_SETTINGS_KEY -from simcore_service_webserver.users._preferences_models import ( +from simcore_service_webserver.user_preferences._models import ( ALL_FRONTEND_PREFERENCES, TelemetryLowDiskSpaceWarningThresholdFrontendUserPreference, get_preference_identifier, diff --git a/services/web/server/tests/unit/with_dbs/03/test_users__preferences_handlers.py b/services/web/server/tests/unit/with_dbs/03/test_user_preferences_rest.py similarity index 95% rename from services/web/server/tests/unit/with_dbs/03/test_users__preferences_handlers.py rename to services/web/server/tests/unit/with_dbs/03/test_user_preferences_rest.py index 73910d7a2c13..9887d2fbce16 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users__preferences_handlers.py +++ b/services/web/server/tests/unit/with_dbs/03/test_user_preferences_rest.py @@ -15,10 +15,12 @@ from models_library.users import UserID from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict +from pytest_simcore.helpers.webserver_users import NewUser, UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.users import UserRole, UserStatus -from simcore_service_webserver.users._preferences_models import ALL_FRONTEND_PREFERENCES +from simcore_service_webserver.user_preferences._models import ( + ALL_FRONTEND_PREFERENCES, +) @pytest.fixture diff --git a/services/web/server/tests/unit/with_dbs/03/test_users__preferences_api.py b/services/web/server/tests/unit/with_dbs/03/test_user_preferences_service.py similarity index 98% rename from services/web/server/tests/unit/with_dbs/03/test_users__preferences_api.py rename to services/web/server/tests/unit/with_dbs/03/test_user_preferences_service.py index 96f6ba522410..37c3eb8f0c00 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users__preferences_api.py +++ b/services/web/server/tests/unit/with_dbs/03/test_user_preferences_service.py @@ -24,11 +24,11 @@ groups_extra_properties, ) from simcore_postgres_database.models.users import UserStatus -from simcore_service_webserver.users._preferences_models import ( +from simcore_service_webserver.user_preferences._models import ( ALL_FRONTEND_PREFERENCES, BillingCenterUsageColumnOrderFrontendUserPreference, ) -from simcore_service_webserver.users._preferences_service import ( +from simcore_service_webserver.user_preferences._service import ( _get_frontend_user_preferences, get_frontend_user_preferences_aggregation, set_frontend_user_preference, diff --git a/services/web/server/tests/unit/with_dbs/03/test_users__tokens.py b/services/web/server/tests/unit/with_dbs/03/test_user_tokens.py similarity index 93% rename from services/web/server/tests/unit/with_dbs/03/test_users__tokens.py rename to services/web/server/tests/unit/with_dbs/03/test_user_tokens.py index 76481526d96e..56af58d1efe7 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users__tokens.py +++ b/services/web/server/tests/unit/with_dbs/03/test_user_tokens.py @@ -17,15 +17,15 @@ from faker import Faker from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.helpers.webserver_tokens import ( create_token_in_db, delete_all_tokens_from_db, get_token_from_db, ) +from pytest_simcore.helpers.webserver_users import 
UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.users import UserRole -from simcore_service_webserver.db.plugin import get_database_engine +from simcore_service_webserver.db.plugin import get_database_engine_legacy @pytest.fixture @@ -48,7 +48,7 @@ async def tokens_db_cleanup( client: TestClient, ) -> AsyncIterator[None]: assert client.app - engine = get_database_engine(client.app) + engine = get_database_engine_legacy(client.app) yield None @@ -75,7 +75,7 @@ async def fake_tokens( "token_secret": faker.md5(raw_output=False), } await create_token_in_db( - get_database_engine(client.app), + get_database_engine_legacy(client.app), user_id=logged_user["id"], token_service=data["service"], token_data=data, @@ -115,7 +115,7 @@ async def test_create_token( data, error = await assert_status(resp, expected) if not error: db_token = await get_token_from_db( - get_database_engine(client.app), token_data=token + get_database_engine_legacy(client.app), token_data=token ) assert db_token assert db_token["token_data"] == token @@ -191,5 +191,7 @@ async def test_delete_token( if not error: assert not ( - await get_token_from_db(get_database_engine(client.app), token_service=sid) + await get_token_from_db( + get_database_engine_legacy(client.app), token_service=sid + ) ) diff --git a/services/web/server/tests/unit/with_dbs/03/test_users_rest_models.py b/services/web/server/tests/unit/with_dbs/03/test_users_rest_models.py index ef68295a7f09..8b797d25bd44 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users_rest_models.py +++ b/services/web/server/tests/unit/with_dbs/03/test_users_rest_models.py @@ -14,20 +14,23 @@ from faker import Faker from models_library.api_schemas_webserver.auth import AccountRequestInfo from pytest_simcore.helpers.faker_factories import random_pre_registration_details -from simcore_service_webserver.users._common.schemas import ( +from simcore_service_webserver.models import PhoneNumberStr +from simcore_service_webserver.users._controller.rest._rest_schemas import ( MAX_BYTES_SIZE_EXTRAS, - PreRegisteredUserGet, + UserAccountRestPreRegister, ) @pytest.fixture -def account_request_form(faker: Faker) -> dict[str, Any]: +def account_request_form( + faker: Faker, user_phone_number: PhoneNumberStr +) -> dict[str, Any]: # This is AccountRequestInfo.form form = { "firstName": faker.first_name(), "lastName": faker.last_name(), "email": faker.email(), - "phone": faker.phone_number(), + "phone": user_phone_number, "company": faker.company(), # billing info "address": faker.address().replace("\n", ", "), @@ -66,7 +69,7 @@ def test_preuserprofile_parse_model_from_request_form_data( data["comment"] = "extra comment" # pre-processors - pre_user_profile = PreRegisteredUserGet(**data) + pre_user_profile = UserAccountRestPreRegister(**data) print(pre_user_profile.model_dump_json(indent=1)) @@ -90,11 +93,11 @@ def test_preuserprofile_parse_model_without_extras( ): required = { f.alias or f_name - for f_name, f in PreRegisteredUserGet.model_fields.items() + for f_name, f in UserAccountRestPreRegister.model_fields.items() if f.is_required() } data = {k: account_request_form[k] for k in required} - assert not PreRegisteredUserGet(**data).extras + assert not UserAccountRestPreRegister(**data).extras def test_preuserprofile_max_bytes_size_extras_limits(faker: Faker): @@ -114,7 +117,7 @@ def test_preuserprofile_pre_given_names( account_request_form["firstName"] = given_name account_request_form["lastName"] = given_name - pre_user_profile = 
PreRegisteredUserGet(**account_request_form) + pre_user_profile = UserAccountRestPreRegister(**account_request_form) print(pre_user_profile.model_dump_json(indent=1)) assert pre_user_profile.first_name in ["Pedro-Luis", "Pedro Luis"] assert pre_user_profile.first_name == pre_user_profile.last_name diff --git a/services/web/server/tests/unit/with_dbs/03/test_users_rest_phone.py b/services/web/server/tests/unit/with_dbs/03/test_users_rest_phone.py new file mode 100644 index 000000000000..c93af68cd925 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/03/test_users_rest_phone.py @@ -0,0 +1,494 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=too-many-statements +# pylint: disable=unused-argument +# pylint: disable=unused-variable + + +from http import HTTPStatus + +import pytest +from aiohttp.test_utils import TestClient +from common_library.users_enums import UserRole +from models_library.api_schemas_webserver.users import ( + MyProfileRestGet, +) +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.webserver_users import UserInfoDict +from servicelib.aiohttp import status +from simcore_service_webserver.models import PhoneNumberStr +from simcore_service_webserver.users._controller.rest.users_rest import ( + _REGISTRATION_CODE_VALUE_FAKE, +) + + +@pytest.fixture +def app_environment( + app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch +) -> EnvVarsDict: + # disables GC and DB-listener + return app_environment | setenvs_from_dict( + monkeypatch, + { + "WEBSERVER_GARBAGE_COLLECTOR": "null", + "WEBSERVER_DB_LISTENER": "0", + "WEBSERVER_DEV_FEATURES_ENABLED": "1", # NOTE: still under development + }, + ) + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_phone_registration_basic_workflow( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, +): + assert client.app + + # GET initial profile + url = client.app.router["get_my_profile"].url_for() + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + + initial_profile = MyProfileRestGet.model_validate(data) + initial_phone = initial_profile.phone + assert initial_phone + + # REGISTER phone number + # Change the last 3 digits of the initial phone number to '999' + new_phone = f"{initial_phone[:-3]}999" + url = client.app.router["my_phone_register"].url_for() + resp = await client.post( + f"{url}", + json={ + "phone": new_phone, + }, + ) + await assert_status(resp, status.HTTP_202_ACCEPTED) + + # CONFIRM phone registration + url = client.app.router["my_phone_confirm"].url_for() + resp = await client.post( + f"{url}", + json={ + "code": _REGISTRATION_CODE_VALUE_FAKE, + }, + ) + await assert_status(resp, status.HTTP_204_NO_CONTENT) + + # GET updated profile + url = client.app.router["get_my_profile"].url_for() + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + + updated_profile = MyProfileRestGet.model_validate(data) + + # Verify phone was updated + assert updated_profile.phone == new_phone + assert updated_profile.phone != initial_phone + + # Verify other fields remained unchanged + assert updated_profile.first_name == initial_profile.first_name + assert updated_profile.last_name == initial_profile.last_name + assert updated_profile.login == initial_profile.login + assert 
updated_profile.user_name == initial_profile.user_name + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_phone_registration_workflow( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, +): + assert client.app + + # GET initial profile + url = client.app.router["get_my_profile"].url_for() + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + + initial_profile = MyProfileRestGet.model_validate(data) + initial_phone = initial_profile.phone + assert initial_phone + + # STEP 1: REGISTER phone number + new_phone = f"{initial_phone[:-3]}999" # Change the last 3 digits to '999' + url = client.app.router["my_phone_register"].url_for() + resp = await client.post( + f"{url}", + json={ + "phone": new_phone, + }, + ) + await assert_status(resp, status.HTTP_202_ACCEPTED) + + # STEP 2: CONFIRM phone registration + url = client.app.router["my_phone_confirm"].url_for() + resp = await client.post( + f"{url}", + json={ + "code": _REGISTRATION_CODE_VALUE_FAKE, + }, + ) + await assert_status(resp, status.HTTP_204_NO_CONTENT) + + # GET updated profile + url = client.app.router["get_my_profile"].url_for() + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + + updated_profile = MyProfileRestGet.model_validate(data) + + # Verify phone was updated + assert updated_profile.phone == new_phone + assert updated_profile.phone != initial_phone + + # Verify other fields remained unchanged + assert updated_profile.first_name == initial_profile.first_name + assert updated_profile.last_name == initial_profile.last_name + assert updated_profile.login == initial_profile.login + assert updated_profile.user_name == initial_profile.user_name + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_phone_registration_with_resend( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, + user_phone_number: PhoneNumberStr, +): + assert client.app + + # STEP 1: REGISTER phone number + new_phone = user_phone_number + url = client.app.router["my_phone_register"].url_for() + resp = await client.post( + f"{url}", + json={ + "phone": new_phone, + }, + ) + await assert_status(resp, status.HTTP_202_ACCEPTED) + + # STEP 2: RESEND code (optional step) + url = client.app.router["my_phone_resend"].url_for() + resp = await client.post(f"{url}") + await assert_status(resp, status.HTTP_202_ACCEPTED) + + # STEP 3: CONFIRM phone registration + url = client.app.router["my_phone_confirm"].url_for() + resp = await client.post( + f"{url}", + json={ + "code": _REGISTRATION_CODE_VALUE_FAKE, + }, + ) + await assert_status(resp, status.HTTP_204_NO_CONTENT) + + # GET updated profile + url = client.app.router["get_my_profile"].url_for() + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + + updated_profile = MyProfileRestGet.model_validate(data) + + # Verify phone was updated + assert updated_profile.phone == new_phone + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_phone_registration_change_existing_phone( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, + user_phone_number: PhoneNumberStr, +): + assert client.app + + # Set initial phone + first_phone = user_phone_number + url = client.app.router["my_phone_register"].url_for() + resp = await client.post( + f"{url}", + json={ + "phone": first_phone, + }, + ) + await assert_status(resp, status.HTTP_202_ACCEPTED) + + url = 
client.app.router["my_phone_confirm"].url_for() + resp = await client.post( + f"{url}", + json={ + "code": _REGISTRATION_CODE_VALUE_FAKE, + }, + ) + await assert_status(resp, status.HTTP_204_NO_CONTENT) + + # Change to new phone + # Create a different phone number by changing the last digits + new_phone = user_phone_number[:-4] + "9999" + url = client.app.router["my_phone_register"].url_for() + resp = await client.post( + f"{url}", + json={ + "phone": new_phone, + }, + ) + await assert_status(resp, status.HTTP_202_ACCEPTED) + + url = client.app.router["my_phone_confirm"].url_for() + resp = await client.post( + f"{url}", + json={ + "code": _REGISTRATION_CODE_VALUE_FAKE, + }, + ) + await assert_status(resp, status.HTTP_204_NO_CONTENT) + + # GET updated profile + url = client.app.router["get_my_profile"].url_for() + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + + updated_profile = MyProfileRestGet.model_validate(data) + + # Verify phone was updated to new phone + assert updated_profile.phone == new_phone + assert updated_profile.phone != first_phone + + +# +# PHONE REGISTRATION FAILURE TESTS +# + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_phone_resend_without_pending_registration( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, +): + assert client.app + + # Try to resend code without any pending registration + url = client.app.router["my_phone_resend"].url_for() + resp = await client.post(f"{url}") + await assert_status(resp, status.HTTP_400_BAD_REQUEST) + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_phone_confirm_without_pending_registration( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, +): + assert client.app + + # Try to confirm code without any pending registration + url = client.app.router["my_phone_confirm"].url_for() + resp = await client.post( + f"{url}", + json={ + "code": _REGISTRATION_CODE_VALUE_FAKE, + }, + ) + await assert_status(resp, status.HTTP_400_BAD_REQUEST) + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_phone_confirm_with_wrong_code( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, + user_phone_number: PhoneNumberStr, +): + assert client.app + + # STEP 1: REGISTER phone number + new_phone = user_phone_number + url = client.app.router["my_phone_register"].url_for() + resp = await client.post( + f"{url}", + json={ + "phone": new_phone, + }, + ) + await assert_status(resp, status.HTTP_202_ACCEPTED) + + # STEP 2: Try to confirm with wrong code + url = client.app.router["my_phone_confirm"].url_for() + resp = await client.post( + f"{url}", + json={ + "code": "wrongcode1234", + }, + ) + await assert_status(resp, status.HTTP_400_BAD_REQUEST) + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_phone_confirm_with_invalid_code_format( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, + user_phone_number: PhoneNumberStr, +): + assert client.app + + # STEP 1: REGISTER phone number + new_phone = user_phone_number + url = client.app.router["my_phone_register"].url_for() + resp = await client.post( + f"{url}", + json={ + "phone": new_phone, + }, + ) + await assert_status(resp, status.HTTP_202_ACCEPTED) + + # STEP 2: Try to confirm with invalid code format (contains special characters) + url = client.app.router["my_phone_confirm"].url_for() + resp = await client.post( + f"{url}", + json={ + "code": "123-456", # Invalid format 
according to pattern + }, + ) + await assert_status(resp, status.HTTP_422_UNPROCESSABLE_ENTITY) + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_phone_register_with_empty_phone( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, +): + assert client.app + + # Try to register with empty phone number + url = client.app.router["my_phone_register"].url_for() + resp = await client.post( + f"{url}", + json={ + "phone": "", # Empty phone number + }, + ) + await assert_status(resp, status.HTTP_422_UNPROCESSABLE_ENTITY) + + # Try to register with whitespace-only phone number + url = client.app.router["my_phone_register"].url_for() + resp = await client.post( + f"{url}", + json={ + "phone": " ", # Whitespace only + }, + ) + await assert_status(resp, status.HTTP_422_UNPROCESSABLE_ENTITY) + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_phone_confirm_with_empty_code( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, + user_phone_number: PhoneNumberStr, +): + assert client.app + + # STEP 1: REGISTER phone number + new_phone = user_phone_number + url = client.app.router["my_phone_register"].url_for() + resp = await client.post( + f"{url}", + json={ + "phone": new_phone, + }, + ) + await assert_status(resp, status.HTTP_202_ACCEPTED) + + # STEP 2: Try to confirm with empty code + url = client.app.router["my_phone_confirm"].url_for() + resp = await client.post( + f"{url}", + json={ + "code": "", # Empty code + }, + ) + await assert_status(resp, status.HTTP_422_UNPROCESSABLE_ENTITY) + + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), + (UserRole.GUEST, status.HTTP_403_FORBIDDEN), + ], +) +async def test_phone_register_access_rights( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, + expected: HTTPStatus, + user_phone_number: PhoneNumberStr, +): + assert client.app + + # Try to register phone with insufficient permissions + url = client.app.router["my_phone_register"].url_for() + resp = await client.post( + f"{url}", + json={ + "phone": user_phone_number, + }, + ) + await assert_status(resp, expected) + + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), + (UserRole.GUEST, status.HTTP_403_FORBIDDEN), + ], +) +async def test_phone_resend_access_rights( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, + expected: HTTPStatus, +): + assert client.app + + # Try to resend code with insufficient permissions + url = client.app.router["my_phone_resend"].url_for() + resp = await client.post(f"{url}") + await assert_status(resp, expected) + + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), + (UserRole.GUEST, status.HTTP_403_FORBIDDEN), + ], +) +async def test_phone_confirm_access_rights( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, + expected: HTTPStatus, +): + assert client.app + + # Try to confirm code with insufficient permissions + url = client.app.router["my_phone_confirm"].url_for() + resp = await client.post( + f"{url}", + json={ + "code": _REGISTRATION_CODE_VALUE_FAKE, + }, + ) + await assert_status(resp, expected) diff --git a/services/web/server/tests/unit/with_dbs/03/test_users_rest_profile.py b/services/web/server/tests/unit/with_dbs/03/test_users_rest_profile.py new file mode 100644 index 000000000000..c5ec419dbf66 --- /dev/null +++ 
b/services/web/server/tests/unit/with_dbs/03/test_users_rest_profile.py @@ -0,0 +1,610 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=too-many-statements +# pylint: disable=unused-argument +# pylint: disable=unused-variable + + +import functools +from collections.abc import AsyncIterator +from copy import deepcopy +from http import HTTPStatus +from typing import Any +from unittest.mock import patch + +import pytest +import sqlalchemy as sa +from aiohttp.test_utils import TestClient +from aiopg.sa.connection import SAConnection +from common_library.users_enums import UserRole +from models_library.api_schemas_webserver.users import ( + MyProfileRestGet, +) +from models_library.groups import AccessRightsDict +from psycopg2 import OperationalError +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict +from pytest_simcore.helpers.webserver_users import UserInfoDict +from servicelib.aiohttp import status +from servicelib.rest_constants import RESPONSE_MODEL_POLICY +from simcore_service_webserver.products.models import Product +from simcore_service_webserver.user_preferences._service import ( + get_frontend_user_preferences_aggregation, +) +from sqlalchemy.exc import OperationalError as SQLAlchemyOperationalError +from sqlalchemy.ext.asyncio import AsyncConnection + + +@pytest.fixture +def app_environment( + app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch +) -> EnvVarsDict: + # disables GC and DB-listener + return app_environment | setenvs_from_dict( + monkeypatch, + { + "WEBSERVER_GARBAGE_COLLECTOR": "null", + "WEBSERVER_DB_LISTENER": "0", + "WEBSERVER_DEV_FEATURES_ENABLED": "1", # NOTE: still under development + }, + ) + + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), + *((r, status.HTTP_200_OK) for r in UserRole if r >= UserRole.GUEST), + ], +) +async def test_access_rights_on_get_profile( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, + expected: HTTPStatus, +): + assert client.app + + url = client.app.router["get_my_profile"].url_for() + assert url.path == "/v0/me" + + resp = await client.get(f"{url}") + await assert_status(resp, expected) + + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), + (UserRole.GUEST, status.HTTP_403_FORBIDDEN), + *((r, status.HTTP_204_NO_CONTENT) for r in UserRole if r >= UserRole.USER), + ], +) +async def test_access_update_profile( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, + expected: HTTPStatus, +): + assert client.app + + url = client.app.router["update_my_profile"].url_for() + assert url.path == "/v0/me" + + resp = await client.patch(f"{url}", json={"last_name": "Foo"}) + await assert_status(resp, expected) + + +@pytest.fixture +def product(client: TestClient, osparc_product_name: str) -> Product: + assert client.app + from simcore_service_webserver.products import products_service + + return products_service.get_product(client.app, osparc_product_name) + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_get_profile_user_not_in_support_group( + support_group_before_app_starts: dict[str, Any], + # after app starts because it modifies the product + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, + primary_group: dict[str, Any], + 
standard_groups: list[dict[str, Any]], + all_group: dict[str, str], + product: Product, +): + assert client.app + + url = client.app.router["get_my_profile"].url_for() + assert url.path == "/v0/me" + + resp = await client.get(f"{url}") + data, error = await assert_status(resp, status.HTTP_200_OK) + + assert not error + profile = MyProfileRestGet.model_validate(data) + + assert profile.login == logged_user["email"] + assert profile.first_name == logged_user.get("first_name", None) + assert profile.last_name == logged_user.get("last_name", None) + assert profile.role == user_role.name + assert profile.groups + assert profile.expiration_date is None + + got_profile_groups = profile.groups.model_dump(**RESPONSE_MODEL_POLICY, mode="json") + assert got_profile_groups["me"] == primary_group + assert got_profile_groups["all"] == all_group + assert got_profile_groups["product"] == { + "accessRights": {"delete": False, "read": False, "write": False}, + "description": "osparc product group", + "gid": product.group_id, + "label": "osparc", + "thumbnail": None, + } + + # support group exists + assert got_profile_groups["support"] + support_group_id = got_profile_groups["support"]["gid"] + + assert support_group_id == support_group_before_app_starts["gid"] + assert ( + got_profile_groups["support"]["description"] + == support_group_before_app_starts["description"] + ) + assert "accessRights" not in got_profile_groups["support"] + + # standard groups with at least read access + sorted_by_group_id = functools.partial(sorted, key=lambda d: d["gid"]) + assert sorted_by_group_id( + got_profile_groups["organizations"] + ) == sorted_by_group_id(standard_groups) + + # user is NOT part of the support group + all_standard_groups_ids = {g["gid"] for g in standard_groups} + assert support_group_id not in all_standard_groups_ids + + # preferences + assert profile.preferences == await get_frontend_user_preferences_aggregation( + client.app, user_id=logged_user["id"], product_name="osparc" + ) + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_get_profile_user_in_support_group( + support_group_before_app_starts: dict[str, Any], + # after app starts because it modifies the product + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, + primary_group: dict[str, Any], + standard_groups: list[dict[str, Any]], + all_group: dict[str, str], + product: Product, +): + assert client.app + from simcore_service_webserver.groups import _groups_repository + + # Now add user to support group with read-only access + await _groups_repository.add_new_user_in_group( + client.app, + group_id=support_group_before_app_starts["gid"], + new_user_id=logged_user["id"], + access_rights=AccessRightsDict(read=True, write=False, delete=False), + ) + + url = client.app.router["get_my_profile"].url_for() + assert url.path == "/v0/me" + + resp = await client.get(f"{url}") + data, error = await assert_status(resp, status.HTTP_200_OK) + + assert not error + profile = MyProfileRestGet.model_validate(data) + + assert profile.login == logged_user["email"] + assert profile.first_name == logged_user.get("first_name", None) + assert profile.last_name == logged_user.get("last_name", None) + assert profile.role == user_role.name + assert profile.groups + assert profile.expiration_date is None + + got_profile_groups = profile.groups.model_dump(**RESPONSE_MODEL_POLICY, mode="json") + assert got_profile_groups["me"] == primary_group + assert got_profile_groups["all"] == all_group + assert got_profile_groups["product"] 
== { + "accessRights": {"delete": False, "read": False, "write": False}, + "description": "osparc product group", + "gid": product.group_id, + "label": "osparc", + "thumbnail": None, + } + + # support group exists + assert got_profile_groups["support"] + support_group_id = got_profile_groups["support"]["gid"] + + assert support_group_id == support_group_before_app_starts["gid"] + assert ( + got_profile_groups["support"]["description"] + == support_group_before_app_starts["description"] + ) + assert "accessRights" not in got_profile_groups["support"] + + # When user is part of support group, it should appear in standard groups + sorted_by_group_id = functools.partial(sorted, key=lambda d: d["gid"]) + expected_standard_groups = [ + *standard_groups, + { + "gid": support_group_id, + "label": support_group_before_app_starts["name"], + "description": support_group_before_app_starts["description"], + "thumbnail": None, + "accessRights": {"read": True, "write": False, "delete": False}, + }, + ] + assert sorted_by_group_id( + got_profile_groups["organizations"] + ) == sorted_by_group_id(expected_standard_groups) + assert support_group_id in {g["gid"] for g in got_profile_groups["organizations"]} + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_update_profile( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, +): + assert client.app + + # GET + url = client.app.router["get_my_profile"].url_for() + resp = await client.get(f"{url}") + + data, _ = await assert_status(resp, status.HTTP_200_OK) + assert data["role"] == user_role.name + before = deepcopy(data) + + # UPDATE + url = client.app.router["update_my_profile"].url_for() + resp = await client.patch( + f"{url}", + json={ + "last_name": "Foo", + }, + ) + _, error = await assert_status(resp, status.HTTP_204_NO_CONTENT) + assert not error + + # GET + url = client.app.router["get_my_profile"].url_for() + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + + assert data["last_name"] == "Foo" + + def _copy(data: dict, exclude: set) -> dict: + return {k: v for k, v in data.items() if k not in exclude} + + exclude = {"last_name"} + assert _copy(data, exclude) == _copy(before, exclude) + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_profile_workflow( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, +): + assert client.app + + url = client.app.router["get_my_profile"].url_for() + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + my_profile = MyProfileRestGet.model_validate(data) + + url = client.app.router["update_my_profile"].url_for() + resp = await client.patch( + f"{url}", + json={ + "first_name": "Odei", # NOTE: still not camecase! 
+            "userName": "odei123",
+            "privacy": {"hideFullname": False},
+        },
+    )
+    await assert_status(resp, status.HTTP_204_NO_CONTENT)
+
+    url = client.app.router["get_my_profile"].url_for()
+    resp = await client.get(f"{url}")
+    data, _ = await assert_status(resp, status.HTTP_200_OK)
+    updated_profile = MyProfileRestGet.model_validate(data)
+
+    assert updated_profile.first_name != my_profile.first_name
+    assert updated_profile.last_name == my_profile.last_name
+    assert updated_profile.login == my_profile.login
+
+    assert updated_profile.user_name != my_profile.user_name
+    assert updated_profile.user_name == "odei123"
+
+    assert updated_profile.privacy != my_profile.privacy
+    assert updated_profile.privacy.hide_username == my_profile.privacy.hide_username
+    assert updated_profile.privacy.hide_email == my_profile.privacy.hide_email
+    assert updated_profile.privacy.hide_fullname != my_profile.privacy.hide_fullname
+
+
+@pytest.mark.parametrize("user_role", [UserRole.USER])
+@pytest.mark.parametrize("invalid_username", ["", "_foo", "superadmin", "foo..-123"])
+async def test_update_wrong_user_name(
+    user_role: UserRole,
+    logged_user: UserInfoDict,
+    client: TestClient,
+    invalid_username: str,
+):
+    assert client.app
+
+    url = client.app.router["update_my_profile"].url_for()
+    resp = await client.patch(
+        f"{url}",
+        json={
+            "userName": invalid_username,
+        },
+    )
+    await assert_status(resp, status.HTTP_422_UNPROCESSABLE_ENTITY)
+
+
+@pytest.mark.parametrize("user_role", [UserRole.USER])
+async def test_update_existing_user_name(
+    user_role: UserRole,
+    user: UserInfoDict,
+    logged_user: UserInfoDict,
+    client: TestClient,
+):
+    assert client.app
+
+    other_username = user["name"]
+    assert other_username != logged_user["name"]
+
+    # update with SAME username (i.e. existing)
+    url = client.app.router["get_my_profile"].url_for()
+    resp = await client.get(f"{url}")
+    data, _ = await assert_status(resp, status.HTTP_200_OK)
+
+    assert data["userName"] == logged_user["name"]
+
+    url = client.app.router["update_my_profile"].url_for()
+    resp = await client.patch(
+        f"{url}",
+        json={
+            "userName": other_username,
+        },
+    )
+    await assert_status(resp, status.HTTP_409_CONFLICT)
+
+
+@pytest.mark.parametrize(
+    "user_role,expected",
+    [
+        (UserRole.USER, status.HTTP_503_SERVICE_UNAVAILABLE),
+    ],
+)
+async def test_get_profile_with_failing_db_connection(
+    logged_user: UserInfoDict,
+    client: TestClient,
+    expected: HTTPStatus,
+):
+    """
+    Reproduces issue https://github.com/ITISFoundation/osparc-simcore/pull/1160
+
+    A logged user fails to get their profile because, even though authentication succeeds, the database connection fails,
+
+    i.e.
conn.execute(query) will raise psycopg2.OperationalError: server closed the connection unexpectedly + + SEE: + - https://github.com/ITISFoundation/osparc-simcore/issues/880 + - https://github.com/ITISFoundation/osparc-simcore/pull/1160 + """ + assert client.app + + url = client.app.router["get_my_profile"].url_for() + assert str(url) == "/v0/me" + + with patch.object(SAConnection, "execute") as mock_sa_execute, patch.object( + AsyncConnection, "execute" + ) as mock_async_execute: + + # Emulates a database connection failure + mock_sa_execute.side_effect = OperationalError( + "MOCK: server closed the connection unexpectedly" + ) + mock_async_execute.side_effect = SQLAlchemyOperationalError( + statement="MOCK statement", + params=(), + orig=OperationalError("MOCK: server closed the connection unexpectedly"), + ) + + resp = await client.get(url.path) + + data, error = await assert_status(resp, expected) + assert not data + assert error["message"] == "Authentication service is temporary unavailable" + + +@pytest.fixture +async def user_pre_registration( + client: TestClient, + logged_user: UserInfoDict, +) -> AsyncIterator[int]: + """Creates pre-registration data for the logged user and yields the pre-registration ID. + Automatically cleans up after the test.""" + from simcore_postgres_database.models.users_details import ( + users_pre_registration_details, + ) + from simcore_service_webserver.db.plugin import get_asyncpg_engine + from simcore_service_webserver.users._accounts_repository import ( + create_user_pre_registration, + ) + + asyncpg_engine = get_asyncpg_engine(client.app) + + # Create pre-registration data for the logged user + pre_registration_details = { + "pre_first_name": "Pre-Registered", + "pre_last_name": "User", + "institution": "Test University", + "address": "123 Test Street", + "city": "Test City", + "state": "Test State", + "postal_code": "12345", + "country": "US", + } + + pre_registration_id = await create_user_pre_registration( + asyncpg_engine, + email=logged_user["email"], + created_by=None, # Self-registration + product_name="osparc", + link_to_existing_user=True, + **pre_registration_details, + ) + + try: + yield pre_registration_id + finally: + # Clean up pre-registration data + async with asyncpg_engine.connect() as conn: + await conn.execute( + sa.delete(users_pre_registration_details).where( + users_pre_registration_details.c.id == pre_registration_id + ) + ) + await conn.commit() + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_get_profile_user_with_pre_registration( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, + primary_group: dict[str, Any], + standard_groups: list[dict[str, Any]], + all_group: dict[str, str], + product: Product, + user_pre_registration: int, +): + """Test getting profile of a user that has pre-registration data""" + assert client.app + + url = client.app.router["get_my_profile"].url_for() + assert url.path == "/v0/me" + + resp = await client.get(f"{url}") + data, error = await assert_status(resp, status.HTTP_200_OK) + + assert not error + profile = MyProfileRestGet.model_validate(data) + + assert profile.login == logged_user["email"] + assert profile.first_name == logged_user.get("first_name", None) + assert profile.last_name == logged_user.get("last_name", None) + assert profile.role == user_role.name + assert profile.groups + assert profile.expiration_date is None + + # Verify profile groups structure is intact + got_profile_groups = 
profile.groups.model_dump(**RESPONSE_MODEL_POLICY, mode="json") + assert got_profile_groups["me"] == primary_group + assert got_profile_groups["all"] == all_group + + # Verify contact information from pre-registration is populated + assert profile.contact is not None + assert profile.contact.institution == "Test University" + assert profile.contact.address == "123 Test Street" + assert profile.contact.city == "Test City" + assert profile.contact.state == "Test State" + assert profile.contact.postal_code == "12345" + assert profile.contact.country == "US" + + # Verify preferences are still working + assert profile.preferences == await get_frontend_user_preferences_aggregation( + client.app, user_id=logged_user["id"], product_name="osparc" + ) + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_get_profile_user_without_pre_registration( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, + primary_group: dict[str, Any], + standard_groups: list[dict[str, Any]], + all_group: dict[str, str], + product: Product, +): + """Test getting profile of a user that does not have pre-registration data""" + assert client.app + + from simcore_service_webserver.db.plugin import get_asyncpg_engine + from simcore_service_webserver.users._accounts_repository import ( + search_merged_pre_and_registered_users, + ) + + asyncpg_engine = get_asyncpg_engine(client.app) + + # Verify user has no pre-registration data + pre_reg_users = await search_merged_pre_and_registered_users( + asyncpg_engine, + filter_by_email_like=logged_user["email"], + product_name="osparc", + ) + + # Filter for exact email match and pre-registration records + user_pre_regs = [ + row + for row in pre_reg_users + if row.pre_email == logged_user["email"] and row.id is not None + ] + assert len(user_pre_regs) == 0, "User should not have pre-registration data" + + url = client.app.router["get_my_profile"].url_for() + assert url.path == "/v0/me" + + resp = await client.get(f"{url}") + data, error = await assert_status(resp, status.HTTP_200_OK) + + assert not error + profile = MyProfileRestGet.model_validate(data) + + assert profile.login == logged_user["email"] + assert profile.first_name == logged_user.get("first_name", None) + assert profile.last_name == logged_user.get("last_name", None) + assert profile.role == user_role.name + assert profile.groups + assert profile.expiration_date is None + + # Verify profile groups structure + got_profile_groups = profile.groups.model_dump(**RESPONSE_MODEL_POLICY, mode="json") + assert got_profile_groups["me"] == primary_group + assert got_profile_groups["all"] == all_group + assert got_profile_groups["product"] == { + "accessRights": {"delete": False, "read": False, "write": False}, + "description": "osparc product group", + "gid": product.group_id, + "label": "osparc", + "thumbnail": None, + } + + # Verify no contact information since no pre-registration exists + assert profile.contact is None + + # Verify standard groups + sorted_by_group_id = functools.partial(sorted, key=lambda d: d["gid"]) + assert sorted_by_group_id( + got_profile_groups["organizations"] + ) == sorted_by_group_id(standard_groups) + + # Verify preferences are working + assert profile.preferences == await get_frontend_user_preferences_aggregation( + client.app, user_id=logged_user["id"], product_name="osparc" + ) diff --git a/services/web/server/tests/unit/with_dbs/03/test_users_rest_profiles.py b/services/web/server/tests/unit/with_dbs/03/test_users_rest_search.py similarity index 52% rename 
from services/web/server/tests/unit/with_dbs/03/test_users_rest_profiles.py rename to services/web/server/tests/unit/with_dbs/03/test_users_rest_search.py index 29760d166075..7473033cbd1a 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users_rest_profiles.py +++ b/services/web/server/tests/unit/with_dbs/03/test_users_rest_search.py @@ -6,36 +6,23 @@ # pylint: disable=unused-variable -import functools from collections.abc import AsyncIterable -from copy import deepcopy -from http import HTTPStatus -from typing import Any -from unittest.mock import MagicMock, Mock import pytest from aiohttp.test_utils import TestClient -from aiopg.sa.connection import SAConnection from common_library.users_enums import UserRole from models_library.api_schemas_webserver.groups import GroupUserGet from models_library.api_schemas_webserver.users import ( - MyProfileGet, UserGet, ) -from psycopg2 import OperationalError from pydantic import TypeAdapter from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from pytest_simcore.helpers.webserver_login import ( - NewUser, - UserInfoDict, switch_client_session_to, ) +from pytest_simcore.helpers.webserver_users import NewUser, UserInfoDict from servicelib.aiohttp import status -from servicelib.rest_constants import RESPONSE_MODEL_POLICY -from simcore_service_webserver.users._preferences_service import ( - get_frontend_user_preferences_aggregation, -) @pytest.fixture @@ -48,6 +35,7 @@ def app_environment( { "WEBSERVER_GARBAGE_COLLECTOR": "null", "WEBSERVER_DB_LISTENER": "0", + "WEBSERVER_DEV_FEATURES_ENABLED": "1", # NOTE: still under development }, ) @@ -82,6 +70,7 @@ async def private_user( "first_name": partial_first_name, "last_name": "Bond", "email": f"james{partial_email}", + # Maximum privacy "privacy_hide_username": True, "privacy_hide_email": True, "privacy_hide_fullname": True, @@ -102,6 +91,7 @@ async def semi_private_user( "first_name": partial_first_name, "last_name": "Maxwell", "email": "j@maxwell.me", + # Medium privacy "privacy_hide_username": False, "privacy_hide_email": True, "privacy_hide_fullname": False, # <-- @@ -122,6 +112,7 @@ async def public_user( "first_name": "Taylor", "last_name": "Swift", "email": f"taylor{partial_email}", + # Fully public "privacy_hide_username": False, "privacy_hide_email": False, "privacy_hide_fullname": False, @@ -130,6 +121,52 @@ async def public_user( yield usr +@pytest.mark.acceptance_test( + "https://github.com/ITISFoundation/osparc-issues/issues/1779" +) +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_get_user_by_group_id( + user_role: UserRole, + logged_user: UserInfoDict, + client: TestClient, + public_user: UserInfoDict, + private_user: UserInfoDict, +): + assert client.app + assert user_role == logged_user["role"] + + assert private_user["id"] != logged_user["id"] + assert public_user["id"] != logged_user["id"] + + # GET public_user by its primary gid + url = client.app.router["get_all_group_users"].url_for( + gid=f"{public_user['primary_gid']}" + ) + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + + users = TypeAdapter(list[GroupUserGet]).validate_python(data) + assert len(users) == 1 + assert users[0].id == public_user["id"] + assert users[0].user_name == public_user["name"] + assert users[0].first_name == public_user.get("first_name") + assert users[0].last_name == public_user.get("last_name") + + # GET private_user by its primary gid 
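# --- [Editor's note: illustration only, not part of the diff] ----------------
# The private_user / semi_private_user / public_user fixtures above differ only
# in their privacy_hide_* flags, and the relocated test_get_user_by_group_id
# asserts that hidden fields come back as None. A minimal sketch of that
# masking contract, using a hypothetical helper name (the real logic lives in
# the webserver's users service, not in these tests):

def apply_privacy_settings(user: dict) -> dict:
    """Return a copy of `user` with fields blanked according to its privacy flags."""
    masked = dict(user)
    if user.get("privacy_hide_username"):
        masked["user_name"] = None
    if user.get("privacy_hide_email"):
        masked["email"] = None
    if user.get("privacy_hide_fullname"):
        masked["first_name"] = None
        masked["last_name"] = None
    return masked

# --- [End of editor's note] ---------------------------------------------------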
+ url = client.app.router["get_all_group_users"].url_for( + gid=f"{private_user['primary_gid']}" + ) + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + + users = TypeAdapter(list[GroupUserGet]).validate_python(data) + assert len(users) == 1 + assert users[0].id == private_user["id"] + assert users[0].user_name is None, "It's private" + assert users[0].first_name is None, "It's private" + assert users[0].last_name is None, "It's private" + + @pytest.mark.parametrize("user_role", [UserRole.USER]) async def test_search_users_by_partial_fullname( user_role: UserRole, @@ -141,7 +178,7 @@ async def test_search_users_by_partial_fullname( public_user: UserInfoDict, ): assert client.app - assert user_role.value == logged_user["role"] + assert user_role == logged_user["role"] # logged_user has default settings assert private_user["id"] != logged_user["id"] @@ -270,323 +307,3 @@ async def test_search_myself( assert found[0].email == user["email"] assert found[0].first_name == user.get("first_name") assert found[0].last_name == user.get("last_name") - - -@pytest.mark.acceptance_test( - "https://github.com/ITISFoundation/osparc-issues/issues/1779" -) -@pytest.mark.parametrize("user_role", [UserRole.USER]) -async def test_get_user_by_group_id( - user_role: UserRole, - logged_user: UserInfoDict, - client: TestClient, - public_user: UserInfoDict, - private_user: UserInfoDict, -): - assert client.app - assert user_role.value == logged_user["role"] - - assert private_user["id"] != logged_user["id"] - assert public_user["id"] != logged_user["id"] - - # GET public_user by its primary gid - url = client.app.router["get_all_group_users"].url_for( - gid=f"{public_user['primary_gid']}" - ) - resp = await client.get(f"{url}") - data, _ = await assert_status(resp, status.HTTP_200_OK) - - users = TypeAdapter(list[GroupUserGet]).validate_python(data) - assert len(users) == 1 - assert users[0].id == public_user["id"] - assert users[0].user_name == public_user["name"] - assert users[0].first_name == public_user.get("first_name") - assert users[0].last_name == public_user.get("last_name") - - # GET private_user by its primary gid - url = client.app.router["get_all_group_users"].url_for( - gid=f"{private_user['primary_gid']}" - ) - resp = await client.get(f"{url}") - data, _ = await assert_status(resp, status.HTTP_200_OK) - - users = TypeAdapter(list[GroupUserGet]).validate_python(data) - assert len(users) == 1 - assert users[0].id == private_user["id"] - assert users[0].user_name is None, "It's private" - assert users[0].first_name is None, "It's private" - assert users[0].last_name is None, "It's private" - - -@pytest.mark.parametrize( - "user_role,expected", - [ - (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), - *((r, status.HTTP_200_OK) for r in UserRole if r >= UserRole.GUEST), - ], -) -async def test_access_rights_on_get_profile( - user_role: UserRole, - logged_user: UserInfoDict, - client: TestClient, - expected: HTTPStatus, -): - assert client.app - - url = client.app.router["get_my_profile"].url_for() - assert url.path == "/v0/me" - - resp = await client.get(f"{url}") - await assert_status(resp, expected) - - -@pytest.mark.parametrize( - "user_role,expected", - [ - (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), - (UserRole.GUEST, status.HTTP_403_FORBIDDEN), - *((r, status.HTTP_204_NO_CONTENT) for r in UserRole if r >= UserRole.USER), - ], -) -async def test_access_update_profile( - user_role: UserRole, - logged_user: UserInfoDict, - client: TestClient, - 
expected: HTTPStatus, -): - assert client.app - - url = client.app.router["update_my_profile"].url_for() - assert url.path == "/v0/me" - - resp = await client.patch(f"{url}", json={"last_name": "Foo"}) - await assert_status(resp, expected) - - -@pytest.mark.parametrize("user_role", [UserRole.USER]) -async def test_get_profile( - user_role: UserRole, - logged_user: UserInfoDict, - client: TestClient, - primary_group: dict[str, Any], - standard_groups: list[dict[str, Any]], - all_group: dict[str, str], -): - assert client.app - - url = client.app.router["get_my_profile"].url_for() - assert url.path == "/v0/me" - - resp = await client.get(f"{url}") - data, error = await assert_status(resp, status.HTTP_200_OK) - - assert not error - profile = MyProfileGet.model_validate(data) - - assert profile.login == logged_user["email"] - assert profile.first_name == logged_user.get("first_name", None) - assert profile.last_name == logged_user.get("last_name", None) - assert profile.role == user_role.name - assert profile.groups - assert profile.expiration_date is None - - got_profile_groups = profile.groups.model_dump(**RESPONSE_MODEL_POLICY, mode="json") - assert got_profile_groups["me"] == primary_group - assert got_profile_groups["all"] == all_group - assert got_profile_groups["product"] == { - "accessRights": {"delete": False, "read": False, "write": False}, - "description": "osparc product group", - "gid": 2, - "label": "osparc", - "thumbnail": None, - } - - sorted_by_group_id = functools.partial(sorted, key=lambda d: d["gid"]) - assert sorted_by_group_id( - got_profile_groups["organizations"] - ) == sorted_by_group_id(standard_groups) - - assert profile.preferences == await get_frontend_user_preferences_aggregation( - client.app, user_id=logged_user["id"], product_name="osparc" - ) - - -@pytest.mark.parametrize("user_role", [UserRole.USER]) -async def test_update_profile( - user_role: UserRole, - logged_user: UserInfoDict, - client: TestClient, -): - assert client.app - - # GET - url = client.app.router["get_my_profile"].url_for() - resp = await client.get(f"{url}") - - data, _ = await assert_status(resp, status.HTTP_200_OK) - assert data["role"] == user_role.name - before = deepcopy(data) - - # UPDATE - url = client.app.router["update_my_profile"].url_for() - resp = await client.patch( - f"{url}", - json={ - "last_name": "Foo", - }, - ) - _, error = await assert_status(resp, status.HTTP_204_NO_CONTENT) - assert not error - - # GET - url = client.app.router["get_my_profile"].url_for() - resp = await client.get(f"{url}") - data, _ = await assert_status(resp, status.HTTP_200_OK) - - assert data["last_name"] == "Foo" - - def _copy(data: dict, exclude: set) -> dict: - return {k: v for k, v in data.items() if k not in exclude} - - exclude = {"last_name"} - assert _copy(data, exclude) == _copy(before, exclude) - - -@pytest.mark.parametrize("user_role", [UserRole.USER]) -async def test_profile_workflow( - user_role: UserRole, - logged_user: UserInfoDict, - client: TestClient, -): - assert client.app - - url = client.app.router["get_my_profile"].url_for() - resp = await client.get(f"{url}") - data, _ = await assert_status(resp, status.HTTP_200_OK) - my_profile = MyProfileGet.model_validate(data) - - url = client.app.router["update_my_profile"].url_for() - resp = await client.patch( - f"{url}", - json={ - "first_name": "Odei", # NOTE: still not camecase! 
- "userName": "odei123", - "privacy": {"hideFullname": False}, - }, - ) - await assert_status(resp, status.HTTP_204_NO_CONTENT) - - url = client.app.router["get_my_profile"].url_for() - resp = await client.get(f"{url}") - data, _ = await assert_status(resp, status.HTTP_200_OK) - updated_profile = MyProfileGet.model_validate(data) - - assert updated_profile.first_name != my_profile.first_name - assert updated_profile.last_name == my_profile.last_name - assert updated_profile.login == my_profile.login - - assert updated_profile.user_name != my_profile.user_name - assert updated_profile.user_name == "odei123" - - assert updated_profile.privacy != my_profile.privacy - assert updated_profile.privacy.hide_username == my_profile.privacy.hide_username - assert updated_profile.privacy.hide_email == my_profile.privacy.hide_email - assert updated_profile.privacy.hide_fullname != my_profile.privacy.hide_fullname - - -@pytest.mark.parametrize("user_role", [UserRole.USER]) -@pytest.mark.parametrize("invalid_username", ["", "_foo", "superadmin", "foo..-123"]) -async def test_update_wrong_user_name( - user_role: UserRole, - logged_user: UserInfoDict, - client: TestClient, - invalid_username: str, -): - assert client.app - - url = client.app.router["update_my_profile"].url_for() - resp = await client.patch( - f"{url}", - json={ - "userName": invalid_username, - }, - ) - await assert_status(resp, status.HTTP_422_UNPROCESSABLE_ENTITY) - - -@pytest.mark.parametrize("user_role", [UserRole.USER]) -async def test_update_existing_user_name( - user_role: UserRole, - user: UserInfoDict, - logged_user: UserInfoDict, - client: TestClient, -): - assert client.app - - other_username = user["name"] - assert other_username != logged_user["name"] - - # update with SAME username (i.e. existing) - url = client.app.router["get_my_profile"].url_for() - resp = await client.get(f"{url}") - data, _ = await assert_status(resp, status.HTTP_200_OK) - - assert data["userName"] == logged_user["name"] - - url = client.app.router["update_my_profile"].url_for() - resp = await client.patch( - f"{url}", - json={ - "userName": other_username, - }, - ) - await assert_status(resp, status.HTTP_409_CONFLICT) - - -@pytest.fixture -def mock_failing_database_connection(mocker: Mock) -> MagicMock: - """ - async with engine.acquire() as conn: - await conn.execute(query) --> will raise OperationalError - """ - # See http://initd.org/psycopg/docs/module.html - conn_execute = mocker.patch.object(SAConnection, "execute") - conn_execute.side_effect = OperationalError( - "MOCK: server closed the connection unexpectedly" - ) - return conn_execute - - -@pytest.mark.parametrize( - "user_role,expected", - [ - (UserRole.USER, status.HTTP_503_SERVICE_UNAVAILABLE), - ], -) -async def test_get_profile_with_failing_db_connection( - logged_user: UserInfoDict, - client: TestClient, - mock_failing_database_connection: MagicMock, - expected: HTTPStatus, -): - """ - Reproduces issue https://github.com/ITISFoundation/osparc-simcore/pull/1160 - - A logged user fails to get profie because though authentication because - - i.e. 
conn.execute(query) will raise psycopg2.OperationalError: server closed the connection unexpectedly - - SEE: - - https://github.com/ITISFoundation/osparc-simcore/issues/880 - - https://github.com/ITISFoundation/osparc-simcore/pull/1160 - """ - assert client.app - - url = client.app.router["get_my_profile"].url_for() - assert str(url) == "/v0/me" - - resp = await client.get(url.path) - - data, error = await assert_status(resp, expected) - assert not data - assert error["message"] == "Authentication service is temporary unavailable" diff --git a/services/web/server/tests/unit/with_dbs/03/test_users_api.py b/services/web/server/tests/unit/with_dbs/03/test_users_service.py similarity index 92% rename from services/web/server/tests/unit/with_dbs/03/test_users_api.py rename to services/web/server/tests/unit/with_dbs/03/test_users_service.py index 48fe21c24c38..a9a62763e67f 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users_api.py +++ b/services/web/server/tests/unit/with_dbs/03/test_users_service.py @@ -3,7 +3,6 @@ # pylint: disable=unused-variable from datetime import datetime, timedelta -from enum import Enum import pytest from aiohttp.test_utils import TestClient @@ -14,10 +13,11 @@ from pydantic import TypeAdapter from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict -from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict +from pytest_simcore.helpers.webserver_users import NewUser, UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.users import UserStatus -from simcore_service_webserver.users.api import ( +from simcore_service_webserver.users.exceptions import UserNotFoundError +from simcore_service_webserver.users.users_service import ( delete_user_without_projects, get_guest_user_ids_and_names, get_user, @@ -31,7 +31,6 @@ set_user_as_deleted, update_expired_users, ) -from simcore_service_webserver.users.exceptions import UserNotFoundError @pytest.fixture @@ -56,10 +55,7 @@ async def test_reading_a_user(client: TestClient, faker: Faker, user: UserInfoDi keys = set(got.keys()).intersection(user.keys()) - def _normalize_val(v): - return v.value if isinstance(v, Enum) else v - - assert {k: _normalize_val(got[k]) for k in keys} == {k: user[k] for k in keys} + assert {k: got[k] for k in keys} == {k: user[k] for k in keys} user_primary_group_id = got["primary_gid"] @@ -84,7 +80,7 @@ def _normalize_val(v): assert got.name == user["name"] got = await get_user_role(client.app, user_id=user_id) - assert _normalize_val(got) == user["role"] + assert got == user["role"] got = await get_user_id_from_gid(client.app, primary_gid=user_primary_group_id) assert got == user_id @@ -100,9 +96,7 @@ async def test_listing_users(client: TestClient, faker: Faker, user: UserInfoDic guests = await get_guest_user_ids_and_names(client.app) assert not guests - async with NewUser( - user_data={"role": UserRole.GUEST.value}, app=client.app - ) as guest: + async with NewUser(user_data={"role": UserRole.GUEST}, app=client.app) as guest: got = await get_guest_user_ids_and_names(client.app) assert (guest["id"], guest["name"]) in TypeAdapter( list[tuple[UserID, UserNameID]] diff --git a/services/web/server/tests/unit/with_dbs/03/trash/conftest.py b/services/web/server/tests/unit/with_dbs/03/trash/conftest.py index 5c742b12144d..f154713ec817 100644 --- a/services/web/server/tests/unit/with_dbs/03/trash/conftest.py +++ 
b/services/web/server/tests/unit/with_dbs/03/trash/conftest.py @@ -7,7 +7,7 @@ import logging -from collections.abc import AsyncIterable, Callable +from collections.abc import AsyncIterable from pathlib import Path import pytest @@ -19,9 +19,9 @@ from pytest_simcore.helpers.logging_tools import log_context from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict from pytest_simcore.helpers.webserver_parametrizations import MockedStorageSubsystem from pytest_simcore.helpers.webserver_projects import NewProject +from pytest_simcore.helpers.webserver_users import NewUser, UserInfoDict from simcore_service_webserver.projects.models import ProjectDict _logger = logging.getLogger(__name__) @@ -72,19 +72,15 @@ async def other_user_project( @pytest.fixture def mocked_catalog( user_project: ProjectDict, - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], -): - catalog_subsystem_mock([user_project]) +): ... @pytest.fixture -def mocked_director_v2(director_v2_service_mock: aioresponses): - ... +def mocked_director_v2(director_v2_service_mock: aioresponses): ... @pytest.fixture -def mocked_storage(storage_subsystem_mock: MockedStorageSubsystem): - ... +def mocked_storage(storage_subsystem_mock: MockedStorageSubsystem): ... @pytest.fixture diff --git a/services/web/server/tests/unit/with_dbs/03/trash/test_trash_rest.py b/services/web/server/tests/unit/with_dbs/03/trash/test_trash_rest.py index b3f66e0813f9..d2c4338c0eb4 100644 --- a/services/web/server/tests/unit/with_dbs/03/trash/test_trash_rest.py +++ b/services/web/server/tests/unit/with_dbs/03/trash/test_trash_rest.py @@ -22,8 +22,8 @@ from models_library.rest_pagination import Page from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.helpers.webserver_projects import create_project +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.folders_v2 import folders_v2 from simcore_postgres_database.models.projects import projects @@ -75,6 +75,10 @@ async def test_trash_projects( # noqa: PLR0915 return_value=[mocker.MagicMock()] if is_project_running else [], autospec=True, ) + mocker.patch( + "simcore_service_webserver.projects._trash_service.dynamic_scheduler_service.get_dynamic_service", + autospec=True, + ) project_uuid = UUID(user_project["uuid"]) @@ -587,7 +591,6 @@ async def test_trash_workspace( workspace: WorkspaceGet, user_project: ProjectDict, fake_project: ProjectDict, - mocked_catalog: None, mocked_dynamic_services_interface: dict[str, MagicMock], postgres_db: sa.engine.Engine, ): @@ -843,7 +846,6 @@ async def test_trash_subfolder( client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, - mocked_catalog: None, mocked_dynamic_services_interface: dict[str, MagicMock], ): assert client.app @@ -941,7 +943,6 @@ async def test_trash_project_in_subfolder( client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, - mocked_catalog: None, mocked_dynamic_services_interface: dict[str, MagicMock], ): assert client.app @@ -983,7 +984,7 @@ async def test_trash_project_in_subfolder( assert f"{url}" == "/v0/projects:search" resp = await client.get( - "/v0/projects:search", params={"filters": '{"trashed": true}'} + "/v0/projects:search", params={"filters": 
'{"trashed": true}', "type": "user"} ) await assert_status(resp, status.HTTP_200_OK) page = Page[ProjectGet].model_validate(await resp.json()) @@ -1011,14 +1012,14 @@ async def test_trash_project_in_subfolder( await assert_status(resp, status.HTTP_204_NO_CONTENT) resp = await client.get( - "/v0/projects:search", params={"filters": '{"trashed": true}'} + "/v0/projects:search", params={"filters": '{"trashed": true}', "type": "user"} ) await assert_status(resp, status.HTTP_200_OK) page = Page[ProjectGet].model_validate(await resp.json()) assert page.meta.total == 0 resp = await client.get( - "/v0/projects:search", params={"filters": '{"trashed": false}'} + "/v0/projects:search", params={"filters": '{"trashed": false}', "type": "user"} ) await assert_status(resp, status.HTTP_200_OK) page = Page[ProjectGet].model_validate(await resp.json()) @@ -1030,7 +1031,6 @@ async def test_trash_project_explitictly_and_empty_trash_bin( client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, - mocked_catalog: None, mocked_director_v2: None, mocked_dynamic_services_interface: dict[str, MagicMock], mocked_storage: None, @@ -1047,7 +1047,9 @@ async def test_trash_project_explitictly_and_empty_trash_bin( await assert_status(resp, status.HTTP_204_NO_CONTENT) # LIST trashed projects - resp = await client.get("/v0/projects", params={"filters": '{"trashed": true}'}) + resp = await client.get( + "/v0/projects", params={"filters": '{"trashed": true}', "type": "user"} + ) await assert_status(resp, status.HTTP_200_OK) page = Page[ProjectListItem].model_validate(await resp.json()) @@ -1088,7 +1090,6 @@ async def test_trash_folder_with_subfolder_and_project_and_empty_bin( client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, - mocked_catalog: None, mocked_director_v2: None, mocked_dynamic_services_interface: dict[str, MagicMock], mocked_storage: None, @@ -1131,7 +1132,7 @@ async def test_trash_folder_with_subfolder_and_project_and_empty_bin( # - LIST trashed projects (will show only explicit!) resp = await client.get( - "/v0/projects:search", params={"filters": '{"trashed": true}'} + "/v0/projects:search", params={"filters": '{"trashed": true}', "type": "user"} ) await assert_status(resp, status.HTTP_200_OK) page = Page[ProjectListItem].model_validate(await resp.json()) @@ -1165,7 +1166,7 @@ async def test_trash_folder_with_subfolder_and_project_and_empty_bin( # waits for deletion async for attempt in AsyncRetrying( - stop=stop_after_attempt(3), wait=wait_fixed(1), reraise=True + stop=stop_after_attempt(10), wait=wait_fixed(1), reraise=True ): with attempt: # GET trashed parent folder @@ -1191,7 +1192,7 @@ async def test_trash_folder_with_subfolder_and_project_and_empty_bin( # - LIST trashed projects (will show only explicit!) 
resp = await client.get( - "/v0/projects:search", params={"filters": '{"trashed": true}'} + "/v0/projects:search", params={"filters": '{"trashed": true}', "type": "user"} ) await assert_status(resp, status.HTTP_200_OK) page = Page[ProjectListItem].model_validate(await resp.json()) diff --git a/services/web/server/tests/unit/with_dbs/03/users/conftest.py b/services/web/server/tests/unit/with_dbs/03/users/conftest.py index 2b99f9e01f67..2272c5bc9f62 100644 --- a/services/web/server/tests/unit/with_dbs/03/users/conftest.py +++ b/services/web/server/tests/unit/with_dbs/03/users/conftest.py @@ -3,15 +3,14 @@ # pylint: disable=unused-variable # pylint: disable=too-many-arguments -import asyncio from collections.abc import AsyncGenerator, AsyncIterable, Callable from typing import Any import pytest +import pytest_asyncio import sqlalchemy as sa from aiohttp import web from aiohttp.test_utils import TestServer -from faker import Faker from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.aiohttp.application import create_safe_application from simcore_postgres_database.models.users_details import ( @@ -22,9 +21,8 @@ from sqlalchemy.ext.asyncio import AsyncEngine -@pytest.fixture -def web_server( - event_loop: asyncio.AbstractEventLoop, +@pytest_asyncio.fixture(loop_scope="function", scope="function") +async def web_server( app_environment: EnvVarsDict, # configs postgres_db: sa.engine.Engine, # db-ready webserver_test_server_port: int, @@ -37,9 +35,7 @@ def web_server( setup_settings(app) setup_db(app) - return event_loop.run_until_complete( - aiohttp_server(app, port=webserver_test_server_port) - ) + return await aiohttp_server(app, port=webserver_test_server_port) @pytest.fixture @@ -73,24 +69,19 @@ async def pre_registration_details_db_cleanup( @pytest.fixture async def product_owner_user( - faker: Faker, asyncpg_engine: AsyncEngine, ) -> AsyncIterable[dict[str, Any]]: """A PO user in the database""" - from pytest_simcore.helpers.faker_factories import random_user - from pytest_simcore.helpers.postgres_tools import insert_and_get_row_lifespan - from simcore_postgres_database.models.users import UserRole, users + from pytest_simcore.helpers.postgres_users import ( + insert_and_get_user_and_secrets_lifespan, + ) + from simcore_postgres_database.models.users import UserRole - async with insert_and_get_row_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup + async with insert_and_get_user_and_secrets_lifespan( # pylint:disable=contextmanager-generator-missing-cleanup asyncpg_engine, - table=users, - values=random_user( - faker, - email="po-user@email.com", - name="po-user-fixture", - role=UserRole.PRODUCT_OWNER, - ), - pk_col=users.c.id, + email="po-user@email.com", + name="po-user-fixture", + role=UserRole.PRODUCT_OWNER, ) as record: yield record diff --git a/services/web/server/tests/unit/with_dbs/03/users/test_users_repository.py b/services/web/server/tests/unit/with_dbs/03/users/test_users_accounts_repository.py similarity index 70% rename from services/web/server/tests/unit/with_dbs/03/users/test_users_repository.py rename to services/web/server/tests/unit/with_dbs/03/users/test_users_accounts_repository.py index 48d1991a1b99..71c178405675 100644 --- a/services/web/server/tests/unit/with_dbs/03/users/test_users_repository.py +++ b/services/web/server/tests/unit/with_dbs/03/users/test_users_accounts_repository.py @@ -12,11 +12,12 @@ from aiohttp import web from common_library.users_enums import AccountRequestStatus from models_library.products import 
ProductName +from models_library.users import UserID from simcore_postgres_database.models.users_details import ( users_pre_registration_details, ) from simcore_service_webserver.db.plugin import get_asyncpg_engine -from simcore_service_webserver.users import _users_repository +from simcore_service_webserver.users import _accounts_repository @pytest.fixture @@ -62,7 +63,7 @@ async def test_create_user_pre_registration( } # Act - pre_registration_id = await _users_repository.create_user_pre_registration( + pre_registration_id = await _accounts_repository.create_user_pre_registration( asyncpg_engine, email=test_email, created_by=created_by_user_id, @@ -112,7 +113,7 @@ async def test_review_user_pre_registration( } # Create a pre-registration to review - pre_registration_id = await _users_repository.create_user_pre_registration( + pre_registration_id = await _accounts_repository.create_user_pre_registration( asyncpg_engine, email=test_email, created_by=created_by_user_id, @@ -125,7 +126,7 @@ async def test_review_user_pre_registration( # Act - review and approve the registration new_status = AccountRequestStatus.APPROVED - await _users_repository.review_user_pre_registration( + await _accounts_repository.review_user_pre_registration( asyncpg_engine, pre_registration_id=pre_registration_id, reviewed_by=reviewer_id, @@ -133,7 +134,7 @@ async def test_review_user_pre_registration( ) # Assert - Use list_user_pre_registrations to verify - registrations, count = await _users_repository.list_user_pre_registrations( + registrations, count = await _accounts_repository.list_user_pre_registrations( asyncpg_engine, filter_by_pre_email=test_email, filter_by_product_name=product_name, @@ -170,6 +171,96 @@ async def test_review_user_pre_registration( await conn.commit() +async def test_review_user_pre_registration_with_invitation_extras( + app: web.Application, + product_name: ProductName, + product_owner_user: dict[str, Any], + pre_registration_details_db_cleanup: list[int], +): + # Arrange + asyncpg_engine = get_asyncpg_engine(app) + + test_email = "review.with.invitation@example.com" + created_by_user_id = product_owner_user["id"] + reviewer_id = product_owner_user["id"] + institution = "Test Institution" + pre_registration_details: dict[str, Any] = { + "institution": institution, + "pre_first_name": "Review", + "pre_last_name": "WithInvitation", + } + + # Create a pre-registration to review + pre_registration_id = await _accounts_repository.create_user_pre_registration( + asyncpg_engine, + email=test_email, + created_by=created_by_user_id, + product_name=product_name, + **pre_registration_details, + ) + + # Add to cleanup list + pre_registration_details_db_cleanup.append(pre_registration_id) + + # Prepare invitation extras (mimicking the structure from _users_rest.py) + invitation_extras = { + "invitation": { + "issuer": str(reviewer_id), + "guest": test_email, + "trial_account_days": 30, + "extra_credits_in_usd": 100.0, + "product_name": product_name, + "created": "2024-01-01T00:00:00Z", + } + } + + # Act - review and approve the registration with invitation extras + new_status = AccountRequestStatus.APPROVED + await _accounts_repository.review_user_pre_registration( + asyncpg_engine, + pre_registration_id=pre_registration_id, + reviewed_by=reviewer_id, + new_status=new_status, + invitation_extras=invitation_extras, + ) + + # Assert - Use list_user_pre_registrations to verify + registrations, count = await _accounts_repository.list_user_pre_registrations( + asyncpg_engine, + 
filter_by_pre_email=test_email, + filter_by_product_name=product_name, + ) + + # Check count and that we found our registration + assert count == 1 + assert len(registrations) == 1 + + # Get the registration + reg = registrations[0] + + # Verify basic details + assert reg["id"] == pre_registration_id + assert reg["pre_email"] == test_email + assert reg["pre_first_name"] == "Review" + assert reg["pre_last_name"] == "WithInvitation" + assert reg["institution"] == institution + assert reg["product_name"] == product_name + assert reg["account_request_status"] == new_status + assert reg["created_by"] == created_by_user_id + assert reg["account_request_reviewed_by"] == reviewer_id + assert reg["account_request_reviewed_at"] is not None + + # Verify invitation extras were stored correctly + assert reg["extras"] is not None + assert "invitation" in reg["extras"] + invitation_data = reg["extras"]["invitation"] + assert invitation_data["issuer"] == str(reviewer_id) + assert invitation_data["guest"] == test_email + assert invitation_data["trial_account_days"] == 30 + assert invitation_data["extra_credits_in_usd"] == 100.0 + assert invitation_data["product_name"] == product_name + + async def test_list_user_pre_registrations( app: web.Application, product_name: ProductName, @@ -191,7 +282,7 @@ async def test_list_user_pre_registrations( # Create pending registrations for i, email in enumerate(emails[:3]): - pre_reg_id = await _users_repository.create_user_pre_registration( + pre_reg_id = await _accounts_repository.create_user_pre_registration( asyncpg_engine, email=email, created_by=created_by_user_id, @@ -205,7 +296,7 @@ async def test_list_user_pre_registrations( pre_registration_details_db_cleanup.append(pre_reg_id) # Create and approve one registration - await _users_repository.review_user_pre_registration( + await _accounts_repository.review_user_pre_registration( asyncpg_engine, pre_registration_id=pre_reg_ids[0], reviewed_by=created_by_user_id, @@ -213,7 +304,7 @@ async def test_list_user_pre_registrations( ) # Create and reject one registration - await _users_repository.review_user_pre_registration( + await _accounts_repository.review_user_pre_registration( asyncpg_engine, pre_registration_id=pre_reg_ids[1], reviewed_by=created_by_user_id, @@ -225,7 +316,7 @@ async def test_list_user_pre_registrations( # Act & Assert - Test different filter combinations # 1. Get all registrations (should be 3) - all_registrations, count = await _users_repository.list_user_pre_registrations( + all_registrations, count = await _accounts_repository.list_user_pre_registrations( asyncpg_engine, filter_by_product_name=product_name, ) @@ -233,7 +324,7 @@ async def test_list_user_pre_registrations( assert len(all_registrations) == 3 # 2. Filter by email pattern (should match first 3 emails with "test") - test_registrations, count = await _users_repository.list_user_pre_registrations( + test_registrations, count = await _accounts_repository.list_user_pre_registrations( asyncpg_engine, filter_by_pre_email="test", filter_by_product_name=product_name, @@ -243,10 +334,12 @@ async def test_list_user_pre_registrations( assert all("test" in reg["pre_email"] for reg in test_registrations) # 3. 
Filter by status - APPROVED - approved_registrations, count = await _users_repository.list_user_pre_registrations( - asyncpg_engine, - filter_by_account_request_status=AccountRequestStatus.APPROVED, - filter_by_product_name=product_name, + approved_registrations, count = ( + await _accounts_repository.list_user_pre_registrations( + asyncpg_engine, + filter_by_account_request_status=AccountRequestStatus.APPROVED, + filter_by_product_name=product_name, + ) ) assert count == 1 assert len(approved_registrations) == 1 @@ -257,10 +350,12 @@ async def test_list_user_pre_registrations( ) # 4. Filter by status - REJECTED - rejected_registrations, count = await _users_repository.list_user_pre_registrations( - asyncpg_engine, - filter_by_account_request_status=AccountRequestStatus.REJECTED, - filter_by_product_name=product_name, + rejected_registrations, count = ( + await _accounts_repository.list_user_pre_registrations( + asyncpg_engine, + filter_by_account_request_status=AccountRequestStatus.REJECTED, + filter_by_product_name=product_name, + ) ) assert count == 1 assert len(rejected_registrations) == 1 @@ -271,10 +366,12 @@ async def test_list_user_pre_registrations( ) # 5. Filter by status - PENDING - pending_registrations, count = await _users_repository.list_user_pre_registrations( - asyncpg_engine, - filter_by_account_request_status=AccountRequestStatus.PENDING, - filter_by_product_name=product_name, + pending_registrations, count = ( + await _accounts_repository.list_user_pre_registrations( + asyncpg_engine, + filter_by_account_request_status=AccountRequestStatus.PENDING, + filter_by_product_name=product_name, + ) ) assert count == 1 assert len(pending_registrations) == 1 @@ -286,7 +383,7 @@ async def test_list_user_pre_registrations( # 6. Test pagination paginated_registrations, count = ( - await _users_repository.list_user_pre_registrations( + await _accounts_repository.list_user_pre_registrations( asyncpg_engine, filter_by_product_name=product_name, pagination_limit=2, @@ -297,7 +394,7 @@ async def test_list_user_pre_registrations( assert len(paginated_registrations) == 2 # But only returns 2 records # Get next page - page2_registrations, count = await _users_repository.list_user_pre_registrations( + page2_registrations, count = await _accounts_repository.list_user_pre_registrations( asyncpg_engine, filter_by_product_name=product_name, pagination_limit=2, @@ -335,7 +432,7 @@ async def test_create_pre_registration_with_existing_user_linking( existing_user_email = product_owner_user["email"] # Act - Create pre-registration with the same email as product_owner_user - pre_registration_id = await _users_repository.create_user_pre_registration( + pre_registration_id = await _accounts_repository.create_user_pre_registration( asyncpg_engine, email=existing_user_email, # Same email as existing user created_by=existing_user_id, @@ -350,7 +447,7 @@ async def test_create_pre_registration_with_existing_user_linking( pre_registration_details_db_cleanup.append(pre_registration_id) # Assert - Verify through list_user_pre_registrations - registrations, count = await _users_repository.list_user_pre_registrations( + registrations, count = await _accounts_repository.list_user_pre_registrations( asyncpg_engine, filter_by_pre_email=existing_user_email, filter_by_product_name=product_name, @@ -381,9 +478,9 @@ async def test_create_pre_registration_with_existing_user_linking( class MixedUserTestData: """Test data for user pre-registration tests with mixed states.""" - created_by_user_id: str + created_by_user_id: 
UserID product_owner_email: str - product_owner_id: str + product_owner_id: UserID pre_reg_email: str pre_reg_id: int owner_pre_reg_id: int @@ -412,7 +509,7 @@ async def mixed_user_data( # 1. Create a pre-registered user that is not in the users table - PENDING status pre_reg_email = "pre.registered.only@example.com" - pre_reg_id = await _users_repository.create_user_pre_registration( + pre_reg_id = await _accounts_repository.create_user_pre_registration( asyncpg_engine, email=pre_reg_email, created_by=created_by_user_id, @@ -431,7 +528,7 @@ async def mixed_user_data( pre_registration_details_db_cleanup.append(pre_reg_id) # 2. Create a pre-registration for the product_owner_user (both registered and pre-registered) - owner_pre_reg_id = await _users_repository.create_user_pre_registration( + owner_pre_reg_id = await _accounts_repository.create_user_pre_registration( asyncpg_engine, email=product_owner_user["email"], created_by=created_by_user_id, @@ -447,7 +544,7 @@ async def mixed_user_data( # 3. Create another pre-registered user with APPROVED status approved_email = "approved.user@example.com" - approved_reg_id = await _users_repository.create_user_pre_registration( + approved_reg_id = await _accounts_repository.create_user_pre_registration( asyncpg_engine, email=approved_email, created_by=created_by_user_id, @@ -461,7 +558,7 @@ async def mixed_user_data( pre_registration_details_db_cleanup.append(approved_reg_id) # Set to APPROVED status - await _users_repository.review_user_pre_registration( + await _accounts_repository.review_user_pre_registration( asyncpg_engine, pre_registration_id=approved_reg_id, reviewed_by=created_by_user_id, @@ -492,7 +589,7 @@ async def test_list_merged_users_all_users( # Act - Get all users without filtering users_list, total_count = ( - await _users_repository.list_merged_pre_and_registered_users( + await _accounts_repository.list_merged_pre_and_registered_users( asyncpg_engine, product_name=product_name, filter_include_deleted=False, @@ -528,7 +625,7 @@ async def test_list_merged_users_pre_registered_only( asyncpg_engine = get_asyncpg_engine(app) # Act - Get all users - users_list, _ = await _users_repository.list_merged_pre_and_registered_users( + users_list, _ = await _accounts_repository.list_merged_pre_and_registered_users( asyncpg_engine, product_name=product_name, filter_include_deleted=False, @@ -564,7 +661,7 @@ async def test_list_merged_users_linked_user( asyncpg_engine = get_asyncpg_engine(app) # Act - Get all users - users_list, _ = await _users_repository.list_merged_pre_and_registered_users( + users_list, _ = await _accounts_repository.list_merged_pre_and_registered_users( asyncpg_engine, product_name=product_name, filter_include_deleted=False, @@ -615,7 +712,7 @@ async def test_list_merged_users_filter_pending( # Act - Get users with PENDING status pending_users, pending_count = ( - await _users_repository.list_merged_pre_and_registered_users( + await _accounts_repository.list_merged_pre_and_registered_users( asyncpg_engine, product_name=product_name, filter_any_account_request_status=[AccountRequestStatus.PENDING], @@ -644,7 +741,7 @@ async def test_list_merged_users_filter_approved( # Act - Get users with APPROVED status approved_users, approved_count = ( - await _users_repository.list_merged_pre_and_registered_users( + await _accounts_repository.list_merged_pre_and_registered_users( asyncpg_engine, product_name=product_name, filter_any_account_request_status=[AccountRequestStatus.APPROVED], @@ -670,7 +767,7 @@ async def 
test_list_merged_users_multiple_statuses( # Act - Get users with either PENDING or APPROVED status mixed_status_users, mixed_status_count = ( - await _users_repository.list_merged_pre_and_registered_users( + await _accounts_repository.list_merged_pre_and_registered_users( asyncpg_engine, product_name=product_name, filter_any_account_request_status=[ @@ -701,7 +798,7 @@ async def test_list_merged_users_pagination( # Act - Get first page with limit 2 page1_users, total_count = ( - await _users_repository.list_merged_pre_and_registered_users( + await _accounts_repository.list_merged_pre_and_registered_users( asyncpg_engine, product_name=product_name, filter_include_deleted=False, @@ -711,7 +808,7 @@ async def test_list_merged_users_pagination( ) # Get second page with limit 2 - page2_users, _ = await _users_repository.list_merged_pre_and_registered_users( + page2_users, _ = await _accounts_repository.list_merged_pre_and_registered_users( asyncpg_engine, product_name=product_name, filter_include_deleted=False, @@ -733,3 +830,132 @@ async def test_list_merged_users_pagination( assert not set(page1_emails).intersection( set(page2_emails) ), "Pages should have different users" + + +@pytest.mark.parametrize( + "email_pattern,expected_count", + [ + ("%pre.registered%", 1), # Valid: matches pre-registered user + ("%nonexistent%", 0), # Invalid: no matches + ], +) +async def test_search_merged_users_by_email( + app: web.Application, + product_name: ProductName, + mixed_user_data: MixedUserTestData, + email_pattern: str, + expected_count: int, +): + """Test searching merged users by email pattern.""" + asyncpg_engine = get_asyncpg_engine(app) + + # Act + rows = await _accounts_repository.search_merged_pre_and_registered_users( + asyncpg_engine, + filter_by_email_like=email_pattern, + product_name=product_name, + ) + + # Assert + assert len(rows) == expected_count + + if expected_count > 0: + row = rows[0] + assert row.pre_email == mixed_user_data.pre_reg_email + assert row.pre_first_name == "Pre-Registered" + assert row.pre_last_name == "Only" + assert row.institution == "Pre-Reg Institution" + + +@pytest.mark.parametrize( + "use_valid_username,expected_count", + [ + (True, 1), # Valid: use actual product owner username + (False, 0), # Invalid: use non-existent username + ], +) +async def test_search_merged_users_by_username( + app: web.Application, + product_name: ProductName, + product_owner_user: dict[str, Any], + use_valid_username: bool, + expected_count: int, +): + """Test searching merged users by username pattern.""" + asyncpg_engine = get_asyncpg_engine(app) + + # Arrange + username_pattern = ( + f"{product_owner_user['name']}" + if use_valid_username + else "%nonexistent_username%" + ) + + # Act + rows = await _accounts_repository.search_merged_pre_and_registered_users( + asyncpg_engine, + filter_by_user_name_like=username_pattern, + product_name=product_name, + ) + + # Assert + assert len(rows) >= expected_count + + if expected_count > 0: + # Find the product owner in rows + found_user = next( + (row for row in rows if row.email == product_owner_user["email"]), + None, + ) + assert found_user is not None + assert found_user.first_name == product_owner_user["first_name"] + assert found_user.last_name == product_owner_user["last_name"] + + +@pytest.mark.parametrize( + "use_valid_group_id,expected_count", + [ + (True, 1), # Valid: use actual product owner primary group ID + (False, 0), # Invalid: use non-existent group ID + ], +) +async def test_search_merged_users_by_primary_group_id( + app: 
web.Application, + product_name: ProductName, + product_owner_user: dict[str, Any], + use_valid_group_id: bool, + expected_count: int, +): + """Test searching merged users by primary group ID.""" + asyncpg_engine = get_asyncpg_engine(app) + + # Arrange + primary_group_id = ( + product_owner_user["primary_gid"] + if use_valid_group_id + else 99999 # Non-existent group ID + ) + + # Act + results = await _accounts_repository.search_merged_pre_and_registered_users( + asyncpg_engine, + filter_by_primary_group_id=primary_group_id, + product_name=product_name, + ) + + # Assert + assert len(results) >= expected_count + + if expected_count > 0: + # Find the product owner in results + found_user = next( + ( + result + for result in results + if result.email == product_owner_user["email"] + ), + None, + ) + assert found_user is not None + assert found_user.first_name == product_owner_user["first_name"] + assert found_user.last_name == product_owner_user["last_name"] diff --git a/services/web/server/tests/unit/with_dbs/03/users/test_users_service.py b/services/web/server/tests/unit/with_dbs/03/users/test_users_accounts_service.py similarity index 89% rename from services/web/server/tests/unit/with_dbs/03/users/test_users_service.py rename to services/web/server/tests/unit/with_dbs/03/users/test_users_accounts_service.py index f9f648da946c..8e0a30d28d1d 100644 --- a/services/web/server/tests/unit/with_dbs/03/users/test_users_service.py +++ b/services/web/server/tests/unit/with_dbs/03/users/test_users_accounts_service.py @@ -15,8 +15,8 @@ users_pre_registration_details, ) from simcore_service_webserver.db.plugin import get_asyncpg_engine -from simcore_service_webserver.users import _users_service -from simcore_service_webserver.users._users_repository import ( +from simcore_service_webserver.users import _accounts_service +from simcore_service_webserver.users._accounts_repository import ( create_user_pre_registration, ) @@ -82,8 +82,11 @@ async def test_search_users_as_admin_real_user( user_email = product_owner_user["email"] # Act - found_users = await _users_service.search_users_accounts( - app, email_glob=user_email, product_name=product_name, include_products=False + found_users = await _accounts_service.search_users_accounts( + app, + filter_by_email_glob=user_email, + product_name=product_name, + include_products=False, ) # Assert @@ -111,8 +114,8 @@ async def test_search_users_as_admin_pre_registered_user( pre_registration_details = pre_registered_user_created["details"] # Act - found_users = await _users_service.search_users_accounts( - app, email_glob=pre_registered_email, product_name=product_name + found_users = await _accounts_service.search_users_accounts( + app, filter_by_email_glob=pre_registered_email, product_name=product_name ) # Assert @@ -164,8 +167,8 @@ async def test_search_users_as_admin_wildcard( ) # Act - search with wildcard for the domain - found_users = await _users_service.search_users_accounts( - app, email_glob=f"*{email_domain}", product_name=product_name + found_users = await _accounts_service.search_users_accounts( + app, filter_by_email_glob=f"*{email_domain}", product_name=product_name ) # Assert diff --git a/services/web/server/tests/unit/with_dbs/04/conversations/__init__.py b/services/web/server/tests/unit/with_dbs/04/conversations/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/services/web/server/tests/unit/with_dbs/04/conversations/test_conversations_messages_rest.py 
b/services/web/server/tests/unit/with_dbs/04/conversations/test_conversations_messages_rest.py new file mode 100644 index 000000000000..9fb63eb81655 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/04/conversations/test_conversations_messages_rest.py @@ -0,0 +1,508 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-statements + + +from collections.abc import Callable, Iterable +from datetime import UTC, datetime +from http import HTTPStatus +from types import SimpleNamespace +from uuid import uuid4 + +import pytest +from aiohttp.test_utils import TestClient +from models_library.api_schemas_webserver.conversations import ( + ConversationMessageRestGet, +) +from models_library.conversations import ( + ConversationMessageGetDB, + ConversationMessageType, +) +from pytest_mock import MockerFixture +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.webserver_login import UserInfoDict +from servicelib.aiohttp import status +from simcore_service_webserver.conversations import _conversation_message_service +from simcore_service_webserver.db.models import UserRole + + +@pytest.fixture +def mock_functions_factory( + mocker: MockerFixture, +) -> Callable[[Iterable[tuple[object, str]]], SimpleNamespace]: + def _patch(targets_and_names: Iterable[tuple[object, str]]) -> SimpleNamespace: + return SimpleNamespace( + **{ + name: mocker.patch.object(target, name) + for target, name in targets_and_names + } + ) + + return _patch + + +@pytest.fixture +async def conversation_id( + client: TestClient, + logged_user: UserInfoDict, +) -> str: + """Create a test conversation and return its ID""" + assert client.app + base_url = client.app.router["list_conversations"].url_for() + body = {"name": "Test Support Conversation", "type": "SUPPORT"} + resp = await client.post(f"{base_url}", json=body) + data, _ = await assert_status(resp, status.HTTP_201_CREATED) + return data["conversationId"] + + +@pytest.mark.parametrize( + "user_role,expected", + [ + # (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), + (UserRole.GUEST, status.HTTP_200_OK), + (UserRole.USER, status.HTTP_200_OK), + (UserRole.TESTER, status.HTTP_200_OK), + ], +) +async def test_conversation_messages_user_role_access( + client: TestClient, + logged_user: UserInfoDict, + user_role: UserRole, + expected: HTTPStatus, + conversation_id: str, +): + """Test user role access to conversation messages endpoints""" + assert client.app + list_url = client.app.router["list_conversation_messages"].url_for( + conversation_id=conversation_id + ) + resp = await client.get(f"{list_url}") + assert resp.status == (401 if user_role == UserRole.ANONYMOUS else 200) + + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.USER, status.HTTP_201_CREATED), + ], +) +async def test_conversation_messages_create_and_list( + client: TestClient, + logged_user: UserInfoDict, + expected: HTTPStatus, + conversation_id: str, + mock_functions_factory: Callable[[Iterable[tuple[object, str]]], SimpleNamespace], +): + """Test creating and listing messages in a support conversation""" + mocks = mock_functions_factory( + [ + (_conversation_message_service, "create_message"), + (_conversation_message_service, "list_messages_for_conversation"), + ] + ) + + # Mock the create_message function to return a message + mock_message = ConversationMessageGetDB(
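The `mock_functions_factory` fixture above patches each (module, function-name) pair via pytest-mock and exposes the resulting MagicMocks as attributes of a SimpleNamespace, so tests can write `mocks.create_message.return_value = ...`. A rough stand-alone equivalent, shown only for illustration (the `patch_functions` helper is hypothetical and not part of this test module), assuming plain unittest.mock with an explicit ExitStack for cleanup:

    from collections.abc import Iterable
    from contextlib import ExitStack
    from types import SimpleNamespace
    from unittest import mock

    def patch_functions(
        stack: ExitStack, targets_and_names: Iterable[tuple[object, str]]
    ) -> SimpleNamespace:
        # Patch every (target, attribute-name) pair and return the created
        # MagicMocks keyed by name, e.g. mocks.create_message
        return SimpleNamespace(
            **{
                name: stack.enter_context(mock.patch.object(target, name))
                for target, name in targets_and_names
            }
        )

Closing the stack undoes every patch, which is what pytest-mock's `mocker` does automatically at test teardown.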
message_id=uuid4(), + conversation_id=uuid4(), # Convert string to UUID + user_group_id=1, # Default primary group ID + content="Test message content", + type=ConversationMessageType.MESSAGE, + created=datetime.now(tz=UTC), + modified=datetime.now(tz=UTC), + ) + mocks.create_message.return_value = mock_message + + assert client.app + create_url = client.app.router["create_conversation_message"].url_for( + conversation_id=conversation_id + ) + + # Test creating a message + body = {"content": "Test message content", "type": "MESSAGE"} + resp = await client.post(f"{create_url}", json=body) + data, _ = await assert_status(resp, expected) + assert ConversationMessageRestGet.model_validate(data) + assert data["content"] == "Test message content" + assert data["type"] == "MESSAGE" + first_message_id = data["messageId"] + + assert mocks.create_message.call_count == 1 + + # Mock the list_messages_for_conversation function + mocks.list_messages_for_conversation.return_value = (1, [mock_message]) + + # Test listing messages + list_url = client.app.router["list_conversation_messages"].url_for( + conversation_id=conversation_id + ) + resp = await client.get(f"{list_url}") + data, _, meta, links = await assert_status( + resp, + status.HTTP_200_OK, + include_meta=True, + include_links=True, + ) + assert len(data) == 1 + assert meta["total"] == 1 + assert links + + assert mocks.list_messages_for_conversation.call_count == 1 + + return first_message_id + + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.USER, status.HTTP_200_OK), + ], +) +async def test_conversation_messages_get_update_delete( + client: TestClient, + logged_user: UserInfoDict, + expected: HTTPStatus, + conversation_id: str, + mock_functions_factory: Callable[[Iterable[tuple[object, str]]], SimpleNamespace], +): + """Test getting, updating, and deleting messages in a support conversation""" + mocks = mock_functions_factory( + [ + (_conversation_message_service, "create_message"), + (_conversation_message_service, "get_message"), + (_conversation_message_service, "update_message"), + (_conversation_message_service, "delete_message"), + ] + ) + + # Create a test message first + message_id = uuid4() + mock_message = ConversationMessageGetDB( + message_id=message_id, + conversation_id=uuid4(), # Convert string to UUID + user_group_id=1, # Default primary group ID + content="Original message content", + type=ConversationMessageType.MESSAGE, + created=datetime.now(tz=UTC), + modified=datetime.now(tz=UTC), + ) + mocks.create_message.return_value = mock_message + + assert client.app + create_url = client.app.router["create_conversation_message"].url_for( + conversation_id=conversation_id + ) + + # Create a message + body = {"content": "Original message content", "type": "MESSAGE"} + resp = await client.post(f"{create_url}", json=body) + data, _ = await assert_status(resp, status.HTTP_201_CREATED) + created_message_id = data["messageId"] + + # Mock get_message + mocks.get_message.return_value = mock_message + + # Test getting a specific message + get_url = client.app.router["get_conversation_message"].url_for( + conversation_id=conversation_id, message_id=created_message_id + ) + resp = await client.get(f"{get_url}") + data, _ = await assert_status(resp, expected) + assert data["messageId"] == str(message_id) + assert data["content"] == "Original message content" + + assert mocks.get_message.call_count == 1 + + # Mock update_message + updated_mock_message = ConversationMessageGetDB( + message_id=message_id, + 
conversation_id=uuid4(), # Convert string to UUID + user_group_id=1, # Default primary group ID + content="Updated message content", + type=ConversationMessageType.MESSAGE, + created=datetime.now(tz=UTC), + modified=datetime.now(tz=UTC), + ) + mocks.update_message.return_value = updated_mock_message + + # Test updating a message + update_url = client.app.router["update_conversation_message"].url_for( + conversation_id=conversation_id, message_id=created_message_id + ) + updated_content = "Updated message content" + resp = await client.put( + f"{update_url}", + json={"content": updated_content}, + ) + data, _ = await assert_status(resp, expected) + assert data["content"] == updated_content + + assert mocks.update_message.call_count == 1 + + # Test deleting a message + delete_url = client.app.router["delete_conversation_message"].url_for( + conversation_id=conversation_id, message_id=created_message_id + ) + resp = await client.delete(f"{delete_url}") + await assert_status(resp, status.HTTP_204_NO_CONTENT) + + assert mocks.delete_message.call_count == 1 + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_conversation_messages_pagination( + client: TestClient, + logged_user: UserInfoDict, + conversation_id: str, + mock_functions_factory: Callable[[Iterable[tuple[object, str]]], SimpleNamespace], +): + """Test pagination of conversation messages list""" + mocks = mock_functions_factory( + [ + (_conversation_message_service, "list_messages_for_conversation"), + ] + ) + + # Mock multiple messages + mock_messages = [] + for i in range(5): + mock_message = ConversationMessageGetDB( + message_id=uuid4(), + conversation_id=uuid4(), # Convert string to UUID + user_group_id=1, # Default primary group ID + content=f"Message {i+1}", + type=ConversationMessageType.MESSAGE, + created=datetime.now(tz=UTC), + modified=datetime.now(tz=UTC), + ) + mock_messages.append(mock_message) + + # Mock pagination with limit=3 + mocks.list_messages_for_conversation.return_value = (5, mock_messages[:3]) + + assert client.app + list_url = client.app.router["list_conversation_messages"].url_for( + conversation_id=conversation_id + ) + + # Test pagination with limit + resp = await client.get(f"{list_url}?limit=3") + data, _, meta, links = await assert_status( + resp, + status.HTTP_200_OK, + include_meta=True, + include_links=True, + ) + assert len(data) == 3 + assert meta["total"] == 5 + assert links + + assert mocks.list_messages_for_conversation.call_count == 1 + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_conversation_messages_validation_errors( + client: TestClient, + logged_user: UserInfoDict, + conversation_id: str, +): + """Test validation errors for conversation messages""" + assert client.app + create_url = client.app.router["create_conversation_message"].url_for( + conversation_id=conversation_id + ) + + # Test creating message with missing content + body = {"type": "MESSAGE"} + resp = await client.post(f"{create_url}", json=body) + await assert_status(resp, status.HTTP_422_UNPROCESSABLE_ENTITY) + + # Test creating message with missing type + body = {"content": "Test message"} + resp = await client.post(f"{create_url}", json=body) + await assert_status(resp, status.HTTP_422_UNPROCESSABLE_ENTITY) + + # Test creating message with invalid type + body = {"content": "Test message", "type": "INVALID_TYPE"} + resp = await client.post(f"{create_url}", json=body) + await assert_status(resp, status.HTTP_422_UNPROCESSABLE_ENTITY) + + # Test creating message with empty 
content + body = {"content": "", "type": "MESSAGE"} + resp = await client.post(f"{create_url}", json=body) + await assert_status(resp, status.HTTP_201_CREATED) + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_conversation_messages_different_types( + client: TestClient, + logged_user: UserInfoDict, + conversation_id: str, + mock_functions_factory: Callable[[Iterable[tuple[object, str]]], SimpleNamespace], +): + """Test creating messages with different message types""" + mocks = mock_functions_factory( + [ + (_conversation_message_service, "create_message"), + ] + ) + + assert client.app + create_url = client.app.router["create_conversation_message"].url_for( + conversation_id=conversation_id + ) + + # Test USER_MESSAGE type + user_message = ConversationMessageGetDB( + message_id=uuid4(), + conversation_id=uuid4(), # Convert string to UUID + user_group_id=1, # Default primary group ID + content="User message", + type=ConversationMessageType.MESSAGE, + created=datetime.now(tz=UTC), + modified=datetime.now(tz=UTC), + ) + mocks.create_message.return_value = user_message + + body = {"content": "User message", "type": "MESSAGE"} + resp = await client.post(f"{create_url}", json=body) + data, _ = await assert_status(resp, status.HTTP_201_CREATED) + assert data["type"] == "MESSAGE" + + # Test NOTIFICATION type + system_message = ConversationMessageGetDB( + message_id=uuid4(), + conversation_id=uuid4(), # Convert string to UUID + user_group_id=1, # Default primary group ID + content="System message", + type=ConversationMessageType.NOTIFICATION, + created=datetime.now(tz=UTC), + modified=datetime.now(tz=UTC), + ) + mocks.create_message.return_value = system_message + + body = {"content": "System message", "type": "NOTIFICATION"} + resp = await client.post(f"{create_url}", json=body) + data, _ = await assert_status(resp, status.HTTP_201_CREATED) + assert data["type"] == "NOTIFICATION" + + assert mocks.create_message.call_count == 2 + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_conversation_messages_nonexistent_resources( + client: TestClient, + logged_user: UserInfoDict, + mock_functions_factory: Callable[[Iterable[tuple[object, str]]], SimpleNamespace], +): + """Test operations on nonexistent conversations and messages""" + mocks = mock_functions_factory( + [ + (_conversation_message_service, "get_message"), + (_conversation_message_service, "update_message"), + (_conversation_message_service, "delete_message"), + ] + ) + + # Import the exception that should be raised + from simcore_service_webserver.conversations.errors import ( + ConversationErrorNotFoundError, + ) + + # Mock service to raise ConversationErrorNotFoundError + mocks.get_message.side_effect = ConversationErrorNotFoundError( + conversation_id="nonexistent" + ) + mocks.update_message.side_effect = ConversationErrorNotFoundError( + conversation_id="nonexistent" + ) + mocks.delete_message.side_effect = ConversationErrorNotFoundError( + conversation_id="nonexistent" + ) + + nonexistent_conversation_id = "00000000-0000-0000-0000-000000000000" + nonexistent_message_id = "00000000-0000-0000-0000-000000000001" + + assert client.app + + # Test getting message from nonexistent conversation + get_url = client.app.router["get_conversation_message"].url_for( + conversation_id=nonexistent_conversation_id, message_id=nonexistent_message_id + ) + resp = await client.get(f"{get_url}") + await assert_status(resp, status.HTTP_404_NOT_FOUND) + + # Test updating message in nonexistent conversation + 
update_url = client.app.router["update_conversation_message"].url_for( + conversation_id=nonexistent_conversation_id, message_id=nonexistent_message_id + ) + resp = await client.put(f"{update_url}", json={"content": "Updated content"}) + await assert_status(resp, status.HTTP_404_NOT_FOUND) + + # Test deleting message from nonexistent conversation + delete_url = client.app.router["delete_conversation_message"].url_for( + conversation_id=nonexistent_conversation_id, message_id=nonexistent_message_id + ) + resp = await client.delete(f"{delete_url}") + await assert_status(resp, status.HTTP_404_NOT_FOUND) + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_conversation_messages_with_database( + client: TestClient, + logged_user: UserInfoDict, + mocker: MockerFixture, +): + """Test conversation messages with direct database interaction""" + # Mock the email service to verify it's called for first message + mock_send_email = mocker.patch( + "simcore_service_webserver.email.email_service.send_email_from_template" + ) + mocker.patch("simcore_service_webserver.products.products_web.get_current_product") + assert client.app + + # Create a conversation directly via API (no mocks) + base_url = client.app.router["list_conversations"].url_for() + body = {"name": "Database Test Conversation", "type": "SUPPORT"} + resp = await client.post(f"{base_url}", json=body) + data, _ = await assert_status(resp, status.HTTP_201_CREATED) + conversation_id = data["conversationId"] + + # Verify the conversation was created + assert conversation_id is not None + assert data["name"] == "Database Test Conversation" + assert data["type"] == "SUPPORT" + + # Create a message in the conversation + create_message_url = client.app.router["create_conversation_message"].url_for( + conversation_id=conversation_id + ) + message_body = {"content": "Hello from database test", "type": "MESSAGE"} + resp = await client.post(f"{create_message_url}", json=message_body) + message_data, _ = await assert_status(resp, status.HTTP_201_CREATED) + + # Verify the message was created + assert message_data["messageId"] is not None + assert message_data["content"] == "Hello from database test" + assert message_data["type"] == "MESSAGE" + assert message_data["conversationId"] == conversation_id + + # Verify email was sent for first message + assert mock_send_email.call_count == 1 + + # Create a second message + second_message_body = {"content": "Second message", "type": "MESSAGE"} + resp = await client.post(f"{create_message_url}", json=second_message_body) + second_message_data, _ = await assert_status(resp, status.HTTP_201_CREATED) + + # Verify the second message was created + assert second_message_data["messageId"] is not None + assert second_message_data["content"] == "Second message" + assert second_message_data["type"] == "MESSAGE" + assert second_message_data["conversationId"] == conversation_id + + # Verify email was NOT sent again for second message (still only 1 call) + assert mock_send_email.call_count == 1 diff --git a/services/web/server/tests/unit/with_dbs/04/conversations/test_conversations_rest.py b/services/web/server/tests/unit/with_dbs/04/conversations/test_conversations_rest.py new file mode 100644 index 000000000000..10a6a9d4c3e3 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/04/conversations/test_conversations_rest.py @@ -0,0 +1,451 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: 
disable=unused-variable +# pylint: disable=too-many-statements + + +from collections.abc import Callable, Iterable +from http import HTTPStatus +from types import SimpleNamespace + +import pytest +from aiohttp.test_utils import TestClient +from models_library.api_schemas_webserver.conversations import ConversationRestGet +from pytest_mock import MockerFixture +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.webserver_login import LoggedUser, UserInfoDict +from servicelib.aiohttp import status +from simcore_service_webserver.conversations import _conversation_service +from simcore_service_webserver.db.models import UserRole +from simcore_service_webserver.projects.models import ProjectDict + + +@pytest.fixture +def mock_functions_factory( + mocker: MockerFixture, +) -> Callable[[Iterable[tuple[object, str]]], SimpleNamespace]: + def _patch(targets_and_names: Iterable[tuple[object, str]]) -> SimpleNamespace: + return SimpleNamespace( + **{ + name: mocker.patch.object(target, name) + for target, name in targets_and_names + } + ) + + return _patch + + +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.ANONYMOUS, status.HTTP_401_UNAUTHORIZED), + (UserRole.GUEST, status.HTTP_200_OK), + (UserRole.USER, status.HTTP_200_OK), + (UserRole.TESTER, status.HTTP_200_OK), + ], +) +async def test_conversations_user_role_access( + client: TestClient, + logged_user: UserInfoDict, + user_role: UserRole, + expected: HTTPStatus, +): + assert client.app + base_url = client.app.router["list_conversations"].url_for() + resp = await client.get(f"{base_url}?type=SUPPORT") + assert resp.status == (401 if user_role == UserRole.ANONYMOUS else 200) + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +@pytest.mark.parametrize( + "conversation_type,expected_status", + [ + ("SUPPORT", status.HTTP_200_OK), + ("PROJECT_STATIC", status.HTTP_422_UNPROCESSABLE_ENTITY), + ("PROJECT_ANNOTATION", status.HTTP_422_UNPROCESSABLE_ENTITY), + ], +) +async def test_list_conversations_type_validation( + client: TestClient, + logged_user: UserInfoDict, + conversation_type: str, + expected_status: HTTPStatus, +): + """Test that only SUPPORT type conversations are allowed""" + assert client.app + base_url = client.app.router["list_conversations"].url_for() + resp = await client.get(f"{base_url}?type={conversation_type}") + if expected_status == status.HTTP_200_OK: + await assert_status(resp, expected_status) + else: + # Should get validation error for non-SUPPORT types + assert resp.status == expected_status + + +@pytest.mark.acceptance_test( + "https://github.com/ITISFoundation/private-issues/issues/51" +) +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.USER, status.HTTP_200_OK), + ], +) +async def test_conversations_create_and_list( + client: TestClient, + logged_user: UserInfoDict, + expected: HTTPStatus, + mock_functions_factory: Callable[[Iterable[tuple[object, str]]], SimpleNamespace], +): + """Test creating and listing support conversations""" + mocks = mock_functions_factory( + [ + (_conversation_service, "notify_conversation_created"), + ] + ) + + base_url = client.app.router["list_conversations"].url_for() + + # Test listing empty conversations initially + resp = await client.get(f"{base_url}?type=SUPPORT") + data, _, meta, links = await assert_status( + resp, + expected, + include_meta=True, + include_links=True, + ) + assert data == [] + assert meta["total"] == 0 + assert links + + # Test creating a support conversation + body = {"name": "Support
Request - Bug Report", "type": "SUPPORT"} + resp = await client.post(f"{base_url}", json=body) + data, _ = await assert_status( + resp, + status.HTTP_201_CREATED, + ) + assert ConversationRestGet.model_validate(data) + first_conversation_id = data["conversationId"] + assert data["name"] == "Support Request - Bug Report" + assert data["type"] == "SUPPORT" + + assert mocks.notify_conversation_created.call_count == 0 + + # Test creating a second support conversation + body = {"name": "Support Request - Feature Request", "type": "SUPPORT"} + resp = await client.post(f"{base_url}", json=body) + data, _ = await assert_status( + resp, + status.HTTP_201_CREATED, + ) + assert ConversationRestGet.model_validate(data) + second_conversation_id = data["conversationId"] + + assert mocks.notify_conversation_created.call_count == 0 + + # Test creating conversation with invalid type should fail + body = {"name": "Invalid Type", "type": "PROJECT_STATIC"} + resp = await client.post(f"{base_url}", json=body) + await assert_status(resp, status.HTTP_400_BAD_REQUEST) + + # Test listing all conversations + resp = await client.get(f"{base_url}?type=SUPPORT") + data, _, meta, links = await assert_status( + resp, + expected, + include_meta=True, + include_links=True, + ) + assert len(data) == 2 + assert meta["total"] == 2 + assert links + + return first_conversation_id, second_conversation_id + + +@pytest.mark.acceptance_test( + "https://github.com/ITISFoundation/private-issues/issues/51" +) +@pytest.mark.parametrize( + "user_role,expected", + [ + (UserRole.USER, status.HTTP_200_OK), + ], +) +async def test_conversations_update_and_delete( + client: TestClient, + logged_user: UserInfoDict, + expected: HTTPStatus, + mock_functions_factory: Callable[[Iterable[tuple[object, str]]], SimpleNamespace], +): + """Test updating and deleting support conversations""" + mocks = mock_functions_factory( + [ + (_conversation_service, "notify_conversation_created"), + (_conversation_service, "notify_conversation_updated"), + (_conversation_service, "notify_conversation_deleted"), + ] + ) + + base_url = client.app.router["list_conversations"].url_for() + + # Create a conversation first + body = {"name": "Support Request - Bug Report", "type": "SUPPORT"} + resp = await client.post(f"{base_url}", json=body) + data, _ = await assert_status(resp, status.HTTP_201_CREATED) + first_conversation_id = data["conversationId"] + + # Test getting a specific conversation + get_url = client.app.router["get_conversation"].url_for( + conversation_id=first_conversation_id + ) + resp = await client.get(f"{get_url}?type=SUPPORT") + data, _ = await assert_status(resp, expected) + assert data["conversationId"] == first_conversation_id + assert data["name"] == "Support Request - Bug Report" + + # Test updating a conversation + update_url = client.app.router["update_conversation"].url_for( + conversation_id=first_conversation_id + ) + updated_name = "Updated Support Request - Bug Report" + resp = await client.patch( + f"{update_url}?type=SUPPORT", + json={"name": updated_name}, + ) + data, _ = await assert_status(resp, expected) + assert data["name"] == updated_name + + assert mocks.notify_conversation_updated.call_count == 0 + + # Verify the update by getting the conversation again + resp = await client.get(f"{get_url}?type=SUPPORT") + data, _ = await assert_status(resp, expected) + assert data["name"] == updated_name + + # Test deleting a conversation + delete_url = client.app.router["delete_conversation"].url_for( + conversation_id=first_conversation_id 
+ ) + resp = await client.delete(f"{delete_url}?type=SUPPORT") + await assert_status(resp, status.HTTP_204_NO_CONTENT) + + assert mocks.notify_conversation_deleted.call_count == 0 + + # Verify deletion by listing conversations + resp = await client.get(f"{base_url}?type=SUPPORT") + data, _, meta = await assert_status(resp, expected, include_meta=True) + assert len(data) == 0 + assert meta["total"] == 0 + + # Test getting deleted conversation should fail + resp = await client.get(f"{get_url}?type=SUPPORT") + await assert_status(resp, status.HTTP_404_NOT_FOUND) + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_conversations_with_extra_context( + client: TestClient, + logged_user: UserInfoDict, +): + """Test creating conversation with extra context""" + base_url = client.app.router["list_conversations"].url_for() + + # Test creating a support conversation with extra context + body = { + "name": "Support Request with Context", + "type": "SUPPORT", + "extraContext": { + "issue_type": "bug", + "priority": "high", + "browser": "Chrome", + "version": "1.0.0", + }, + } + resp = await client.post(f"{base_url}", json=body) + data, _ = await assert_status(resp, status.HTTP_201_CREATED) + assert ConversationRestGet.model_validate(data) + assert data["name"] == "Support Request with Context" + assert data["type"] == "SUPPORT" + assert data["extraContext"] == { + "issue_type": "bug", + "priority": "high", + "browser": "Chrome", + "version": "1.0.0", + } + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_conversations_pagination( + client: TestClient, + logged_user: UserInfoDict, +): + """Test pagination of conversations list""" + base_url = client.app.router["list_conversations"].url_for() + + # Create multiple conversations + for i in range(5): + body = {"name": f"Support Request {i+1}", "type": "SUPPORT"} + resp = await client.post(f"{base_url}", json=body) + await assert_status(resp, status.HTTP_201_CREATED) + + # Test pagination with limit + resp = await client.get(f"{base_url}?type=SUPPORT&limit=3") + data, _, meta, links = await assert_status( + resp, + status.HTTP_200_OK, + include_meta=True, + include_links=True, + ) + assert len(data) == 3 + assert meta["total"] == 5 + assert links + + # Test pagination with offset + resp = await client.get(f"{base_url}?type=SUPPORT&limit=3&offset=3") + data, _, meta = await assert_status( + resp, + status.HTTP_200_OK, + include_meta=True, + ) + assert len(data) == 2 # Remaining items + assert meta["total"] == 5 + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_conversations_access_control( + client: TestClient, + logged_user: UserInfoDict, +): + """Test that users can only access their own support conversations""" + base_url = client.app.router["list_conversations"].url_for() + + # Create a conversation as first user + body = {"name": "User 1 Support Request", "type": "SUPPORT"} + resp = await client.post(f"{base_url}", json=body) + data, _ = await assert_status(resp, status.HTTP_201_CREATED) + conversation_id = data["conversationId"] + + # Login as a different user + async with LoggedUser(client): + # The new user should not see the first user's conversation + resp = await client.get(f"{base_url}?type=SUPPORT") + data, _, meta = await assert_status( + resp, + status.HTTP_200_OK, + include_meta=True, + ) + assert len(data) == 0 + assert meta["total"] == 0 + + # The new user should not be able to access the specific conversation + get_url = client.app.router["get_conversation"].url_for( 
+ conversation_id=conversation_id + ) + resp = await client.get(f"{get_url}?type=SUPPORT") + await assert_status(resp, status.HTTP_404_NOT_FOUND) + + # The new user should not be able to update the conversation + update_url = client.app.router["update_conversation"].url_for( + conversation_id=conversation_id + ) + resp = await client.patch( + f"{update_url}?type=SUPPORT", + json={"name": "Unauthorized update attempt"}, + ) + await assert_status(resp, status.HTTP_404_NOT_FOUND) + + # The new user should not be able to delete the conversation + delete_url = client.app.router["delete_conversation"].url_for( + conversation_id=conversation_id + ) + resp = await client.delete(f"{delete_url}?type=SUPPORT") + await assert_status(resp, status.HTTP_404_NOT_FOUND) + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_conversations_error_handling( + client: TestClient, + logged_user: UserInfoDict, +): + """Test error handling for conversations endpoints""" + base_url = client.app.router["list_conversations"].url_for() + + # Test creating conversation with missing required fields + resp = await client.post(f"{base_url}", json={}) + await assert_status(resp, status.HTTP_422_UNPROCESSABLE_ENTITY) + + # Test creating conversation with invalid type + body = {"name": "Invalid Type Request", "type": "INVALID_TYPE"} + resp = await client.post(f"{base_url}", json=body) + await assert_status(resp, status.HTTP_422_UNPROCESSABLE_ENTITY) + + # Test operations on non-existent conversation + fake_conversation_id = "00000000-0000-0000-0000-000000000000" + + get_url = client.app.router["get_conversation"].url_for( + conversation_id=fake_conversation_id + ) + resp = await client.get(f"{get_url}?type=SUPPORT") + await assert_status(resp, status.HTTP_404_NOT_FOUND) + + update_url = client.app.router["update_conversation"].url_for( + conversation_id=fake_conversation_id + ) + resp = await client.patch( + f"{update_url}?type=SUPPORT", + json={"name": "Update non-existent"}, + ) + await assert_status(resp, status.HTTP_404_NOT_FOUND) + + delete_url = client.app.router["delete_conversation"].url_for( + conversation_id=fake_conversation_id + ) + resp = await client.delete(f"{delete_url}?type=SUPPORT") + await assert_status(resp, status.HTTP_404_NOT_FOUND) + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_conversations_without_type_query_param( + client: TestClient, + logged_user: UserInfoDict, + user_project: ProjectDict, +): + """Test that endpoints require type query parameter""" + + # Create a conversation via project endpoint first + project_id = f"{user_project['uuid']}" + project_conversation_url = client.app.router["create_project_conversation"].url_for( + project_id=f"{project_id}" + ) + body = {"name": "Test Conversation", "type": "PROJECT_STATIC"} + resp = await client.post(f"{project_conversation_url}", json=body) + data, _ = await assert_status(resp, status.HTTP_201_CREATED) + conversation_id = data["conversationId"] + + # Test list endpoint without type parameter should fail + list_url = client.app.router["list_conversations"].url_for() + resp = await client.get(f"{list_url}") + await assert_status(resp, status.HTTP_422_UNPROCESSABLE_ENTITY) + + # All other endpoints should return 400, because we currently support only SUPPORT type + get_url = client.app.router["get_conversation"].url_for( + conversation_id=conversation_id + ) + resp = await client.get(f"{get_url}") + await assert_status(resp, status.HTTP_400_BAD_REQUEST) + + update_url = 
client.app.router["update_conversation"].url_for( + conversation_id=conversation_id + ) + resp = await client.patch(f"{update_url}", json={"name": "Updated"}) + await assert_status(resp, status.HTTP_400_BAD_REQUEST) + + delete_url = client.app.router["delete_conversation"].url_for( + conversation_id=conversation_id + ) + resp = await client.delete(f"{delete_url}") + await assert_status(resp, status.HTTP_400_BAD_REQUEST) diff --git a/services/web/server/tests/unit/with_dbs/04/folders/test_folders.py b/services/web/server/tests/unit/with_dbs/04/folders/test_folders.py index f7acc51f42d3..ed4af3333c0f 100644 --- a/services/web/server/tests/unit/with_dbs/04/folders/test_folders.py +++ b/services/web/server/tests/unit/with_dbs/04/folders/test_folders.py @@ -269,22 +269,13 @@ async def test_project_folder_movement_full_workflow( await assert_status(resp, status.HTTP_204_NO_CONTENT) -@pytest.fixture -def mock_catalog_api_get_services_for_user_in_product(mocker: MockerFixture): - mocker.patch( - "simcore_service_webserver.projects._crud_api_read.catalog_service.get_services_for_user_in_product", - spec=True, - return_value=[], - ) - - @pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)]) async def test_project_listing_inside_of_private_folder( client: TestClient, + mocked_dynamic_services_interface: dict[str, mock.MagicMock], logged_user: UserInfoDict, user_project: ProjectDict, expected: HTTPStatus, - mock_catalog_api_get_services_for_user_in_product: MockerFixture, ): assert client.app @@ -380,10 +371,6 @@ async def test_project_listing_inside_of_private_folder( @pytest.fixture def mock_storage_delete_data_folders(mocker: MockerFixture) -> mock.Mock: - mocker.patch( - "simcore_service_webserver.dynamic_scheduler.api.list_dynamic_services", - autospec=True, - ) mocker.patch( "simcore_service_webserver.projects._projects_service.remove_project_dynamic_services", autospec=True, @@ -400,11 +387,11 @@ def mock_storage_delete_data_folders(mocker: MockerFixture) -> mock.Mock: @pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)]) async def test_folders_deletion( + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, expected: HTTPStatus, - mock_catalog_api_get_services_for_user_in_product: MockerFixture, mock_storage_delete_data_folders: mock.Mock, ): assert client.app diff --git a/services/web/server/tests/unit/with_dbs/04/functions/conftest.py b/services/web/server/tests/unit/with_dbs/04/functions/conftest.py new file mode 100644 index 000000000000..b59d0dc29031 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/04/functions/conftest.py @@ -0,0 +1,330 @@ +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name + + +from collections.abc import AsyncIterator, Awaitable, Callable +from contextlib import AsyncExitStack +from typing import Any +from uuid import UUID, uuid4 + +import pytest +import sqlalchemy +from aiohttp.test_utils import TestClient +from models_library.api_schemas_webserver.functions import ( + Function, + JSONFunctionInputSchema, + JSONFunctionOutputSchema, + ProjectFunction, +) +from models_library.functions import FunctionClass, SolverFunction +from models_library.products import ProductName +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.postgres_tools import insert_and_get_row_lifespan +from pytest_simcore.helpers.typing_env 
import EnvVarsDict +from pytest_simcore.helpers.webserver_login import LoggedUser, UserInfoDict +from servicelib.rabbitmq import RabbitMQRPCClient +from servicelib.rabbitmq.rpc_interfaces.webserver.functions import ( + functions_rpc_interface as functions_rpc, +) +from settings_library.rabbit import RabbitSettings +from simcore_postgres_database.models.funcapi_api_access_rights_table import ( + funcapi_api_access_rights_table, +) +from simcore_postgres_database.models.funcapi_function_jobs_table import ( + function_jobs_table, +) +from simcore_postgres_database.models.funcapi_functions_access_rights_table import ( + functions_access_rights_table, +) +from simcore_postgres_database.models.funcapi_functions_table import functions_table +from simcore_service_webserver.application_settings import ApplicationSettings +from simcore_service_webserver.statics._constants import FRONTEND_APP_DEFAULT +from sqlalchemy.ext.asyncio import AsyncEngine + + +@pytest.fixture +def app_environment( + rabbit_service: RabbitSettings, + app_environment: EnvVarsDict, + monkeypatch: pytest.MonkeyPatch, +) -> EnvVarsDict: + new_envs = setenvs_from_dict( + monkeypatch, + { + **app_environment, + "RABBIT_HOST": rabbit_service.RABBIT_HOST, + "RABBIT_PORT": f"{rabbit_service.RABBIT_PORT}", + "RABBIT_USER": rabbit_service.RABBIT_USER, + "RABBIT_SECURE": f"{rabbit_service.RABBIT_SECURE}", + "RABBIT_PASSWORD": rabbit_service.RABBIT_PASSWORD.get_secret_value(), + "WEBSERVER_DEV_FEATURES_ENABLED": "1", + "WEBSERVER_FUNCTIONS": "1", + }, + ) + + settings = ApplicationSettings.create_from_envs() + assert settings.WEBSERVER_RABBITMQ + + return new_envs + + +@pytest.fixture +async def rpc_client( + rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], +) -> RabbitMQRPCClient: + return await rabbitmq_rpc_client("client") + + +@pytest.fixture +def mock_function_factory() -> Callable[[FunctionClass], Function]: + def _(function_class: FunctionClass) -> Function: + if function_class == FunctionClass.PROJECT: + return ProjectFunction( + title="Test Function", + description="A test function", + input_schema=JSONFunctionInputSchema( + schema_content={ + "type": "object", + "properties": {"input1": {"type": "string"}}, + } + ), + output_schema=JSONFunctionOutputSchema( + schema_content={ + "type": "object", + "properties": {"output1": {"type": "string"}}, + } + ), + project_id=uuid4(), + default_inputs=None, + ) + if function_class == FunctionClass.SOLVER: + return SolverFunction( + title="Test Solver", + description="A test solver", + input_schema=JSONFunctionInputSchema( + schema_content={ + "type": "object", + "properties": {"input1": {"type": "string"}}, + } + ), + output_schema=JSONFunctionOutputSchema( + schema_content={ + "type": "object", + "properties": {"output1": {"type": "string"}}, + } + ), + default_inputs=None, + solver_key="simcore/services/comp/mysolver", + solver_version="1.0.0", + ) + msg = f"Please implement the mock for {function_class=} yourself" + raise AssertionError(msg) + + return _ + + +@pytest.fixture +async def other_logged_user( + client: TestClient, rpc_client: RabbitMQRPCClient +) -> AsyncIterator[UserInfoDict]: + async with LoggedUser(client) as other_user: + yield other_user + + +@pytest.fixture +async def user_without_function_api_access_rights( + client: TestClient, rpc_client: RabbitMQRPCClient +) -> AsyncIterator[UserInfoDict]: + async with LoggedUser(client) as user_without_function_api_access_rights: + yield user_without_function_api_access_rights + + +@pytest.fixture +async def 
clean_functions( + client: TestClient, + rpc_client: RabbitMQRPCClient, + logged_user: UserInfoDict, + osparc_product_name: ProductName, +) -> None: + assert client.app + + functions, _ = await functions_rpc.list_functions( + rabbitmq_rpc_client=rpc_client, + pagination_limit=100, + pagination_offset=0, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + for function in functions: + assert function.uid is not None + await functions_rpc.delete_function( + rabbitmq_rpc_client=rpc_client, + function_id=function.uid, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + +@pytest.fixture +async def clean_function_job_collections( + client: TestClient, + rpc_client: RabbitMQRPCClient, + logged_user: UserInfoDict, + osparc_product_name: ProductName, +) -> None: + assert client.app + + job_collections, _ = await functions_rpc.list_function_job_collections( + rabbitmq_rpc_client=rpc_client, + pagination_limit=100, + pagination_offset=0, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + for function_job_collection in job_collections: + assert function_job_collection.uid is not None + await functions_rpc.delete_function_job_collection( + rabbitmq_rpc_client=rpc_client, + function_job_collection_id=function_job_collection.uid, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + +@pytest.fixture +async def add_user_function_api_access_rights( + asyncpg_engine: AsyncEngine, + logged_user: UserInfoDict, + other_logged_user: UserInfoDict, + request: pytest.FixtureRequest, +) -> AsyncIterator[None]: + if hasattr(request, "param") and not request.param: + yield + return + async with asyncpg_engine.begin() as conn: + for group_id in (logged_user["primary_gid"], other_logged_user["primary_gid"]): + await conn.execute( + funcapi_api_access_rights_table.insert().values( + group_id=group_id, + product_name=FRONTEND_APP_DEFAULT, + read_functions=True, + write_functions=True, + execute_functions=True, + read_function_jobs=True, + write_function_jobs=True, + execute_function_jobs=True, + read_function_job_collections=True, + write_function_job_collections=True, + execute_function_job_collections=True, + ) + ) + yield + async with asyncpg_engine.begin() as conn: + for group_id in (logged_user["primary_gid"], other_logged_user["primary_gid"]): + await conn.execute( + sqlalchemy.delete(funcapi_api_access_rights_table).where( + funcapi_api_access_rights_table.c.group_id == group_id + ) + ) + + +@pytest.fixture +async def logged_user_function_api_access_rights( + asyncpg_engine: AsyncEngine, + logged_user: UserInfoDict, + *, + expected_read_functions: bool, + expected_write_functions: bool, +) -> AsyncIterator[dict[str, Any]]: + cm = insert_and_get_row_lifespan( + asyncpg_engine, + table=funcapi_api_access_rights_table, + values={ + "group_id": logged_user["primary_gid"], + "product_name": FRONTEND_APP_DEFAULT, + "read_functions": expected_read_functions, + "write_functions": expected_write_functions, + "execute_functions": True, + "read_function_jobs": True, + "write_function_jobs": True, + "execute_function_jobs": True, + "read_function_job_collections": True, + "write_function_job_collections": True, + "execute_function_job_collections": True, + }, + pk_col=funcapi_api_access_rights_table.c.group_id, + pk_value=logged_user["primary_gid"], + ) + + async with AsyncExitStack() as stack: + row = await stack.enter_async_context(cm) + yield row + + +@pytest.fixture +async def fake_function_with_associated_job( + asyncpg_engine: AsyncEngine, + 
logged_user: UserInfoDict, +) -> AsyncIterator[UUID]: + async with AsyncExitStack() as stack: + function_row = await stack.enter_async_context( + insert_and_get_row_lifespan( + asyncpg_engine, + table=functions_table, + values={ + "title": "Test Function", + "function_class": FunctionClass.PROJECT.value, + "description": "A test function", + "input_schema": { + "schema_class": "application/schema+json", + "schema_content": { + "type": "object", + "properties": {"input1": {"type": "string"}}, + }, + }, + "output_schema": { + "schema_class": "application/schema+json", + "schema_content": { + "type": "object", + "properties": {"output1": {"type": "string"}}, + }, + }, + "class_specific_data": {"project_id": f"{uuid4()}"}, + }, + pk_col=functions_table.c.uuid, + ) + ) + + await stack.enter_async_context( + insert_and_get_row_lifespan( + asyncpg_engine, + table=functions_access_rights_table, + values={ + "function_uuid": function_row["uuid"], + "group_id": logged_user["primary_gid"], + "product_name": "osparc", # Default product name + "read": True, + "write": True, + "execute": True, + }, + pk_cols=[ + functions_access_rights_table.c.function_uuid, + functions_access_rights_table.c.group_id, + functions_access_rights_table.c.product_name, + ], + ) + ) + + await stack.enter_async_context( + insert_and_get_row_lifespan( + asyncpg_engine, + table=function_jobs_table, + values={ + "function_uuid": function_row["uuid"], + "status": "pending", + }, + pk_col=function_jobs_table.c.uuid, + ) + ) + yield function_row["uuid"] diff --git a/services/web/server/tests/unit/with_dbs/04/functions_rpc/test_function_job_collections_controller_rpc.py b/services/web/server/tests/unit/with_dbs/04/functions/test_function_job_collections_controller_rpc.py similarity index 87% rename from services/web/server/tests/unit/with_dbs/04/functions_rpc/test_function_job_collections_controller_rpc.py rename to services/web/server/tests/unit/with_dbs/04/functions/test_function_job_collections_controller_rpc.py index d784a9fad3be..1e2bd2d89953 100644 --- a/services/web/server/tests/unit/with_dbs/04/functions_rpc/test_function_job_collections_controller_rpc.py +++ b/services/web/server/tests/unit/with_dbs/04/functions/test_function_job_collections_controller_rpc.py @@ -2,6 +2,7 @@ # pylint: disable=unused-argument import datetime +from collections.abc import Callable from uuid import uuid4 import pytest @@ -10,16 +11,21 @@ from models_library.api_schemas_webserver.functions import ( FunctionIDString, FunctionJobCollection, - ProjectFunction, ProjectFunctionJob, ) -from models_library.functions import FunctionJobCollectionsListFilters +from models_library.functions import ( + Function, + FunctionClass, + FunctionJobCollectionsListFilters, +) from models_library.functions_errors import ( FunctionJobCollectionReadAccessDeniedError, + FunctionJobCollectionsReadApiAccessDeniedError, + FunctionJobCollectionWriteAccessDeniedError, FunctionJobIDNotFoundError, ) from models_library.products import ProductName -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.rabbitmq import RabbitMQRPCClient from servicelib.rabbitmq.rpc_interfaces.webserver.functions import ( functions_rpc_interface as functions_rpc, @@ -34,16 +40,18 @@ ) async def test_function_job_collection( client: TestClient, - mock_function: ProjectFunction, + add_user_function_api_access_rights: None, + mock_function_factory: Callable[[FunctionClass], Function], rpc_client: 
RabbitMQRPCClient, logged_user: UserInfoDict, other_logged_user: UserInfoDict, + user_without_function_api_access_rights: UserInfoDict, osparc_product_name: ProductName, ): # Register the function first registered_function = await functions_rpc.register_function( rabbitmq_rpc_client=rpc_client, - function=mock_function, + function=mock_function_factory(FunctionClass.PROJECT), user_id=logged_user["id"], product_name=osparc_product_name, ) @@ -56,6 +64,7 @@ async def test_function_job_collection( project_job_id=uuid4(), inputs={"input1": "value1"}, outputs={"output1": "result1"}, + job_creation_task_id=None, ) # Register the function job function_job_ids = [] @@ -67,6 +76,7 @@ async def test_function_job_collection( project_job_id=uuid4(), inputs={"input1": "value1"}, outputs={"output1": "result1"}, + job_creation_task_id=None, ) # Register the function job registered_job = await functions_rpc.register_function_job( @@ -115,8 +125,17 @@ async def test_function_job_collection( product_name=osparc_product_name, ) + # Test denied access for another user + with pytest.raises(FunctionJobCollectionsReadApiAccessDeniedError): + await functions_rpc.get_function_job_collection( + rabbitmq_rpc_client=rpc_client, + function_job_collection_id=registered_collection.uid, + user_id=user_without_function_api_access_rights["id"], + product_name=osparc_product_name, + ) + # Test denied access for another product - with pytest.raises(FunctionJobCollectionReadAccessDeniedError): + with pytest.raises(FunctionJobCollectionsReadApiAccessDeniedError): await functions_rpc.get_function_job_collection( rabbitmq_rpc_client=rpc_client, function_job_collection_id=registered_collection.uid, @@ -125,7 +144,7 @@ async def test_function_job_collection( ) # Attempt to delete the function job collection by another user - with pytest.raises(FunctionJobCollectionReadAccessDeniedError): + with pytest.raises(FunctionJobCollectionWriteAccessDeniedError): await functions_rpc.delete_function_job_collection( rabbitmq_rpc_client=rpc_client, function_job_collection_id=registered_collection.uid, @@ -155,7 +174,8 @@ async def test_function_job_collection( ) async def test_list_function_job_collections( client: TestClient, - mock_function: ProjectFunction, + add_user_function_api_access_rights: None, + mock_function_factory: Callable[[FunctionClass], Function], rpc_client: RabbitMQRPCClient, clean_functions: None, clean_function_job_collections: None, @@ -180,7 +200,7 @@ async def test_list_function_job_collections( # Register the function first registered_function = await functions_rpc.register_function( rabbitmq_rpc_client=rpc_client, - function=mock_function, + function=mock_function_factory(FunctionClass.PROJECT), user_id=logged_user["id"], product_name=osparc_product_name, ) @@ -196,6 +216,7 @@ async def test_list_function_job_collections( project_job_id=uuid4(), inputs={"input1": "value1"}, outputs={"output1": "result1"}, + job_creation_task_id=None, ) # Register the function job registered_job = await functions_rpc.register_function_job( @@ -256,8 +277,9 @@ async def test_list_function_job_collections( ) async def test_list_function_job_collections_filtered_function_id( client: TestClient, + add_user_function_api_access_rights: None, rpc_client: RabbitMQRPCClient, - mock_function: ProjectFunction, + mock_function_factory: Callable[[FunctionClass], Function], clean_functions: None, clean_function_job_collections: None, logged_user: UserInfoDict, @@ -266,13 +288,13 @@ async def test_list_function_job_collections_filtered_function_id( 
# Register the function first registered_function = await functions_rpc.register_function( rabbitmq_rpc_client=rpc_client, - function=mock_function, + function=mock_function_factory(FunctionClass.PROJECT), user_id=logged_user["id"], product_name=osparc_product_name, ) other_registered_function = await functions_rpc.register_function( rabbitmq_rpc_client=rpc_client, - function=mock_function, + function=mock_function_factory(FunctionClass.PROJECT), user_id=logged_user["id"], product_name=osparc_product_name, ) @@ -293,6 +315,7 @@ async def test_list_function_job_collections_filtered_function_id( project_job_id=uuid4(), inputs={"input1": "value1"}, outputs={"output1": "result1"}, + job_creation_task_id=None, ) # Register the function job registered_job = await functions_rpc.register_function_job( diff --git a/services/web/server/tests/unit/with_dbs/04/functions/test_function_jobs_controller_rpc.py b/services/web/server/tests/unit/with_dbs/04/functions/test_function_jobs_controller_rpc.py new file mode 100644 index 000000000000..e20c5589d087 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/04/functions/test_function_jobs_controller_rpc.py @@ -0,0 +1,840 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=too-many-arguments + +import datetime +from collections.abc import Callable +from uuid import uuid4 + +import pytest +from aiohttp.test_utils import TestClient +from common_library.users_enums import UserRole +from faker import Faker +from models_library.api_schemas_webserver.functions import ProjectFunctionJob +from models_library.functions import ( + Function, + FunctionClass, + FunctionJobCollection, + FunctionJobStatus, + RegisteredFunctionJob, + RegisteredFunctionJobPatch, + RegisteredProjectFunctionJobPatch, + RegisteredSolverFunctionJobPatch, + SolverFunctionJob, +) +from models_library.functions_errors import ( + FunctionJobIDNotFoundError, + FunctionJobPatchModelIncompatibleError, + FunctionJobReadAccessDeniedError, + FunctionJobsReadApiAccessDeniedError, + FunctionJobWriteAccessDeniedError, +) +from models_library.products import ProductName +from models_library.projects import ProjectID +from pytest_simcore.helpers.webserver_users import UserInfoDict +from servicelib.celery.models import TaskID +from servicelib.rabbitmq import RabbitMQRPCClient +from servicelib.rabbitmq.rpc_interfaces.webserver.functions import ( + functions_rpc_interface as functions_rpc, +) + +pytest_simcore_core_services_selection = ["rabbit"] + + +_faker = Faker() + + +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +async def test_register_get_delete_function_job( + client: TestClient, + add_user_function_api_access_rights: None, + rpc_client: RabbitMQRPCClient, + mock_function_factory: Callable[[FunctionClass], Function], + logged_user: UserInfoDict, + other_logged_user: UserInfoDict, + osparc_product_name: ProductName, +): + # Register the function first + registered_function = await functions_rpc.register_function( + rabbitmq_rpc_client=rpc_client, + function=mock_function_factory(FunctionClass.PROJECT), + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + assert registered_function.uid is not None + + function_job = ProjectFunctionJob( + function_uid=registered_function.uid, + title="Test Function Job", + description="A test function job", + project_job_id=uuid4(), + inputs={"input1": "value1"}, + outputs={"output1": "result1"}, + job_creation_task_id=None, + ) + + # Register the function job + registered_job = await 
functions_rpc.register_function_job( + rabbitmq_rpc_client=rpc_client, + function_job=function_job, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + # Assert the registered job matches the input job + assert registered_job.function_uid == function_job.function_uid + assert registered_job.inputs == function_job.inputs + assert registered_job.outputs == function_job.outputs + assert registered_job.created_at - datetime.datetime.now( + datetime.UTC + ) < datetime.timedelta(seconds=60) + + # Retrieve the function job using its ID + retrieved_job = await functions_rpc.get_function_job( + rabbitmq_rpc_client=rpc_client, + function_job_id=registered_job.uid, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + # Assert the retrieved job matches the registered job + assert retrieved_job.function_uid == registered_job.function_uid + assert retrieved_job.inputs == registered_job.inputs + assert retrieved_job.outputs == registered_job.outputs + + # Test denied access for another user + with pytest.raises(FunctionJobReadAccessDeniedError): + await functions_rpc.get_function_job( + rabbitmq_rpc_client=rpc_client, + function_job_id=registered_job.uid, + user_id=other_logged_user["id"], + product_name=osparc_product_name, + ) + + # Test denied access for another product + with pytest.raises(FunctionJobsReadApiAccessDeniedError): + await functions_rpc.get_function_job( + rabbitmq_rpc_client=rpc_client, + function_job_id=registered_job.uid, + user_id=other_logged_user["id"], + product_name="this_is_not_osparc", + ) + + with pytest.raises(FunctionJobWriteAccessDeniedError): + # Attempt to delete the function job by another user + await functions_rpc.delete_function_job( + rabbitmq_rpc_client=rpc_client, + function_job_id=registered_job.uid, + user_id=other_logged_user["id"], + product_name=osparc_product_name, + ) + + # Delete the function job using its ID + await functions_rpc.delete_function_job( + rabbitmq_rpc_client=rpc_client, + function_job_id=registered_job.uid, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + # Attempt to retrieve the deleted job + with pytest.raises(FunctionJobIDNotFoundError): + await functions_rpc.get_function_job( + rabbitmq_rpc_client=rpc_client, + function_job_id=registered_job.uid, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +async def test_get_function_job_not_found( + client: TestClient, + add_user_function_api_access_rights: None, + rpc_client: RabbitMQRPCClient, + logged_user: UserInfoDict, + osparc_product_name: ProductName, + clean_functions: None, +): + # Attempt to retrieve a function job that does not exist + with pytest.raises(FunctionJobIDNotFoundError): + await functions_rpc.get_function_job( + rabbitmq_rpc_client=rpc_client, + function_job_id=uuid4(), + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +async def test_list_function_jobs( + client: TestClient, + add_user_function_api_access_rights: None, + rpc_client: RabbitMQRPCClient, + mock_function_factory: Callable[[FunctionClass], Function], + logged_user: UserInfoDict, + osparc_product_name: ProductName, +): + # Register the function first + registered_function = await functions_rpc.register_function( + rabbitmq_rpc_client=rpc_client, + function=mock_function_factory(FunctionClass.PROJECT), + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + 
assert registered_function.uid is not None + + function_job = ProjectFunctionJob( + function_uid=registered_function.uid, + title="Test Function Job", + description="A test function job", + project_job_id=uuid4(), + inputs={"input1": "value1"}, + outputs={"output1": "result1"}, + job_creation_task_id=None, + ) + + # Register the function job + registered_job = await functions_rpc.register_function_job( + rabbitmq_rpc_client=rpc_client, + function_job=function_job, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + # List function jobs + jobs, _ = await functions_rpc.list_function_jobs( + rabbitmq_rpc_client=rpc_client, + pagination_limit=10, + pagination_offset=0, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + # Assert the list contains the registered job + assert len(jobs) > 0 + assert any(j.uid == registered_job.uid for j in jobs) + + +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +async def test_list_function_jobs_with_status( + client: TestClient, + add_user_function_api_access_rights: None, + rpc_client: RabbitMQRPCClient, + mock_function_factory: Callable[[FunctionClass], Function], + logged_user: UserInfoDict, + osparc_product_name: ProductName, +): + # Register the function first + registered_function = await functions_rpc.register_function( + rabbitmq_rpc_client=rpc_client, + function=mock_function_factory(FunctionClass.PROJECT), + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + assert registered_function.uid is not None + + function_job = ProjectFunctionJob( + function_uid=registered_function.uid, + title="Test Function Job", + description="A test function job", + project_job_id=uuid4(), + inputs={"input1": "value1"}, + outputs={"output1": "result1"}, + job_creation_task_id=None, + ) + + # Register the function job + registered_job = await functions_rpc.register_function_job( + rabbitmq_rpc_client=rpc_client, + function_job=function_job, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + # List function jobs + jobs, _ = await functions_rpc.list_function_jobs_with_status( + rpc_client=rpc_client, + pagination_limit=10, + pagination_offset=0, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + # Assert the list contains the registered job + assert len(jobs) > 0 + assert jobs[0].status.status == "created" + assert any(j.uid == registered_job.uid for j in jobs) + + +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +async def test_list_function_jobs_filtering( + client: TestClient, + rpc_client: RabbitMQRPCClient, + mock_function_factory: Callable[[FunctionClass], Function], + logged_user: UserInfoDict, + osparc_product_name: ProductName, + add_user_function_api_access_rights: None, +): + # Register the function first + first_registered_function = await functions_rpc.register_function( + rabbitmq_rpc_client=rpc_client, + function=mock_function_factory(FunctionClass.PROJECT), + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + second_registered_function = await functions_rpc.register_function( + rabbitmq_rpc_client=rpc_client, + function=mock_function_factory(FunctionClass.PROJECT), + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + first_registered_function_jobs = [] + second_registered_function_jobs = [] + for i_job in range(6): + if i_job < 3: + function_job = ProjectFunctionJob( + function_uid=first_registered_function.uid, + title="Test Function Job", + description="A test function job", + 
project_job_id=uuid4(), + inputs={"input1": "value1"}, + outputs={"output1": "result1"}, + job_creation_task_id=None, + ) + # Register the function job + first_registered_function_jobs.append( + await functions_rpc.register_function_job( + rabbitmq_rpc_client=rpc_client, + function_job=function_job, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + ) + else: + function_job = ProjectFunctionJob( + function_uid=second_registered_function.uid, + title="Test Function Job", + description="A test function job", + project_job_id=uuid4(), + inputs={"input1": "value1"}, + outputs={"output1": "result1"}, + job_creation_task_id=None, + ) + # Register the function job + second_registered_function_jobs.append( + await functions_rpc.register_function_job( + rabbitmq_rpc_client=rpc_client, + function_job=function_job, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + ) + + function_job_collection = await functions_rpc.register_function_job_collection( + rabbitmq_rpc_client=rpc_client, + function_job_collection=FunctionJobCollection( + job_ids=[ + job.uid + for job in first_registered_function_jobs[1:2] + + second_registered_function_jobs[0:1] + ] + ), + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + # List function jobs for a specific function ID + jobs, _ = await functions_rpc.list_function_jobs( + rabbitmq_rpc_client=rpc_client, + pagination_limit=10, + pagination_offset=0, + filter_by_function_id=first_registered_function.uid, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + # Assert the list contains the registered job + assert len(jobs) == 3 + assert all(j.function_uid == first_registered_function.uid for j in jobs) + + # List function jobs for a specific function job IDs + jobs, _ = await functions_rpc.list_function_jobs( + rabbitmq_rpc_client=rpc_client, + pagination_limit=10, + pagination_offset=0, + filter_by_function_job_ids=[ + job.uid + for job in first_registered_function_jobs[0:1] + + second_registered_function_jobs[1:2] + ], + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + # Assert the list contains the registered job + assert len(jobs) == 2 + assert jobs[0].uid == first_registered_function_jobs[0].uid + assert jobs[1].uid == second_registered_function_jobs[1].uid + + # List function jobs for a specific function job collection + jobs, _ = await functions_rpc.list_function_jobs( + rabbitmq_rpc_client=rpc_client, + pagination_limit=10, + pagination_offset=0, + filter_by_function_job_collection_id=function_job_collection.uid, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + # Assert the list contains the registered job + assert len(jobs) == 2 + assert jobs[0].uid == first_registered_function_jobs[1].uid + assert jobs[1].uid == second_registered_function_jobs[0].uid + + # List function jobs for a specific function job collection and function job id + jobs, _ = await functions_rpc.list_function_jobs( + rabbitmq_rpc_client=rpc_client, + pagination_limit=10, + pagination_offset=0, + filter_by_function_job_collection_id=function_job_collection.uid, + filter_by_function_job_ids=[first_registered_function_jobs[1].uid], + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + # Assert the list contains the registered job + assert len(jobs) == 1 + assert jobs[0].uid == first_registered_function_jobs[1].uid + + +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +async def test_find_cached_function_jobs( + client: TestClient, + rpc_client: 
RabbitMQRPCClient,
+    add_user_function_api_access_rights: None,
+    logged_user: UserInfoDict,
+    other_logged_user: UserInfoDict,
+    osparc_product_name: ProductName,
+    mock_function_factory: Callable[[FunctionClass], Function],
+    clean_functions: None,
+):
+    # Register the function first
+    registered_function = await functions_rpc.register_function(
+        rabbitmq_rpc_client=rpc_client,
+        function=mock_function_factory(FunctionClass.PROJECT),
+        user_id=logged_user["id"],
+        product_name=osparc_product_name,
+    )
+
+    registered_function_jobs = []
+    for value in range(5):
+        function_job = ProjectFunctionJob(
+            function_uid=registered_function.uid,
+            title="Test Function Job",
+            description="A test function job",
+            project_job_id=uuid4(),
+            inputs={"input1": value if value < 4 else 1},
+            outputs={"output1": "result1"},
+            job_creation_task_id=None,
+        )
+
+        # Register the function job
+        registered_job = await functions_rpc.register_function_job(
+            rabbitmq_rpc_client=rpc_client,
+            function_job=function_job,
+            user_id=logged_user["id"],
+            product_name=osparc_product_name,
+        )
+        registered_function_jobs.append(registered_job)
+
+    # Find cached function jobs
+    cached_jobs = await functions_rpc.find_cached_function_jobs(
+        rabbitmq_rpc_client=rpc_client,
+        function_id=registered_function.uid,
+        inputs={"input1": 1},
+        user_id=logged_user["id"],
+        product_name=osparc_product_name,
+    )
+
+    # Assert the cached jobs contain exactly the two matching registered jobs
+    assert cached_jobs is not None
+    assert len(cached_jobs) == 2
+    assert {job.uid for job in cached_jobs} == {
+        registered_function_jobs[1].uid,
+        registered_function_jobs[4].uid,
+    }
+
+    cached_jobs = await functions_rpc.find_cached_function_jobs(
+        rabbitmq_rpc_client=rpc_client,
+        function_id=registered_function.uid,
+        inputs={"input1": 1},
+        user_id=other_logged_user["id"],
+        product_name=osparc_product_name,
+    )
+
+    # Assert no cached jobs are returned for the other user
+    assert cached_jobs is None
+
+
+@pytest.mark.parametrize(
+    "user_role",
+    [UserRole.USER],
+)
+@pytest.mark.parametrize(
+    "function_job, patch",
+    [
+        (
+            ProjectFunctionJob(
+                function_uid=_faker.uuid4(),
+                title="Test Function Job",
+                description="A test function job",
+                project_job_id=None,
+                inputs=None,
+                outputs=None,
+                job_creation_task_id=None,
+            ),
+            RegisteredProjectFunctionJobPatch(
+                title=_faker.word(),
+                description=_faker.sentence(),
+                project_job_id=ProjectID(_faker.uuid4()),
+                job_creation_task_id=TaskID(_faker.uuid4()),
+                inputs={"input1": _faker.pyint(min_value=0, max_value=1000)},
+                outputs={"output1": _faker.word()},
+            ),
+        ),
+        (
+            SolverFunctionJob(
+                function_uid=_faker.uuid4(),
+                title="Test Function Job",
+                description="A test function job",
+                inputs=None,
+                outputs=None,
+                job_creation_task_id=None,
+                solver_job_id=None,
+            ),
+            RegisteredSolverFunctionJobPatch(
+                title=_faker.word(),
+                description=_faker.sentence(),
+                job_creation_task_id=TaskID(_faker.uuid4()),
+                inputs={"input1": _faker.pyint(min_value=0, max_value=1000)},
+                outputs={"output1": _faker.word()},
+                solver_job_id=_faker.uuid4(),
+            ),
+        ),
+    ],
+)
+async def test_patch_registered_function_jobs(
+    client: TestClient,
+    rpc_client: RabbitMQRPCClient,
+    add_user_function_api_access_rights: None,
+    logged_user: UserInfoDict,
+    other_logged_user: UserInfoDict,
+    osparc_product_name: ProductName,
+    mock_function_factory: Callable[[FunctionClass], Function],
+    clean_functions: None,
+    function_job: RegisteredFunctionJob,
+    patch: RegisteredFunctionJobPatch,
+):
+    function = 
mock_function_factory(function_job.function_class) + + registered_function = await functions_rpc.register_function( + rabbitmq_rpc_client=rpc_client, + function=function, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + # Register the function job + function_job.function_uid = registered_function.uid + registered_job = await functions_rpc.register_function_job( + rabbitmq_rpc_client=rpc_client, + function_job=function_job, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + registered_job = await functions_rpc.patch_registered_function_job( + rpc_client=rpc_client, + user_id=logged_user["id"], + function_job_uuid=registered_job.uid, + product_name=osparc_product_name, + registered_function_job_patch=patch, + ) + assert registered_job.title == patch.title + assert registered_job.description == patch.description + assert registered_job.inputs == patch.inputs + assert registered_job.outputs == patch.outputs + if isinstance(patch, RegisteredProjectFunctionJobPatch): + assert registered_job.function_class == FunctionClass.PROJECT + assert registered_job.job_creation_task_id == patch.job_creation_task_id + assert registered_job.project_job_id == patch.project_job_id + if isinstance(patch, RegisteredSolverFunctionJobPatch): + assert registered_job.function_class == FunctionClass.SOLVER + assert registered_job.job_creation_task_id == patch.job_creation_task_id + assert registered_job.solver_job_id == patch.solver_job_id + + +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +@pytest.mark.parametrize( + "function_job, patch", + [ + ( + ProjectFunctionJob( + function_uid=_faker.uuid4(), + title="Test Function Job", + description="A test function job", + project_job_id=None, + inputs=None, + outputs=None, + job_creation_task_id=None, + ), + RegisteredSolverFunctionJobPatch( + title=_faker.word(), + description=_faker.sentence(), + job_creation_task_id=TaskID(_faker.uuid4()), + inputs={"input1": _faker.pyint(min_value=0, max_value=1000)}, + outputs={"output1": _faker.word()}, + solver_job_id=_faker.uuid4(), + ), + ), + ], +) +async def test_incompatible_patch_model_error( + client: TestClient, + rpc_client: RabbitMQRPCClient, + add_user_function_api_access_rights: None, + logged_user: UserInfoDict, + other_logged_user: UserInfoDict, + osparc_product_name: ProductName, + mock_function_factory: Callable[[FunctionClass], Function], + clean_functions: None, + function_job: RegisteredFunctionJob, + patch: RegisteredFunctionJobPatch, +): + function = mock_function_factory(function_job.function_class) + + registered_function = await functions_rpc.register_function( + rabbitmq_rpc_client=rpc_client, + function=function, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + function_job.function_uid = registered_function.uid + registered_job = await functions_rpc.register_function_job( + rabbitmq_rpc_client=rpc_client, + function_job=function_job, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + with pytest.raises(FunctionJobPatchModelIncompatibleError): + registered_job = await functions_rpc.patch_registered_function_job( + rpc_client=rpc_client, + user_id=logged_user["id"], + function_job_uuid=registered_job.uid, + product_name=osparc_product_name, + registered_function_job_patch=patch, + ) + + +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +@pytest.mark.parametrize( + "access_by_other_user, check_write_permissions, expected_to_raise", + [(False, False, None), (True, True, 
FunctionJobWriteAccessDeniedError)], +) +@pytest.mark.parametrize( + "status_or_output", + ["status", "output"], +) +async def test_update_function_job_status_output( + client: TestClient, + rpc_client: RabbitMQRPCClient, + add_user_function_api_access_rights: None, + logged_user: UserInfoDict, + other_logged_user: UserInfoDict, + mock_function_factory: Callable[[FunctionClass], Function], + osparc_product_name: ProductName, + access_by_other_user: bool, + check_write_permissions: bool, + expected_to_raise: type[Exception] | None, + status_or_output: str, +): + # Register the function first + registered_function = await functions_rpc.register_function( + rabbitmq_rpc_client=rpc_client, + function=mock_function_factory(FunctionClass.PROJECT), + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + function_job = ProjectFunctionJob( + function_uid=registered_function.uid, + title="Test Function Job", + description="A test function job", + project_job_id=uuid4(), + inputs={"input1": "value1"}, + outputs={"output1": "result1"}, + job_creation_task_id=None, + ) + + # Register the function job + registered_job = await functions_rpc.register_function_job( + rabbitmq_rpc_client=rpc_client, + function_job=function_job, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + old_job_status = await functions_rpc.get_function_job_status( + rabbitmq_rpc_client=rpc_client, + function_job_id=registered_job.uid, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + assert old_job_status.status == "created" + + await functions_rpc.set_group_permissions( + rabbitmq_rpc_client=rpc_client, + user_id=logged_user["id"], + product_name=osparc_product_name, + object_type="function_job", + object_ids=[registered_job.uid], + permission_group_id=int(other_logged_user["primary_gid"]), + read=True, + ) + + async def update_job_status_or_output(new_status, new_outputs): + if status_or_output == "status": + return await functions_rpc.update_function_job_status( + rabbitmq_rpc_client=rpc_client, + function_job_id=registered_job.uid, + user_id=( + other_logged_user["id"] + if access_by_other_user + else logged_user["id"] + ), + product_name=osparc_product_name, + job_status=new_status, + check_write_permissions=check_write_permissions, + ) + return await functions_rpc.update_function_job_outputs( + rabbitmq_rpc_client=rpc_client, + function_job_id=registered_job.uid, + user_id=( + other_logged_user["id"] if access_by_other_user else logged_user["id"] + ), + product_name=osparc_product_name, + outputs=new_outputs, + check_write_permissions=check_write_permissions, + ) + + # Update the function job status + new_status = FunctionJobStatus(status="COMPLETED") + new_outputs = {"output1": "new_result1", "output2": "new_result2"} + if expected_to_raise: + with pytest.raises(expected_to_raise): + await update_job_status_or_output(new_status, new_outputs) + return + + return_value = await update_job_status_or_output(new_status, new_outputs) + if status_or_output == "status": + assert return_value == new_status + else: + assert return_value == new_outputs + + +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +async def test_update_function_job_outputs( + client: TestClient, + rpc_client: RabbitMQRPCClient, + add_user_function_api_access_rights: None, + logged_user: UserInfoDict, + mock_function_factory: Callable[[FunctionClass], Function], + osparc_product_name: ProductName, +): + # Register the function first + registered_function = await 
functions_rpc.register_function(
+        rabbitmq_rpc_client=rpc_client,
+        function=mock_function_factory(FunctionClass.PROJECT),
+        user_id=logged_user["id"],
+        product_name=osparc_product_name,
+    )
+
+    function_job = ProjectFunctionJob(
+        function_uid=registered_function.uid,
+        title="Test Function Job",
+        description="A test function job",
+        project_job_id=uuid4(),
+        inputs={"input1": "value1"},
+        outputs=None,
+        job_creation_task_id=None,
+    )
+
+    # Register the function job
+    registered_job = await functions_rpc.register_function_job(
+        rabbitmq_rpc_client=rpc_client,
+        function_job=function_job,
+        user_id=logged_user["id"],
+        product_name=osparc_product_name,
+    )
+
+    received_outputs = await functions_rpc.get_function_job_outputs(
+        rabbitmq_rpc_client=rpc_client,
+        function_job_id=registered_job.uid,
+        user_id=logged_user["id"],
+        product_name=osparc_product_name,
+    )
+
+    assert received_outputs is None
+
+    new_outputs = {"output1": "new_result1", "output2": "new_result2"}
+
+    # Update the function job outputs
+    updated_outputs = await functions_rpc.update_function_job_outputs(
+        rabbitmq_rpc_client=rpc_client,
+        function_job_id=registered_job.uid,
+        user_id=logged_user["id"],
+        product_name=osparc_product_name,
+        outputs=new_outputs,
+    )
+
+    # Assert the updated outputs match the new outputs
+    assert updated_outputs == new_outputs
+
+    # Retrieve the outputs again and check the update was persisted
+    received_outputs = await functions_rpc.get_function_job_outputs(
+        rabbitmq_rpc_client=rpc_client,
+        function_job_id=registered_job.uid,
+        user_id=logged_user["id"],
+        product_name=osparc_product_name,
+    )
+
+    assert received_outputs == new_outputs
diff --git a/services/web/server/tests/unit/with_dbs/04/functions/test_functions_controller_rest.py b/services/web/server/tests/unit/with_dbs/04/functions/test_functions_controller_rest.py
new file mode 100644
index 000000000000..17bbc9afe42b
--- /dev/null
+++ b/services/web/server/tests/unit/with_dbs/04/functions/test_functions_controller_rest.py
@@ -0,0 +1,348 @@
+# pylint: disable=redefined-outer-name
+# pylint: disable=unused-argument
+# pylint: disable=unused-variable
+# pylint: disable=too-many-arguments
+# pylint: disable=too-many-statements
+
+from collections.abc import AsyncIterator
+from http import HTTPStatus
+from typing import Any
+from uuid import UUID, uuid4
+
+import pytest
+from aiohttp.test_utils import TestClient
+from common_library.json_serialization import json_dumps
+from models_library.api_schemas_webserver.functions import (
+    FunctionClass,
+    JSONFunctionInputSchema,
+    JSONFunctionOutputSchema,
+    RegisteredFunctionGet,
+)
+from models_library.api_schemas_webserver.users import MyFunctionPermissionsGet
+from pydantic import TypeAdapter
+from pytest_simcore.helpers.assert_checks import assert_status
+from pytest_simcore.helpers.webserver_users import UserInfoDict
+from servicelib.aiohttp import status
+from simcore_service_webserver.db.models import UserRole
+
+pytest_simcore_core_services_selection = ["rabbit"]
+
+
+async def _list_functions_and_validate(
+    client: TestClient,
+    expected_status: HTTPStatus,
+    expected_count: int | None = None,
+    params: dict[str, Any] | None = None,
+    expected_uid_in_results: str | None = None,
+    expected_uid_at_index: tuple[str, int] | None = None,
+) -> list[RegisteredFunctionGet] | None:
+    """Helper function to list functions and validate the response."""
+    url = client.app.router["list_functions"].url_for()
+    response = await client.get(url, params=params or {})
+    data, error = await assert_status(response, 
expected_status) + + if error: + return None + + retrieved_functions = TypeAdapter(list[RegisteredFunctionGet]).validate_python(data) + + if expected_count is not None: + assert len(retrieved_functions) == expected_count + + if expected_uid_in_results is not None: + assert expected_uid_in_results in [f"{f.uid}" for f in retrieved_functions] + + if expected_uid_at_index is not None: + expected_uid, index = expected_uid_at_index + assert f"{retrieved_functions[index].uid}" == expected_uid + + return retrieved_functions + + +@pytest.fixture(params=[FunctionClass.PROJECT, FunctionClass.SOLVER]) +def mocked_function(request) -> dict[str, Any]: + function_dict = { + "title": f"Test {request.param} Function", + "description": f"A test {request.param} function", + "inputSchema": JSONFunctionInputSchema( + schema_content={ + "type": "object", + "properties": {"input1": {"type": "string"}}, + }, + ).model_dump(mode="json"), + "outputSchema": JSONFunctionOutputSchema( + schema_content={ + "type": "object", + "properties": {"output1": {"type": "string"}}, + }, + ).model_dump(mode="json"), + "functionClass": request.param, + "defaultInputs": None, + } + + match request.param: + case FunctionClass.PROJECT: + function_dict["projectId"] = f"{uuid4()}" + case FunctionClass.SOLVER: + function_dict["solverKey"] = "simcore/services/dynamic/test" + function_dict["solverVersion"] = "1.0.0" + + return function_dict + + +@pytest.mark.parametrize( + "user_role,add_user_function_api_access_rights,expected_register,expected_get,expected_list,expected_update,expected_delete,expected_get2", + [ + ( + UserRole.USER, + True, + status.HTTP_201_CREATED, + status.HTTP_200_OK, + status.HTTP_200_OK, + status.HTTP_200_OK, + status.HTTP_204_NO_CONTENT, + status.HTTP_404_NOT_FOUND, + ), + ( + UserRole.GUEST, + False, + status.HTTP_403_FORBIDDEN, + status.HTTP_403_FORBIDDEN, + status.HTTP_403_FORBIDDEN, + status.HTTP_403_FORBIDDEN, + status.HTTP_403_FORBIDDEN, + status.HTTP_403_FORBIDDEN, + ), + ], + indirect=["add_user_function_api_access_rights"], +) +async def test_function_workflow( + client: TestClient, + logged_user: UserInfoDict, + other_logged_user: UserInfoDict, + mocked_function: dict[str, Any], + expected_register: HTTPStatus, + expected_get: HTTPStatus, + expected_list: HTTPStatus, + expected_update: HTTPStatus, + expected_delete: HTTPStatus, + expected_get2: HTTPStatus, + add_user_function_api_access_rights: AsyncIterator[None], + request: pytest.FixtureRequest, +) -> None: + # Register a new function + url = client.app.router["register_function"].url_for() + response = await client.post(url, json=mocked_function) + data, error = await assert_status(response, expected_status_code=expected_register) + if error: + returned_function_uid = uuid4() + else: + returned_function = TypeAdapter(RegisteredFunctionGet).validate_python(data) + assert returned_function.uid is not None + returned_function_uid = returned_function.uid + + # Register a new function (duplicate) + url = client.app.router["register_function"].url_for() + mocked_function.update(title=mocked_function["title"] + " (duplicate)") + response = await client.post(url, json=mocked_function) + await assert_status(response, expected_status_code=expected_register) + + # Get the registered function + url = client.app.router["get_function"].url_for( + function_id=f"{returned_function_uid}" + ) + response = await client.get(url) + data, error = await assert_status(response, expected_get) + if not error: + retrieved_function = 
TypeAdapter(RegisteredFunctionGet).validate_python(data) + assert retrieved_function.uid == returned_function.uid + + # List existing functions (default) + await _list_functions_and_validate( + client, + expected_list, + expected_count=2, + expected_uid_in_results=f"{returned_function_uid}", + expected_uid_at_index=( + f"{returned_function_uid}", + 1, + ), # ordered by modified_at by default + ) + + # List existing functions (ordered by created_at ascending) + await _list_functions_and_validate( + client, + expected_list, + expected_count=2, + params={"order_by": json_dumps({"field": "created_at", "direction": "asc"})}, + expected_uid_in_results=f"{returned_function_uid}", + expected_uid_at_index=(f"{returned_function_uid}", 0), + ) + + # List existing functions (searching for not existing) + await _list_functions_and_validate( + client, + expected_list, + expected_count=0, + params={"search": "you_can_not_find_me_because_I_do_not_exist"}, + ) + + # List existing functions (searching for duplicate) + await _list_functions_and_validate( + client, + expected_list, + expected_count=1, + params={"search": "duplicate"}, + ) + + # List existing functions (searching by title) + await _list_functions_and_validate( + client, + expected_list, + expected_count=1, + params={"filters": json_dumps({"search_by_title": "duplicate"})}, + ) + + # Set group permissions for other user + new_group_id = other_logged_user["primary_gid"] + new_group_access_rights = {"read": True, "write": True, "execute": False} + + url = client.app.router["create_or_update_function_group"].url_for( + function_id=f"{returned_function_uid}", group_id=f"{new_group_id}" + ) + + response = await client.put(url, json=new_group_access_rights) + data, error = await assert_status(response, expected_update) + if not error: + assert data == new_group_access_rights + + # Remove group permissions for original user + url = client.app.router["delete_function_group"].url_for( + function_id=f"{returned_function_uid}", group_id=f"{logged_user['primary_gid']}" + ) + + response = await client.delete(url) + data, error = await assert_status(response, expected_delete) + if not error: + assert data is None + + # Check that original user no longer has access + url = client.app.router["get_function"].url_for( + function_id=f"{returned_function_uid}" + ) + response = await client.get(url) + data, error = await assert_status(response, expected_get) + if not error: + retrieved_function = ( + TypeAdapter(RegisteredFunctionGet).validate_python(data).model_dump() + ) + assert retrieved_function["access_rights"] == { + new_group_id: new_group_access_rights + } + + # Update existing function + new_title = "Test Function (edited)" + new_description = "A test function (edited)" + url = client.app.router["update_function"].url_for( + function_id=f"{returned_function_uid}" + ) + response = await client.patch( + url, json={"title": new_title, "description": new_description} + ) + data, error = await assert_status(response, expected_update) + if not error: + updated_group_access_rights = TypeAdapter( + RegisteredFunctionGet + ).validate_python(data) + assert updated_group_access_rights.title == new_title + assert updated_group_access_rights.description == new_description + + # Delete existing function + url = client.app.router["delete_function"].url_for( + function_id=f"{returned_function_uid}" + ) + response = await client.delete(url) + data, error = await assert_status(response, expected_delete) + + # Check if the function was effectively deleted + url = 
client.app.router["get_function"].url_for( + function_id=f"{returned_function_uid}" + ) + response = await client.get(url) + data, error = await assert_status(response, expected_get2) + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +@pytest.mark.parametrize( + "expected_read_functions,expected_write_functions", + [ + (True, True), + (True, False), + (False, True), # Weird, but allowed for testing purposes + (False, False), + ], +) +async def test_list_user_functions_permissions( + client: TestClient, + logged_user: UserInfoDict, + expected_read_functions: bool, + expected_write_functions: bool, + logged_user_function_api_access_rights: dict[str, Any], +): + assert ( + logged_user_function_api_access_rights["read_functions"] + == expected_read_functions + ) + assert ( + logged_user_function_api_access_rights["write_functions"] + == expected_write_functions + ) + + url = client.app.router["list_user_functions_permissions"].url_for() + response = await client.get(url) + data, error = await assert_status(response, expected_status_code=status.HTTP_200_OK) + + assert not error + function_permissions = MyFunctionPermissionsGet.model_validate(data) + assert function_permissions.read_functions == expected_read_functions + assert function_permissions.write_functions == expected_write_functions + + +@pytest.mark.parametrize( + "user_role,expected_read_functions,expected_write_functions", + [(UserRole.USER, True, True)], +) +async def test_delete_function_with_associated_jobs( + client: TestClient, + logged_user: UserInfoDict, + fake_function_with_associated_job: UUID, + logged_user_function_api_access_rights: dict[str, Any], +) -> None: + function_id = fake_function_with_associated_job + + url = client.app.router["get_function"].url_for(function_id=f"{function_id}") + response = await client.get(url) + data, error = await assert_status(response, status.HTTP_200_OK) + assert not error + function = TypeAdapter(RegisteredFunctionGet).validate_python(data) + assert function.uid == function_id + + url = client.app.router["delete_function"].url_for(function_id=f"{function_id}") + response = await client.delete(url) + data, error = await assert_status(response, status.HTTP_409_CONFLICT) + assert error is not None + + url = client.app.router["get_function"].url_for(function_id=f"{function_id}") + response = await client.get(url) + data, error = await assert_status(response, status.HTTP_200_OK) + assert not error + + url = client.app.router["delete_function"].url_for(function_id=f"{function_id}") + response = await client.delete(url, params={"force": "true"}) + data, error = await assert_status(response, status.HTTP_204_NO_CONTENT) + assert not error + + url = client.app.router["get_function"].url_for(function_id=f"{function_id}") + response = await client.get(url) + data, error = await assert_status(response, status.HTTP_404_NOT_FOUND) + assert error is not None diff --git a/services/web/server/tests/unit/with_dbs/04/functions_rpc/test_functions_controller_rpc.py b/services/web/server/tests/unit/with_dbs/04/functions/test_functions_controller_rpc.py similarity index 57% rename from services/web/server/tests/unit/with_dbs/04/functions_rpc/test_functions_controller_rpc.py rename to services/web/server/tests/unit/with_dbs/04/functions/test_functions_controller_rpc.py index c6f2fecc7102..fc720743ad83 100644 --- a/services/web/server/tests/unit/with_dbs/04/functions_rpc/test_functions_controller_rpc.py +++ b/services/web/server/tests/unit/with_dbs/04/functions/test_functions_controller_rpc.py @@ -2,6 
+2,7 @@ # pylint: disable=unused-argument import datetime +from collections.abc import Callable from uuid import uuid4 import pytest @@ -12,14 +13,23 @@ JSONFunctionOutputSchema, ProjectFunction, ) - -# import simcore_service_webserver.functions._functions_controller_rpc as functions_rpc +from models_library.basic_types import IDStr +from models_library.functions import ( + Function, + FunctionClass, + FunctionUserAccessRights, + RegisteredFunction, + SolverFunction, +) from models_library.functions_errors import ( FunctionIDNotFoundError, FunctionReadAccessDeniedError, + FunctionsWriteApiAccessDeniedError, + FunctionWriteAccessDeniedError, ) from models_library.products import ProductName -from pytest_simcore.helpers.webserver_login import UserInfoDict +from models_library.rest_ordering import OrderBy, OrderDirection +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.rabbitmq import RabbitMQRPCClient from servicelib.rabbitmq.rpc_interfaces.webserver.functions import ( functions_rpc_interface as functions_rpc, @@ -34,17 +44,21 @@ ) async def test_register_get_delete_function( client: TestClient, + add_user_function_api_access_rights: None, rpc_client: RabbitMQRPCClient, - mock_function: ProjectFunction, + mock_function_factory: Callable[[FunctionClass], Function], logged_user: UserInfoDict, user_role: UserRole, osparc_product_name: ProductName, other_logged_user: UserInfoDict, ): - # Register the function + function = mock_function_factory(FunctionClass.PROJECT) + assert function.function_class == FunctionClass.PROJECT + + # Register the function registered_function = await functions_rpc.register_function( rabbitmq_rpc_client=rpc_client, - function=mock_function, + function=function, user_id=logged_user["id"], product_name=osparc_product_name, ) @@ -63,19 +77,19 @@ async def test_register_get_delete_function( # Assert the saved function matches the input function assert saved_function.uid is not None - assert saved_function.title == mock_function.title - assert saved_function.description == mock_function.description + assert saved_function.title == function.title + assert saved_function.description == function.description # Ensure saved_function is of type ProjectFunction before accessing project_id assert isinstance(saved_function, ProjectFunction) - assert saved_function.project_id == mock_function.project_id + assert saved_function.project_id == function.project_id assert saved_function.created_at == registered_function.created_at # Assert the returned function matches the expected result - assert registered_function.title == mock_function.title - assert registered_function.description == mock_function.description + assert registered_function.title == function.title + assert registered_function.description == function.description assert isinstance(registered_function, ProjectFunction) - assert registered_function.project_id == mock_function.project_id + assert registered_function.project_id == function.project_id with pytest.raises(FunctionReadAccessDeniedError): await functions_rpc.get_function( @@ -85,7 +99,7 @@ async def test_register_get_delete_function( product_name=osparc_product_name, ) - with pytest.raises(FunctionReadAccessDeniedError): + with pytest.raises(FunctionWriteAccessDeniedError): # Attempt to delete the function by another user await functions_rpc.delete_function( rabbitmq_rpc_client=rpc_client, @@ -94,8 +108,8 @@ async def test_register_get_delete_function( product_name=osparc_product_name, ) - with 
pytest.raises(FunctionReadAccessDeniedError): - # Attempt to delete the function by another user + with pytest.raises(FunctionsWriteApiAccessDeniedError): + # Attempt to delete the function in another product await functions_rpc.delete_function( rabbitmq_rpc_client=rpc_client, function_id=registered_function.uid, @@ -127,6 +141,7 @@ async def test_register_get_delete_function( ) async def test_get_function_not_found( client: TestClient, + add_user_function_api_access_rights: None, rpc_client: RabbitMQRPCClient, logged_user: UserInfoDict, osparc_product_name: ProductName, @@ -148,6 +163,7 @@ async def test_get_function_not_found( ) async def test_list_functions( client: TestClient, + add_user_function_api_access_rights: None, rpc_client: RabbitMQRPCClient, logged_user: UserInfoDict, osparc_product_name: ProductName, @@ -203,16 +219,18 @@ async def test_list_functions( async def test_list_functions_mixed_user( client: TestClient, rpc_client: RabbitMQRPCClient, - mock_function: ProjectFunction, + mock_function_factory: Callable[[FunctionClass], ProjectFunction], logged_user: UserInfoDict, osparc_product_name: ProductName, other_logged_user: UserInfoDict, + add_user_function_api_access_rights: None, ): + function = mock_function_factory(FunctionClass.PROJECT) # Register a function for the logged user registered_functions = [ await functions_rpc.register_function( rabbitmq_rpc_client=rpc_client, - function=mock_function, + function=function, user_id=logged_user["id"], product_name=osparc_product_name, ) @@ -234,7 +252,7 @@ async def test_list_functions_mixed_user( other_registered_function = [ await functions_rpc.register_function( rabbitmq_rpc_client=rpc_client, - function=mock_function, + function=function, user_id=other_logged_user["id"], product_name=osparc_product_name, ) @@ -267,63 +285,225 @@ async def test_list_functions_mixed_user( ) +@pytest.mark.parametrize("user_role", [UserRole.USER]) @pytest.mark.parametrize( - "user_role", - [UserRole.USER], + "order_by", + [ + None, + OrderBy(field=IDStr("uid"), direction=OrderDirection.ASC), + OrderBy(field=IDStr("uid"), direction=OrderDirection.DESC), + ], +) +@pytest.mark.parametrize( + "test_pagination_limit, test_pagination_offset", + [ + (5, 0), + (2, 2), + (12, 4), + ], ) -async def test_list_functions_with_pagination( +async def test_list_functions_with_pagination_ordering( client: TestClient, + add_user_function_api_access_rights: None, rpc_client: RabbitMQRPCClient, - mock_function: ProjectFunction, + mock_function_factory: Callable[[FunctionClass], ProjectFunction], clean_functions: None, osparc_product_name: ProductName, logged_user: UserInfoDict, + order_by: OrderBy | None, + test_pagination_limit: int, + test_pagination_offset: int, ): # Register multiple functions - TOTAL_FUNCTIONS = 3 - for _ in range(TOTAL_FUNCTIONS): + TOTAL_FUNCTIONS = 10 + registered_functions = [ await functions_rpc.register_function( rabbitmq_rpc_client=rpc_client, - function=mock_function, + function=mock_function_factory(FunctionClass.PROJECT), user_id=logged_user["id"], product_name=osparc_product_name, ) - - functions, page_info = await functions_rpc.list_functions( - rabbitmq_rpc_client=rpc_client, - pagination_limit=2, - pagination_offset=0, - user_id=logged_user["id"], - product_name=osparc_product_name, - ) + for _ in range(TOTAL_FUNCTIONS) + ] # List functions with pagination functions, page_info = await functions_rpc.list_functions( rabbitmq_rpc_client=rpc_client, - pagination_limit=2, - pagination_offset=0, + 
pagination_limit=test_pagination_limit, + pagination_offset=test_pagination_offset, user_id=logged_user["id"], product_name=osparc_product_name, + order_by=order_by, ) # Assert the list contains the correct number of functions - assert len(functions) == 2 - assert page_info.count == 2 + assert len(functions) == min( + test_pagination_limit, max(0, TOTAL_FUNCTIONS - test_pagination_offset) + ) + assert all(f.uid in [rf.uid for rf in registered_functions] for f in functions) + assert page_info.count == len(functions) assert page_info.total == TOTAL_FUNCTIONS - # List the next page of functions - functions, page_info = await functions_rpc.list_functions( - rabbitmq_rpc_client=rpc_client, - pagination_limit=2, - pagination_offset=2, - user_id=logged_user["id"], - product_name=osparc_product_name, + # Verify the functions are sorted correctly based on the order_by parameter + if order_by: + field = order_by.field + direction = order_by.direction + sorted_functions = sorted( + functions, + key=lambda f: getattr(f, field), + reverse=(direction == OrderDirection.DESC), + ) + assert functions == sorted_functions + + +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +async def test_list_functions_search( + client: TestClient, + rpc_client: RabbitMQRPCClient, + mock_function_factory: Callable[[FunctionClass], ProjectFunction], + logged_user: UserInfoDict, + osparc_product_name: ProductName, + add_user_function_api_access_rights: None, +): + function = mock_function_factory(FunctionClass.PROJECT) + assert function.function_class == FunctionClass.PROJECT + + mock_function_dummy1 = function.model_copy() + mock_function_dummy1.title = "Function TitleDummy1" + mock_function_dummy1.description = "Function DescriptionDummy1" + + mock_function_dummy2 = function.model_copy() + mock_function_dummy2.title = "Function TitleDummy2" + mock_function_dummy2.description = "Function DescriptionDummy2" + + registered_functions = {} + for function in [mock_function_dummy1, mock_function_dummy2]: + registered_functions[function.title] = [] + for _ in range(5): + registered_functions[function.title].append( + await functions_rpc.register_function( + rabbitmq_rpc_client=rpc_client, + function=function, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + ) + + for search_term, expected_number in [("Dummy", 10), ("Dummy2", 5)]: + # Search for the function by title + functions, _ = await functions_rpc.list_functions( + rabbitmq_rpc_client=rpc_client, + user_id=logged_user["id"], + product_name=osparc_product_name, + search_by_function_title=search_term, + pagination_limit=10, + pagination_offset=0, + ) + + # Assert the function is found + assert len(functions) == expected_number + if search_term == "Dummy2": + assert functions[0].uid in [ + function.uid + for function in registered_functions[mock_function_dummy2.title] + ] + + for search_term, expected_number in [ + ("Dummy", 10), + ("Dummy2", 5), + (str(registered_functions[mock_function_dummy2.title][0].uid)[:8], 1), + ("DescriptionDummy2", 5), + ]: + # Search for the function by name, description, or UUID (multi-column search) + functions, _ = await functions_rpc.list_functions( + rabbitmq_rpc_client=rpc_client, + user_id=logged_user["id"], + product_name=osparc_product_name, + search_by_multi_columns=search_term, + pagination_limit=10, + pagination_offset=0, + ) + + # Assert the function is found + assert len(functions) == expected_number + if search_term == "Dummy2": + assert functions[0].uid in [ + function.uid + for function in 
registered_functions[mock_function_dummy2.title] + ] + + +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +async def test_list_functions_with_filters( + client: TestClient, + rpc_client: RabbitMQRPCClient, + mock_function_factory: Callable[[FunctionClass], ProjectFunction], + logged_user: UserInfoDict, + osparc_product_name: ProductName, + add_user_function_api_access_rights: None, +): + N_OF_PROJECT_FUNCTIONS = 3 + N_OF_SOLVER_FUNCTIONS = 4 + # Register the function first + registered_functions = [ + await functions_rpc.register_function( + rabbitmq_rpc_client=rpc_client, + function=mock_function_factory(FunctionClass.PROJECT), + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + for _ in range(N_OF_PROJECT_FUNCTIONS) + ] + + solver_function = SolverFunction( + title="Solver Function", + description="A function that solves problems", + function_class=FunctionClass.SOLVER, + input_schema=JSONFunctionInputSchema(), + output_schema=JSONFunctionOutputSchema(), + default_inputs=None, + solver_key="simcore/services/comp/foo.bar-baz_/sub-dir_1/my-service1", + solver_version="0.0.0", + ) + registered_functions.extend( + [ + await functions_rpc.register_function( + rabbitmq_rpc_client=rpc_client, + function=solver_function, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + for _ in range(N_OF_SOLVER_FUNCTIONS) + ] ) - # Assert the list contains the correct number of functions - assert len(functions) == 1 - assert page_info.count == 1 - assert page_info.total == TOTAL_FUNCTIONS + for function_class in [FunctionClass.PROJECT, FunctionClass.SOLVER]: + # List functions with filters + functions, _ = await functions_rpc.list_functions( + rabbitmq_rpc_client=rpc_client, + user_id=logged_user["id"], + product_name=osparc_product_name, + filter_by_function_class=function_class, + pagination_limit=10, + pagination_offset=0, + ) + + # Assert the function is found + assert len(functions) == ( + N_OF_PROJECT_FUNCTIONS + if function_class == FunctionClass.PROJECT + else N_OF_SOLVER_FUNCTIONS + ) + assert all( + function.uid in [f.uid for f in registered_functions] + for function in functions + ) @pytest.mark.parametrize( @@ -333,15 +513,16 @@ async def test_list_functions_with_pagination( async def test_update_function_title( client: TestClient, rpc_client: RabbitMQRPCClient, - mock_function: ProjectFunction, + mock_function_factory: Callable[[FunctionClass], RegisteredFunction], logged_user: UserInfoDict, other_logged_user: UserInfoDict, osparc_product_name: ProductName, + add_user_function_api_access_rights: None, ): # Register the function first registered_function = await functions_rpc.register_function( rabbitmq_rpc_client=rpc_client, - function=mock_function, + function=mock_function_factory(FunctionClass.PROJECT), user_id=logged_user["id"], product_name=osparc_product_name, ) @@ -383,14 +564,15 @@ async def test_update_function_title( async def test_update_function_description( client: TestClient, rpc_client: RabbitMQRPCClient, - mock_function: ProjectFunction, + mock_function_factory: Callable[[FunctionClass], RegisteredFunction], logged_user: UserInfoDict, osparc_product_name: ProductName, + add_user_function_api_access_rights: None, ): # Register the function first registered_function = await functions_rpc.register_function( rabbitmq_rpc_client=rpc_client, - function=mock_function, + function=mock_function_factory(FunctionClass.PROJECT), user_id=logged_user["id"], product_name=osparc_product_name, ) @@ -420,14 +602,15 @@ async def 
test_update_function_description( async def test_get_function_input_schema( client: TestClient, rpc_client: RabbitMQRPCClient, - mock_function: ProjectFunction, + mock_function_factory: Callable[[FunctionClass], RegisteredFunction], logged_user: UserInfoDict, osparc_product_name: ProductName, + add_user_function_api_access_rights: None, ): # Register the function first registered_function = await functions_rpc.register_function( rabbitmq_rpc_client=rpc_client, - function=mock_function, + function=mock_function_factory(FunctionClass.PROJECT), user_id=logged_user["id"], product_name=osparc_product_name, ) @@ -452,14 +635,15 @@ async def test_get_function_input_schema( async def test_get_function_output_schema( client: TestClient, rpc_client: RabbitMQRPCClient, - mock_function: ProjectFunction, + mock_function_factory: Callable[[FunctionClass], RegisteredFunction], logged_user: UserInfoDict, osparc_product_name: ProductName, + add_user_function_api_access_rights: None, ): # Register the function first registered_function = await functions_rpc.register_function( rabbitmq_rpc_client=rpc_client, - function=mock_function, + function=mock_function_factory(FunctionClass.PROJECT), user_id=logged_user["id"], product_name=osparc_product_name, ) @@ -484,16 +668,55 @@ async def test_get_function_output_schema( async def test_delete_function( client: TestClient, rpc_client: RabbitMQRPCClient, - mock_function: ProjectFunction, + mock_function_factory: Callable[[FunctionClass], RegisteredFunction], logged_user: UserInfoDict, other_logged_user: UserInfoDict, osparc_product_name: ProductName, + add_user_function_api_access_rights: None, ): # Register the function first registered_function = await functions_rpc.register_function( rabbitmq_rpc_client=rpc_client, - function=mock_function, + function=mock_function_factory(FunctionClass.PROJECT), + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + assert registered_function.uid is not None + + +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +async def test_get_function_user_permissions( + client: TestClient, + add_user_function_api_access_rights: None, + rpc_client: RabbitMQRPCClient, + mock_function_factory: Callable[[FunctionClass], RegisteredFunction], + logged_user: UserInfoDict, + osparc_product_name: ProductName, +): + # Register the function first + registered_function = await functions_rpc.register_function( + rabbitmq_rpc_client=rpc_client, + function=mock_function_factory(FunctionClass.PROJECT), user_id=logged_user["id"], product_name=osparc_product_name, ) assert registered_function.uid is not None + + # Retrieve the user permissions for the function + user_permissions = await functions_rpc.get_function_user_permissions( + rabbitmq_rpc_client=rpc_client, + function_id=registered_function.uid, + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + # Assert the user permissions match the expected permissions + assert user_permissions == FunctionUserAccessRights( + user_id=logged_user["id"], + read=True, + write=True, + execute=True, + ) diff --git a/services/web/server/tests/unit/with_dbs/04/functions/test_functions_service.py b/services/web/server/tests/unit/with_dbs/04/functions/test_functions_service.py new file mode 100644 index 000000000000..ecd97d8d9b18 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/04/functions/test_functions_service.py @@ -0,0 +1,104 @@ +# pylint: disable=unused-argument + +from collections.abc import Callable + +import pytest +from aiohttp.test_utils import 
TestClient +from common_library.users_enums import UserRole +from models_library.functions import ( + FunctionClass, + FunctionGroupAccessRights, + RegisteredFunction, +) +from models_library.functions_errors import FunctionReadAccessDeniedError +from models_library.products import ProductName +from pytest_simcore.helpers.webserver_users import UserInfoDict +from simcore_service_webserver.functions import _functions_service + +pytest_simcore_core_services_selection = ["rabbit"] + + +@pytest.mark.parametrize( + "user_role", + [UserRole.USER], +) +async def test_set_and_remove_group_permissions( + client: TestClient, + user_role: UserRole, + add_user_function_api_access_rights: None, + logged_user: UserInfoDict, + other_logged_user: UserInfoDict, + osparc_product_name: ProductName, + mock_function_factory: Callable[[FunctionClass], RegisteredFunction], + clean_functions: None, +) -> None: + # Register the function + registered_function = await _functions_service.register_function( + app=client.app, + function=mock_function_factory(FunctionClass.PROJECT), + user_id=logged_user["id"], + product_name=osparc_product_name, + ) + + # Test if registering user can access the function + returned_function = await _functions_service.get_function( + app=client.app, + user_id=logged_user["id"], + product_name=osparc_product_name, + function_id=registered_function.uid, + ) + assert returned_function.uid == registered_function.uid + + # Test if non-registering user cannot access the function + with pytest.raises(FunctionReadAccessDeniedError): + await _functions_service.get_function( + app=client.app, + user_id=other_logged_user["id"], + product_name=osparc_product_name, + function_id=registered_function.uid, + ) + + group_permissions = FunctionGroupAccessRights( + group_id=int(other_logged_user["primary_gid"]), + read=True, + write=True, + execute=False, + ) + + # Give non-registering user group access + updated_group_permissions = await _functions_service.set_function_group_permissions( + app=client.app, + user_id=logged_user["id"], + product_name=osparc_product_name, + function_id=registered_function.uid, + permissions=group_permissions, + ) + + assert updated_group_permissions == group_permissions + + # Test if non-registering user can access the function + returned_function = await _functions_service.get_function( + app=client.app, + user_id=other_logged_user["id"], + product_name=osparc_product_name, + function_id=registered_function.uid, + ) + assert returned_function.uid == registered_function.uid + + # Remove non-registering user group access + await _functions_service.remove_function_group_permissions( + app=client.app, + user_id=logged_user["id"], + product_name=osparc_product_name, + permission_group_id=int(other_logged_user["primary_gid"]), + function_id=registered_function.uid, + ) + + # Test if non-registering user cannot access the function + with pytest.raises(FunctionReadAccessDeniedError): + await _functions_service.get_function( + app=client.app, + user_id=other_logged_user["id"], + product_name=osparc_product_name, + function_id=registered_function.uid, + ) diff --git a/services/web/server/tests/unit/with_dbs/04/functions_rpc/conftest.py b/services/web/server/tests/unit/with_dbs/04/functions_rpc/conftest.py deleted file mode 100644 index 98dfa687c230..000000000000 --- a/services/web/server/tests/unit/with_dbs/04/functions_rpc/conftest.py +++ /dev/null @@ -1,141 +0,0 @@ -# pylint:disable=unused-variable -# pylint:disable=unused-argument -# pylint:disable=redefined-outer-name - - -from 
collections.abc import AsyncIterator, Awaitable, Callable -from uuid import uuid4 - -import pytest -from aiohttp.test_utils import TestClient -from models_library.api_schemas_webserver.functions import ( - Function, - JSONFunctionInputSchema, - JSONFunctionOutputSchema, - ProjectFunction, -) -from models_library.products import ProductName -from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict -from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.webserver_login import LoggedUser, UserInfoDict -from servicelib.rabbitmq import RabbitMQRPCClient -from servicelib.rabbitmq.rpc_interfaces.webserver.functions import ( - functions_rpc_interface as functions_rpc, -) -from settings_library.rabbit import RabbitSettings -from simcore_service_webserver.application_settings import ApplicationSettings - - -@pytest.fixture -def app_environment( - rabbit_service: RabbitSettings, - app_environment: EnvVarsDict, - monkeypatch: pytest.MonkeyPatch, -) -> EnvVarsDict: - new_envs = setenvs_from_dict( - monkeypatch, - { - **app_environment, - "RABBIT_HOST": rabbit_service.RABBIT_HOST, - "RABBIT_PORT": f"{rabbit_service.RABBIT_PORT}", - "RABBIT_USER": rabbit_service.RABBIT_USER, - "RABBIT_SECURE": f"{rabbit_service.RABBIT_SECURE}", - "RABBIT_PASSWORD": rabbit_service.RABBIT_PASSWORD.get_secret_value(), - "WEBSERVER_DEV_FEATURES_ENABLED": "1", - "WEBSERVER_FUNCTIONS": "1", - }, - ) - - settings = ApplicationSettings.create_from_envs() - assert settings.WEBSERVER_RABBITMQ - - return new_envs - - -@pytest.fixture -async def rpc_client( - rabbitmq_rpc_client: Callable[[str], Awaitable[RabbitMQRPCClient]], -) -> RabbitMQRPCClient: - return await rabbitmq_rpc_client("client") - - -@pytest.fixture -def mock_function() -> Function: - return ProjectFunction( - title="Test Function", - description="A test function", - input_schema=JSONFunctionInputSchema( - schema_content={ - "type": "object", - "properties": {"input1": {"type": "string"}}, - } - ), - output_schema=JSONFunctionOutputSchema( - schema_content={ - "type": "object", - "properties": {"output1": {"type": "string"}}, - } - ), - project_id=uuid4(), - default_inputs=None, - ) - - -@pytest.fixture -async def other_logged_user( - client: TestClient, rpc_client: RabbitMQRPCClient -) -> AsyncIterator[UserInfoDict]: - async with LoggedUser(client) as other_user: - yield other_user - - -@pytest.fixture -async def clean_functions( - client: TestClient, - rpc_client: RabbitMQRPCClient, - logged_user: UserInfoDict, - osparc_product_name: ProductName, -) -> None: - assert client.app - - functions, _ = await functions_rpc.list_functions( - rabbitmq_rpc_client=rpc_client, - pagination_limit=100, - pagination_offset=0, - user_id=logged_user["id"], - product_name=osparc_product_name, - ) - for function in functions: - assert function.uid is not None - await functions_rpc.delete_function( - rabbitmq_rpc_client=rpc_client, - function_id=function.uid, - user_id=logged_user["id"], - product_name=osparc_product_name, - ) - - -@pytest.fixture -async def clean_function_job_collections( - client: TestClient, - rpc_client: RabbitMQRPCClient, - logged_user: UserInfoDict, - osparc_product_name: ProductName, -) -> None: - assert client.app - - job_collections, _ = await functions_rpc.list_function_job_collections( - rabbitmq_rpc_client=rpc_client, - pagination_limit=100, - pagination_offset=0, - user_id=logged_user["id"], - product_name=osparc_product_name, - ) - for function_job_collection in job_collections: - assert 
function_job_collection.uid is not None - await functions_rpc.delete_function_job_collection( - rabbitmq_rpc_client=rpc_client, - function_job_collection_id=function_job_collection.uid, - user_id=logged_user["id"], - product_name=osparc_product_name, - ) diff --git a/services/web/server/tests/unit/with_dbs/04/functions_rpc/test_function_jobs_controller_rpc.py b/services/web/server/tests/unit/with_dbs/04/functions_rpc/test_function_jobs_controller_rpc.py deleted file mode 100644 index f7b6b16e2df5..000000000000 --- a/services/web/server/tests/unit/with_dbs/04/functions_rpc/test_function_jobs_controller_rpc.py +++ /dev/null @@ -1,353 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument - -import datetime -from uuid import uuid4 - -import pytest -from aiohttp.test_utils import TestClient -from common_library.users_enums import UserRole -from models_library.api_schemas_webserver.functions import ( - ProjectFunction, - ProjectFunctionJob, -) -from models_library.functions_errors import ( - FunctionJobIDNotFoundError, - FunctionJobReadAccessDeniedError, -) -from models_library.products import ProductName -from pytest_simcore.helpers.webserver_login import UserInfoDict -from servicelib.rabbitmq import RabbitMQRPCClient -from servicelib.rabbitmq.rpc_interfaces.webserver.functions import ( - functions_rpc_interface as functions_rpc, -) - -pytest_simcore_core_services_selection = ["rabbit"] - - -@pytest.mark.parametrize( - "user_role", - [UserRole.USER], -) -async def test_register_get_delete_function_job( - client: TestClient, - rpc_client: RabbitMQRPCClient, - mock_function: ProjectFunction, - logged_user: UserInfoDict, - other_logged_user: UserInfoDict, - osparc_product_name: ProductName, -): - # Register the function first - registered_function = await functions_rpc.register_function( - rabbitmq_rpc_client=rpc_client, - function=mock_function, - user_id=logged_user["id"], - product_name=osparc_product_name, - ) - assert registered_function.uid is not None - - function_job = ProjectFunctionJob( - function_uid=registered_function.uid, - title="Test Function Job", - description="A test function job", - project_job_id=uuid4(), - inputs={"input1": "value1"}, - outputs={"output1": "result1"}, - ) - - # Register the function job - registered_job = await functions_rpc.register_function_job( - rabbitmq_rpc_client=rpc_client, - function_job=function_job, - user_id=logged_user["id"], - product_name=osparc_product_name, - ) - - # Assert the registered job matches the input job - assert registered_job.function_uid == function_job.function_uid - assert registered_job.inputs == function_job.inputs - assert registered_job.outputs == function_job.outputs - assert registered_job.created_at - datetime.datetime.now( - datetime.UTC - ) < datetime.timedelta(seconds=60) - - # Retrieve the function job using its ID - retrieved_job = await functions_rpc.get_function_job( - rabbitmq_rpc_client=rpc_client, - function_job_id=registered_job.uid, - user_id=logged_user["id"], - product_name=osparc_product_name, - ) - - # Assert the retrieved job matches the registered job - assert retrieved_job.function_uid == registered_job.function_uid - assert retrieved_job.inputs == registered_job.inputs - assert retrieved_job.outputs == registered_job.outputs - - # Test denied access for another user - with pytest.raises(FunctionJobReadAccessDeniedError): - await functions_rpc.get_function_job( - rabbitmq_rpc_client=rpc_client, - function_job_id=registered_job.uid, - user_id=other_logged_user["id"], - 
product_name=osparc_product_name, - ) - - # Test denied access for anothe product - with pytest.raises(FunctionJobReadAccessDeniedError): - await functions_rpc.get_function_job( - rabbitmq_rpc_client=rpc_client, - function_job_id=registered_job.uid, - user_id=other_logged_user["id"], - product_name="this_is_not_osparc", - ) - - with pytest.raises(FunctionJobReadAccessDeniedError): - # Attempt to delete the function job by another user - await functions_rpc.delete_function_job( - rabbitmq_rpc_client=rpc_client, - function_job_id=registered_job.uid, - user_id=other_logged_user["id"], - product_name=osparc_product_name, - ) - - # Delete the function job using its ID - await functions_rpc.delete_function_job( - rabbitmq_rpc_client=rpc_client, - function_job_id=registered_job.uid, - user_id=logged_user["id"], - product_name=osparc_product_name, - ) - - # Attempt to retrieve the deleted job - with pytest.raises(FunctionJobIDNotFoundError): - await functions_rpc.get_function_job( - rabbitmq_rpc_client=rpc_client, - function_job_id=registered_job.uid, - user_id=logged_user["id"], - product_name=osparc_product_name, - ) - - -@pytest.mark.parametrize( - "user_role", - [UserRole.USER], -) -async def test_get_function_job_not_found( - client: TestClient, - rpc_client: RabbitMQRPCClient, - logged_user: UserInfoDict, - osparc_product_name: ProductName, - clean_functions: None, -): - # Attempt to retrieve a function job that does not exist - with pytest.raises(FunctionJobIDNotFoundError): - await functions_rpc.get_function_job( - rabbitmq_rpc_client=rpc_client, - function_job_id=uuid4(), - user_id=logged_user["id"], - product_name=osparc_product_name, - ) - - -@pytest.mark.parametrize( - "user_role", - [UserRole.USER], -) -async def test_list_function_jobs( - client: TestClient, - rpc_client: RabbitMQRPCClient, - mock_function: ProjectFunction, - logged_user: UserInfoDict, - osparc_product_name: ProductName, -): - # Register the function first - registered_function = await functions_rpc.register_function( - rabbitmq_rpc_client=rpc_client, - function=mock_function, - user_id=logged_user["id"], - product_name=osparc_product_name, - ) - assert registered_function.uid is not None - - function_job = ProjectFunctionJob( - function_uid=registered_function.uid, - title="Test Function Job", - description="A test function job", - project_job_id=uuid4(), - inputs={"input1": "value1"}, - outputs={"output1": "result1"}, - ) - - # Register the function job - registered_job = await functions_rpc.register_function_job( - rabbitmq_rpc_client=rpc_client, - function_job=function_job, - user_id=logged_user["id"], - product_name=osparc_product_name, - ) - - # List function jobs - jobs, _ = await functions_rpc.list_function_jobs( - rabbitmq_rpc_client=rpc_client, - pagination_limit=10, - pagination_offset=0, - user_id=logged_user["id"], - product_name=osparc_product_name, - ) - - # Assert the list contains the registered job - assert len(jobs) > 0 - assert any(j.uid == registered_job.uid for j in jobs) - - -@pytest.mark.parametrize( - "user_role", - [UserRole.USER], -) -async def test_list_function_jobs_for_functionid( - client: TestClient, - rpc_client: RabbitMQRPCClient, - mock_function: ProjectFunction, - logged_user: UserInfoDict, - osparc_product_name: ProductName, -): - # Register the function first - first_registered_function = await functions_rpc.register_function( - rabbitmq_rpc_client=rpc_client, - function=mock_function, - user_id=logged_user["id"], - product_name=osparc_product_name, - ) - 
second_registered_function = await functions_rpc.register_function( - rabbitmq_rpc_client=rpc_client, - function=mock_function, - user_id=logged_user["id"], - product_name=osparc_product_name, - ) - - first_registered_function_jobs = [] - second_registered_function_jobs = [] - for i_job in range(6): - if i_job < 3: - function_job = ProjectFunctionJob( - function_uid=first_registered_function.uid, - title="Test Function Job", - description="A test function job", - project_job_id=uuid4(), - inputs={"input1": "value1"}, - outputs={"output1": "result1"}, - ) - # Register the function job - first_registered_function_jobs.append( - await functions_rpc.register_function_job( - rabbitmq_rpc_client=rpc_client, - function_job=function_job, - user_id=logged_user["id"], - product_name=osparc_product_name, - ) - ) - else: - function_job = ProjectFunctionJob( - function_uid=second_registered_function.uid, - title="Test Function Job", - description="A test function job", - project_job_id=uuid4(), - inputs={"input1": "value1"}, - outputs={"output1": "result1"}, - ) - # Register the function job - second_registered_function_jobs.append( - await functions_rpc.register_function_job( - rabbitmq_rpc_client=rpc_client, - function_job=function_job, - user_id=logged_user["id"], - product_name=osparc_product_name, - ) - ) - - # List function jobs for a specific function ID - jobs, _ = await functions_rpc.list_function_jobs( - rabbitmq_rpc_client=rpc_client, - pagination_limit=10, - pagination_offset=0, - filter_by_function_id=first_registered_function.uid, - user_id=logged_user["id"], - product_name=osparc_product_name, - ) - - # Assert the list contains the registered job - assert len(jobs) > 0 - assert len(jobs) == 3 - assert all(j.function_uid == first_registered_function.uid for j in jobs) - - -@pytest.mark.parametrize( - "user_role", - [UserRole.USER], -) -async def test_find_cached_function_jobs( - client: TestClient, - rpc_client: RabbitMQRPCClient, - logged_user: UserInfoDict, - other_logged_user: UserInfoDict, - osparc_product_name: ProductName, - mock_function: ProjectFunction, - clean_functions: None, -): - - # Register the function first - registered_function = await functions_rpc.register_function( - rabbitmq_rpc_client=rpc_client, - function=mock_function, - user_id=logged_user["id"], - product_name=osparc_product_name, - ) - - registered_function_jobs = [] - for value in range(5): - function_job = ProjectFunctionJob( - function_uid=registered_function.uid, - title="Test Function Job", - description="A test function job", - project_job_id=uuid4(), - inputs={"input1": value if value < 4 else 1}, - outputs={"output1": "result1"}, - ) - - # Register the function job - registered_job = await functions_rpc.register_function_job( - rabbitmq_rpc_client=rpc_client, - function_job=function_job, - user_id=logged_user["id"], - product_name=osparc_product_name, - ) - registered_function_jobs.append(registered_job) - - # Find cached function jobs - cached_jobs = await functions_rpc.find_cached_function_jobs( - rabbitmq_rpc_client=rpc_client, - function_id=registered_function.uid, - inputs={"input1": 1}, - user_id=logged_user["id"], - product_name=osparc_product_name, - ) - - # Assert the cached jobs contain the registered job - assert cached_jobs is not None - assert len(cached_jobs) == 2 - assert {job.uid for job in cached_jobs} == { - registered_function_jobs[1].uid, - registered_function_jobs[4].uid, - } - - cached_jobs = await functions_rpc.find_cached_function_jobs( - rabbitmq_rpc_client=rpc_client, - 
function_id=registered_function.uid, - inputs={"input1": 1}, - user_id=other_logged_user["id"], - product_name=osparc_product_name, - ) - - # Assert the cached jobs does not contain the registered job for the other user - assert cached_jobs is None diff --git a/services/web/server/tests/unit/with_dbs/04/functions_rpc/test_functions_controller_rest.py b/services/web/server/tests/unit/with_dbs/04/functions_rpc/test_functions_controller_rest.py deleted file mode 100644 index 5f4650a3f1e2..000000000000 --- a/services/web/server/tests/unit/with_dbs/04/functions_rpc/test_functions_controller_rest.py +++ /dev/null @@ -1,118 +0,0 @@ -# pylint: disable=redefined-outer-name -# pylint: disable=unused-argument -# pylint: disable=unused-variable -# pylint: disable=too-many-arguments - - -from http import HTTPStatus -from typing import Any -from uuid import uuid4 - -import pytest -from aiohttp.test_utils import TestClient -from models_library.api_schemas_webserver.functions import ( - FunctionClass, - JSONFunctionInputSchema, - JSONFunctionOutputSchema, - RegisteredProjectFunctionGet, -) -from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict -from servicelib.aiohttp import status -from simcore_service_webserver.db.models import UserRole - -pytest_simcore_core_services_selection = ["rabbit"] - - -@pytest.fixture -def mock_function() -> dict[str, Any]: - return { - "title": "Test Function", - "description": "A test function", - "inputSchema": JSONFunctionInputSchema( - schema_content={ - "type": "object", - "properties": {"input1": {"type": "string"}}, - }, - ).model_dump(mode="json"), - "outputSchema": JSONFunctionOutputSchema( - schema_content={ - "type": "object", - "properties": {"output1": {"type": "string"}}, - }, - ).model_dump(mode="json"), - "projectId": str(uuid4()), - "functionClass": FunctionClass.PROJECT, - "defaultInputs": None, - } - - -@pytest.mark.parametrize( - "user_role,expected_register,expected_get,expected_delete,expected_get2", - [ - ( - UserRole.USER, - status.HTTP_201_CREATED, - status.HTTP_200_OK, - status.HTTP_204_NO_CONTENT, - status.HTTP_404_NOT_FOUND, - ), - ( - UserRole.GUEST, - status.HTTP_403_FORBIDDEN, - status.HTTP_403_FORBIDDEN, - status.HTTP_403_FORBIDDEN, - status.HTTP_403_FORBIDDEN, - ), - ], -) -async def test_register_get_delete_function( - client: TestClient, - logged_user: UserInfoDict, - mock_function: dict[str, Any], - expected_register: HTTPStatus, - expected_get: HTTPStatus, - expected_delete: HTTPStatus, - expected_get2: HTTPStatus, -) -> None: - assert client.app - url = client.app.router["register_function"].url_for() - response = await client.post( - f"{url}", - json=mock_function, - ) - data, error = await assert_status(response, expected_status_code=expected_register) - - if error: - returned_function_uid = uuid4() - else: - returned_function = RegisteredProjectFunctionGet.model_validate(data) - assert returned_function.uid is not None - returned_function_uid = returned_function.uid - - url = client.app.router["get_function"].url_for( - function_id=str(returned_function_uid) - ) - response = await client.get( - f"{url}", - ) - data, error = await assert_status(response, expected_get) - if not error: - retrieved_function = RegisteredProjectFunctionGet.model_validate(data) - assert retrieved_function.uid == returned_function.uid - - url = client.app.router["delete_function"].url_for( - function_id=str(returned_function_uid) - ) - response = await client.delete( - f"{url}", - ) - 
data, error = await assert_status(response, expected_delete) - - url = client.app.router["get_function"].url_for( - function_id=str(returned_function_uid) - ) - response = await client.get( - f"{url}", - ) - data, error = await assert_status(response, expected_get2) diff --git a/services/web/server/tests/unit/with_dbs/04/garbage_collector/conftest.py b/services/web/server/tests/unit/with_dbs/04/garbage_collector/conftest.py new file mode 100644 index 000000000000..38c1ba5a3c15 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/04/garbage_collector/conftest.py @@ -0,0 +1,143 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=too-many-positional-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable +import asyncio +from collections.abc import AsyncIterator, Awaitable, Callable + +import pytest +import sqlalchemy as sa +from aiohttp.test_utils import TestClient +from models_library.projects import ProjectID +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict +from redis.asyncio import Redis +from servicelib.aiohttp import status +from servicelib.aiohttp.application import create_safe_application +from servicelib.aiohttp.application_setup import is_setup_completed +from simcore_service_webserver.application_settings import setup_settings +from simcore_service_webserver.db.plugin import setup_db +from simcore_service_webserver.director_v2.plugin import setup_director_v2 +from simcore_service_webserver.login.plugin import setup_login +from simcore_service_webserver.notifications.plugin import setup_notifications +from simcore_service_webserver.products.plugin import setup_products +from simcore_service_webserver.projects.plugin import setup_projects +from simcore_service_webserver.rabbitmq import setup_rabbitmq +from simcore_service_webserver.resource_manager.plugin import setup_resource_manager +from simcore_service_webserver.rest.plugin import setup_rest +from simcore_service_webserver.security.plugin import setup_security +from simcore_service_webserver.session.plugin import setup_session +from simcore_service_webserver.socketio.plugin import setup_socketio +from simcore_service_webserver.users.plugin import setup_users + + +@pytest.fixture(scope="session") +def fast_service_deletion_delay() -> int: + """ + Returns the delay in seconds for fast service deletion. + This is used to speed up tests that involve service deletion. 
+ """ + return 1 + + +@pytest.fixture +def app_environment( + fast_service_deletion_delay: int, + monkeypatch: pytest.MonkeyPatch, + app_environment: EnvVarsDict, +) -> EnvVarsDict: + # NOTE: undos some app_environment settings + monkeypatch.delenv("WEBSERVER_GARBAGE_COLLECTOR", raising=False) + app_environment.pop("WEBSERVER_GARBAGE_COLLECTOR", None) + + return app_environment | setenvs_from_dict( + monkeypatch, + { + "WEBSERVER_COMPUTATION": "1", + "WEBSERVER_NOTIFICATIONS": "1", + # sets TTL of a resource after logout + "RESOURCE_MANAGER_RESOURCE_TTL_S": f"{fast_service_deletion_delay}", + "GARBAGE_COLLECTOR_INTERVAL_S": "30", + }, + ) + + +@pytest.fixture +async def client( + aiohttp_client: Callable, + app_environment: EnvVarsDict, + postgres_db: sa.engine.Engine, + mock_orphaned_services, + redis_client: Redis, + mock_dynamic_scheduler_rabbitmq: None, +) -> TestClient: + app = create_safe_application() + + assert "WEBSERVER_GARBAGE_COLLECTOR" not in app_environment + + settings = setup_settings(app) + assert settings.WEBSERVER_GARBAGE_COLLECTOR is not None + assert settings.WEBSERVER_PROJECTS is not None + + setup_db(app) + setup_session(app) + setup_security(app) + setup_rest(app) + setup_login(app) + setup_users(app) + setup_socketio(app) + assert setup_projects(app) + setup_director_v2(app) + assert setup_resource_manager(app) + setup_rabbitmq(app) + setup_notifications(app) + setup_products(app) + + assert is_setup_completed("simcore_service_webserver.resource_manager", app) + + # NOTE: garbage_collector is disabled and instead explicitly called using + # garbage_collectorgc_core.collect_garbage + assert not is_setup_completed("simcore_service_webserver.garbage_collector", app) + + return await aiohttp_client(app) + + +@pytest.fixture +async def close_project() -> Callable[[TestClient, ProjectID, str], Awaitable[None]]: + """Closes a project by sending a request to the close_project endpoint.""" + + async def _close_project( + client: TestClient, project_uuid: ProjectID, client_session_id: str + ) -> None: + url = client.app.router["close_project"].url_for(project_id=f"{project_uuid}") + resp = await client.post(url, json=client_session_id) + await assert_status(resp, status.HTTP_204_NO_CONTENT) + + return _close_project + + +@pytest.fixture +async def open_project( + close_project: Callable[[TestClient, ProjectID, str], Awaitable[None]], +) -> AsyncIterator[Callable[[TestClient, ProjectID, str], Awaitable[None]]]: + _opened_projects: list[tuple[TestClient, ProjectID, str]] = [] + + async def _open_project( + client: TestClient, project_uuid: ProjectID, client_session_id: str + ) -> None: + url = client.app.router["open_project"].url_for(project_id=f"{project_uuid}") + resp = await client.post(url, json=client_session_id) + await assert_status(resp, status.HTTP_200_OK) + _opened_projects.append((client, project_uuid, client_session_id)) + + yield _open_project + # cleanup, if we cannot close that is because the user_role might not allow it + await asyncio.gather( + *( + close_project(client, project_uuid, client_session_id) + for client, project_uuid, client_session_id in _opened_projects + ), + return_exceptions=True, + ) diff --git a/services/web/server/tests/unit/with_dbs/04/garbage_collector/test_garbage_collector.py b/services/web/server/tests/unit/with_dbs/04/garbage_collector/test_garbage_collector.py new file mode 100644 index 000000000000..6a1bc9d96974 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/04/garbage_collector/test_garbage_collector.py @@ -0,0 
+1,494 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=too-many-positional-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +import asyncio +from collections.abc import Awaitable, Callable +from typing import Any +from unittest import mock + +import pytest +import socketio +from aiohttp.test_utils import TestClient +from aioresponses import aioresponses +from common_library.users_enums import UserRole +from fastapi.encoders import jsonable_encoder +from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet +from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( + DynamicServiceStop, +) +from models_library.projects import ProjectAtDB, ProjectID +from pytest_mock import MockerFixture +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.webserver_parametrizations import MockedStorageSubsystem +from pytest_simcore.helpers.webserver_users import UserInfoDict +from servicelib.aiohttp import status +from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE +from simcore_service_webserver.garbage_collector import _core as gc_core +from simcore_service_webserver.socketio.messages import SOCKET_IO_PROJECT_UPDATED_EVENT +from tenacity import ( + AsyncRetrying, + retry_if_exception_type, + stop_after_delay, + wait_fixed, +) + +_TENACITY_ASSERT_RETRY = { + "reraise": True, + "retry": retry_if_exception_type(AssertionError), + "wait": wait_fixed(0.5), + "stop": stop_after_delay(30), +} + + +@pytest.mark.parametrize( + "user_role, expected_save_state", + [ + (UserRole.GUEST, False), + (UserRole.USER, True), + (UserRole.TESTER, True), + ], +) +async def test_interactive_services_removed_after_logout( + fast_service_deletion_delay: int, + client: TestClient, + logged_user: dict[str, Any], + create_project: Callable[..., Awaitable[ProjectAtDB]], + mocked_dynamic_services_interface: dict[str, mock.MagicMock], + create_dynamic_service_mock: Callable[..., Awaitable[DynamicServiceGet]], + create_socketio_connection: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] + ], + storage_subsystem_mock: MockedStorageSubsystem, # when guest user logs out garbage is collected + director_v2_service_mock: aioresponses, + expected_save_state: bool, + open_project: Callable, + mocked_notifications_plugin: dict[str, mock.Mock], +): + assert client.app + user_id = logged_user["id"] + empty_user_project = await create_project(logged_user) + service = await create_dynamic_service_mock( + user_id=user_id, project_id=empty_user_project.uuid + ) + # create websocket + sio, client_session_id1 = await create_socketio_connection(None, client) + assert sio + # open project in client 1 + await open_project(client, empty_user_project.uuid, client_session_id1) + # logout + logout_url = client.app.router["auth_logout"].url_for() + r = await client.post( + f"{logout_url}", json={"client_session_id": client_session_id1} + ) + assert r.url.path == logout_url.path + await assert_status(r, status.HTTP_200_OK) + + # check result performed by background task + await asyncio.sleep(fast_service_deletion_delay + 1) + await gc_core.collect_garbage(client.app) + + # assert dynamic service is removed (this is done in a fire-and-forget way, so give a bit of leeway) + async for attempt in AsyncRetrying(**_TENACITY_ASSERT_RETRY): + with attempt: + print( + f"--> Waiting for stop_dynamic_service with: {service.node_uuid}, 
{expected_save_state=}", + ) + mocked_dynamic_services_interface[ + "dynamic_scheduler.api.stop_dynamic_service" + ].assert_awaited_with( + app=client.app, + dynamic_service_stop=DynamicServiceStop( + user_id=user_id, + project_id=service.project_id, + node_id=service.node_uuid, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + save_state=expected_save_state, + ), + progress=mock.ANY, + ) + + +@pytest.mark.parametrize( + "user_role, expected_save_state", + [ + (UserRole.GUEST, False), + (UserRole.USER, True), + (UserRole.TESTER, True), + ], +) +async def test_interactive_services_remain_after_websocket_reconnection_from_2_tabs( + fast_service_deletion_delay: int, + director_v2_service_mock: aioresponses, + client: TestClient, + logged_user: UserInfoDict, + create_project: Callable[..., Awaitable[ProjectAtDB]], + mocked_dynamic_services_interface, + create_dynamic_service_mock: Callable[..., Awaitable[DynamicServiceGet]], + create_socketio_connection: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] + ], + storage_subsystem_mock, # when guest user logs out garbage is collected + expected_save_state: bool, + mocker: MockerFixture, + open_project: Callable, + mocked_notifications_plugin: dict[str, mock.Mock], +): + assert client.app + user_id = logged_user["id"] + empty_user_project = await create_project(logged_user) + service = await create_dynamic_service_mock( + user_id=user_id, project_id=empty_user_project.uuid + ) + # create first websocket + sio, client_session_id1 = await create_socketio_connection(None, client) + # open project in client 1 + await open_project(client, empty_user_project.uuid, client_session_id1) + + # create second websocket + sio2, client_session_id2 = await create_socketio_connection(None, client) + assert sio.sid != sio2.sid + socket_project_state_update_mock_callable = mocker.Mock() + sio2.on( + SOCKET_IO_PROJECT_UPDATED_EVENT, + handler=socket_project_state_update_mock_callable, + ) + # disconnect first websocket + # NOTE: since the service deletion delay is set to 1 second for the test, we should not sleep as long here, or the user will be deleted + # We have no mock-up for the heartbeat...
+ await sio.disconnect() + assert not sio.sid + async for attempt in AsyncRetrying( + **(_TENACITY_ASSERT_RETRY | {"wait": wait_fixed(0.1)}) + ): + with attempt: + socket_project_state_update_mock_callable.assert_called_with( + jsonable_encoder( + { + "project_uuid": empty_user_project.uuid, + "data": { + "shareState": { + "locked": False, + "currentUserGroupids": [logged_user["primary_gid"]], + "status": "OPENED", + }, + "state": {"value": "NOT_STARTED"}, + }, + } + ) + ) + # open project in second client + await open_project(client, empty_user_project.uuid, client_session_id2) + # ensure sufficient time is wasted here + await asyncio.sleep(fast_service_deletion_delay + 1) + await gc_core.collect_garbage(client.app) + + # assert dynamic service is still around + mocked_dynamic_services_interface[ + "dynamic_scheduler.api.stop_dynamic_service" + ].assert_not_called() + # disconnect second websocket + await sio2.disconnect() + assert not sio2.sid + # assert dynamic service is still around for now + mocked_dynamic_services_interface[ + "dynamic_scheduler.api.stop_dynamic_service" + ].assert_not_called() + # reconnect websocket + sio2, same_client_session_id2 = await create_socketio_connection( + client_session_id2, client + ) + assert same_client_session_id2 == client_session_id2 + # it should still be there even after waiting for auto deletion from garbage collector + await asyncio.sleep(fast_service_deletion_delay + 1) + await gc_core.collect_garbage(client.app) + + mocked_dynamic_services_interface[ + "dynamic_scheduler.api.stop_dynamic_service" + ].assert_not_called() + # now really disconnect + await sio2.disconnect() + await sio2.wait() + assert not sio2.sid + # run the garbage collector + # even after waiting some time + await asyncio.sleep(fast_service_deletion_delay + 1) + await gc_core.collect_garbage(client.app) + + await asyncio.sleep(0) + # assert dynamic service is gone + calls = [ + mock.call( + app=client.app, + dynamic_service_stop=DynamicServiceStop( + user_id=user_id, + project_id=service.project_id, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + save_state=expected_save_state, + node_id=service.node_uuid, + ), + progress=mock.ANY, + ) + ] + mocked_dynamic_services_interface[ + "dynamic_scheduler.api.stop_dynamic_service" + ].assert_has_calls(calls) + + +@pytest.mark.parametrize( + "user_role, expected_save_state", + [ + (UserRole.GUEST, False), + (UserRole.USER, True), + (UserRole.TESTER, True), + ], +) +async def test_interactive_services_removed_per_project( + fast_service_deletion_delay: int, + director_v2_service_mock: aioresponses, + client: TestClient, + logged_user: UserInfoDict, + create_project: Callable[..., Awaitable[ProjectAtDB]], + mocked_dynamic_services_interface, + create_dynamic_service_mock: Callable[..., Awaitable[DynamicServiceGet]], + create_socketio_connection: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] + ], + storage_subsystem_mock, # when guest user logs out garbage is collected + expected_save_state: bool, + open_project: Callable, + mocked_notifications_plugin: dict[str, mock.Mock], +): + user_id = logged_user["id"] + empty_user_project = await create_project(logged_user) + # create server with delay set to DELAY + service1 = await create_dynamic_service_mock( + user_id=user_id, project_id=empty_user_project.uuid + ) + empty_user_project2 = await create_project(logged_user) + service2 = await create_dynamic_service_mock( + user_id=user_id, project_id=empty_user_project2.uuid + ) + 
service3 = await create_dynamic_service_mock( + user_id=user_id, project_id=empty_user_project2.uuid + ) + # create websocket1 from tab1 + sio1, client_session_id1 = await create_socketio_connection(None, client) + await open_project(client, empty_user_project.uuid, client_session_id1) + # create websocket2 from tab2 + sio2, client_session_id2 = await create_socketio_connection(None, client) + await open_project(client, empty_user_project2.uuid, client_session_id2) + # disconnect websocket1 + await sio1.disconnect() + assert not sio1.sid + # assert dynamic service is still around + mocked_dynamic_services_interface[ + "dynamic_scheduler.api.stop_dynamic_service" + ].assert_not_called() + # wait the defined delay + await asyncio.sleep(fast_service_deletion_delay + 1) + await gc_core.collect_garbage(client.app) + # assert dynamic service 1 is removed + calls = [ + mock.call( + app=client.app, + dynamic_service_stop=DynamicServiceStop( + user_id=user_id, + project_id=service1.project_id, + node_id=service1.node_uuid, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + save_state=expected_save_state, + ), + progress=mock.ANY, + ) + ] + mocked_dynamic_services_interface[ + "dynamic_scheduler.api.stop_dynamic_service" + ].assert_has_calls(calls) + mocked_dynamic_services_interface[ + "dynamic_scheduler.api.stop_dynamic_service" + ].reset_mock() + + # disconnect websocket2 + await sio2.disconnect() + assert not sio2.sid + # assert dynamic services are still around + mocked_dynamic_services_interface[ + "dynamic_scheduler.api.stop_dynamic_service" + ].assert_not_called() + # wait the defined delay + await asyncio.sleep(fast_service_deletion_delay + 1) + await gc_core.collect_garbage(client.app) + # assert dynamic service 2,3 is removed + calls = [ + mock.call( + app=client.server.app, + dynamic_service_stop=DynamicServiceStop( + user_id=user_id, + project_id=service2.project_id, + node_id=service2.node_uuid, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + save_state=expected_save_state, + ), + progress=mock.ANY, + ), + mock.call( + app=client.server.app, + dynamic_service_stop=DynamicServiceStop( + user_id=user_id, + project_id=service3.project_id, + node_id=service3.node_uuid, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + save_state=expected_save_state, + ), + progress=mock.ANY, + ), + ] + mocked_dynamic_services_interface[ + "dynamic_scheduler.api.stop_dynamic_service" + ].assert_has_calls(calls) + mocked_dynamic_services_interface[ + "dynamic_scheduler.api.stop_dynamic_service" + ].reset_mock() + + +@pytest.mark.xfail( + reason="it is currently not permitted to open the same project from 2 different tabs" +) +@pytest.mark.parametrize( + "user_role, expected_save_state", + [ + # (UserRole.ANONYMOUS), + # (UserRole.GUEST), + (UserRole.USER, True), + (UserRole.TESTER, True), + ], +) +async def test_services_remain_after_closing_one_out_of_two_tabs( + fast_service_deletion_delay: int, + director_v2_service_mock: aioresponses, + client: TestClient, + logged_user: UserInfoDict, + create_project: Callable[..., Awaitable[ProjectAtDB]], + mocked_dynamic_services_interface, + create_dynamic_service_mock: Callable[..., Awaitable[DynamicServiceGet]], + create_socketio_connection: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] + ], + expected_save_state: bool, + open_project: Callable[[TestClient, ProjectID, str], Awaitable[None]], + close_project: Callable[[TestClient, ProjectID, str], Awaitable[None]], +): + 
empty_user_project = await create_project(logged_user) + # create server with delay set to DELAY + service = await create_dynamic_service_mock( + user_id=logged_user["id"], project_id=empty_user_project.uuid + ) + # open project in tab1 + sio1, client_session_id1 = await create_socketio_connection(None, client) + assert sio1 + await open_project(client, empty_user_project.uuid, client_session_id1) + # open project in tab2 + sio2, client_session_id2 = await create_socketio_connection(None, client) + assert sio2 + await open_project(client, empty_user_project.uuid, client_session_id2) + # close project in tab1 + await close_project(client, empty_user_project.uuid, client_session_id1) + # wait the defined delay + await asyncio.sleep(fast_service_deletion_delay + 1) + await gc_core.collect_garbage(client.app) + # assert dynamic service is still around + mocked_dynamic_services_interface[ + "dynamic_scheduler.api.stop_dynamic_service" + ].assert_not_called() + # close project in tab2 + await close_project(client, empty_user_project.uuid, client_session_id2) + # wait the defined delay + await asyncio.sleep(fast_service_deletion_delay + 1) + await gc_core.collect_garbage(client.app) + mocked_dynamic_services_interface[ + "dynamic_scheduler.api.stop_dynamic_service" + ].assert_has_calls( + [mock.call(client.server.app, service.node_uuid, expected_save_state)] + ) + + +@pytest.mark.parametrize( + "user_role, expect_call, expected_save_state", + [ + (UserRole.USER, False, True), + (UserRole.TESTER, False, True), + (UserRole.GUEST, True, False), + ], +) +async def test_websocket_disconnected_remove_or_maintain_files_based_on_role( + fast_service_deletion_delay: int, + director_v2_service_mock: aioresponses, + client: TestClient, + logged_user: UserInfoDict, + create_project: Callable[..., Awaitable[ProjectAtDB]], + mocked_dynamic_services_interface, + create_dynamic_service_mock: Callable[..., Awaitable[DynamicServiceGet]], + create_socketio_connection: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] + ], + # asyncpg_storage_system_mock, + storage_subsystem_mock, # when guest user logs out garbage is collected + expect_call: bool, + expected_save_state: bool, + open_project: Callable[[TestClient, ProjectID, str], Awaitable[None]], + mocked_notifications_plugin: dict[str, mock.Mock], +): + user_id = logged_user["id"] + empty_user_project = await create_project(logged_user) + service = await create_dynamic_service_mock( + user_id=user_id, project_id=empty_user_project.uuid + ) + # create websocket + sio, client_session_id1 = await create_socketio_connection(None, client) + assert sio + # open project in client 1 + await open_project(client, empty_user_project.uuid, client_session_id1) + # logout + logout_url = client.app.router["auth_logout"].url_for() + r = await client.post(logout_url, json={"client_session_id": client_session_id1}) + assert r.url.path == logout_url.path + await assert_status(r, status.HTTP_200_OK) + + # ensure sufficient time is wasted here + await asyncio.sleep(fast_service_deletion_delay + 1) + await gc_core.collect_garbage(client.app) + + # assert dynamic service is removed + calls = [ + mock.call( + app=client.server.app, + dynamic_service_stop=DynamicServiceStop( + user_id=user_id, + project_id=service.project_id, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + save_state=expected_save_state, + node_id=service.node_uuid, + ), + progress=mock.ANY, + ) + ] + mocked_dynamic_services_interface[ + 
"dynamic_scheduler.api.stop_dynamic_service" + ].assert_has_calls(calls) + + # this call is done async, so wait a bit here to ensure it is correctly done + async for attempt in AsyncRetrying(**_TENACITY_ASSERT_RETRY): + with attempt: + if expect_call: + # make sure `delete_project` is called + storage_subsystem_mock[1].assert_called_once() + # make sure `delete_user` is called + # asyncpg_storage_system_mock.assert_called_once() + else: + # make sure `delete_project` not called + storage_subsystem_mock[1].assert_not_called() + # make sure `delete_user` not called + # asyncpg_storage_system_mock.assert_not_called() diff --git a/services/web/server/tests/unit/with_dbs/04/garbage_collector/test_projects_document_service.py b/services/web/server/tests/unit/with_dbs/04/garbage_collector/test_projects_document_service.py new file mode 100644 index 000000000000..b5fd6e6570fe --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/04/garbage_collector/test_projects_document_service.py @@ -0,0 +1,267 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +import uuid +from collections.abc import AsyncGenerator +from typing import Any + +import pytest +from aiohttp.test_utils import TestClient +from common_library.users_enums import UserRole +from models_library.api_schemas_webserver.socketio import SocketIORoomStr +from models_library.projects import ProjectID +from pytest_simcore.helpers.webserver_users import UserInfoDict +from servicelib.redis._project_document_version import ( + increment_and_return_project_document_version, +) +from simcore_service_webserver.projects import _project_document_service +from simcore_service_webserver.projects._project_document_service import ( + get_redis_document_manager_client_sdk, +) +from simcore_service_webserver.socketio._utils import get_socket_server + +pytest_simcore_core_services_selection = [ + "redis", +] + +pytest_simcore_ops_services_selection = [ + "redis-commander", +] + + +@pytest.fixture +def sample_project_uuids() -> list[ProjectID]: + """Generate sample project UUIDs for testing.""" + return [ + ProjectID(str(uuid.uuid4())), + ProjectID(str(uuid.uuid4())), + ProjectID(str(uuid.uuid4())), + ] + + +@pytest.fixture +def redis_document_client(client: TestClient) -> Any: + """Get Redis document manager client for testing.""" + return get_redis_document_manager_client_sdk(client.app) + + +@pytest.fixture +def socketio_server(client: TestClient) -> Any: + """Get SocketIO server instance for testing.""" + return get_socket_server(client.app) + + +@pytest.fixture +async def project_documents_setup( + redis_document_client: Any, sample_project_uuids: list[ProjectID] +) -> AsyncGenerator[list[str], None]: + """Setup project documents in Redis and cleanup after test.""" + test_keys = [] + + # Setup: Create project document versions in Redis + for project_id in sample_project_uuids: + key = f"projects:{project_id}:version" + test_keys.append(key) + # Create document versions (calling twice to increment to version 2) + await increment_and_return_project_document_version( + redis_client=redis_document_client, project_uuid=project_id + ) + await increment_and_return_project_document_version( + redis_client=redis_document_client, project_uuid=project_id + ) + + # Verify keys exist before returning + for key in test_keys: + assert await redis_document_client.redis.exists(key) == 1 + + yield test_keys + + # Cleanup: Remove test keys from Redis + for key in 
test_keys: + await redis_document_client.redis.delete(key) + + +@pytest.fixture +async def create_project_socketio_connections( + create_socketio_connection: Any, client: TestClient, socketio_server: Any +): + """Factory fixture to create SocketIO connections with automatic cleanup.""" + connections = [] + + async def _create_connections_for_projects( + project_uuids: list[ProjectID], connected_project_indices: list[int] + ) -> list[tuple[Any, str]]: + """Create SocketIO connections and connect specified projects to their rooms. + + Args: + project_uuids: List of project UUIDs + connected_project_indices: Indices of projects that should be connected to rooms + + Returns: + List of (sio_client, session_id) tuples + """ + created_connections = [] + + for i, project_id in enumerate(project_uuids): + sio_client, session_id = await create_socketio_connection(None, client) + created_connections.append((sio_client, session_id)) + + # Connect to project room if this project index is in the connected list + if i in connected_project_indices: + project_room = SocketIORoomStr.from_project_id(project_id) + await socketio_server.enter_room(sio_client.get_sid(), project_room) + + connections.extend(created_connections) + return created_connections + + return _create_connections_for_projects + + # Cleanup: disconnecting all SocketIO clients is already handled by the create_socketio_connection fixture + + +@pytest.mark.parametrize( + "user_role", + [ + UserRole.USER, + ], +) +async def test_remove_project_documents_as_admin_with_real_connections( + client: TestClient, + logged_user: UserInfoDict, + redis_document_client: Any, + sample_project_uuids: list[ProjectID], + project_documents_setup: list[str], + create_project_socketio_connections, +): + """Test removing project documents with real Redis and SocketIO connections.
+ + Test scenario: + - Project 0: Has SocketIO connection -> should be preserved + - Project 1: Has SocketIO connection -> should be preserved + - Project 2: No SocketIO connection -> should be removed + """ + # Create SocketIO connections - connect first two projects to their rooms + await create_project_socketio_connections( + project_uuids=sample_project_uuids, + connected_project_indices=[0, 1], # Connect projects 0 and 1 to rooms + ) + + # Execute the function being tested + await _project_document_service.remove_project_documents_as_admin(client.app) + + # Verify results: + # Projects 0 and 1 should still have their documents (users connected) + assert ( + await redis_document_client.redis.exists( + f"projects:{sample_project_uuids[0]}:version" + ) + == 1 + ), "Project 0 should be preserved because it has active SocketIO connection" + + assert ( + await redis_document_client.redis.exists( + f"projects:{sample_project_uuids[1]}:version" + ) + == 1 + ), "Project 1 should be preserved because it has active SocketIO connection" + + # Project 2 should have its document removed (no users connected) + assert ( + await redis_document_client.redis.exists( + f"projects:{sample_project_uuids[2]}:version" + ) + == 0 + ), "Project 2 should be removed because it has no active SocketIO connections" + + +@pytest.mark.parametrize( + "user_role", + [ + UserRole.USER, + ], +) +async def test_remove_project_documents_as_admin_with_known_opened_projects( + client: TestClient, + logged_user: UserInfoDict, + redis_document_client: Any, + sample_project_uuids: list[ProjectID], + project_documents_setup: list[str], + mocker, +): + """Test that project documents are NOT removed when projects are in known opened projects list. + + Test scenario: + - Projects 0 and 1: In known opened projects list -> should be preserved + - Project 2: Not in known opened projects and no connections -> should be removed + """ + # Mock list_opened_project_ids to return the first two projects as "known opened" + known_opened_projects = sample_project_uuids[:2] # First two projects are "opened" + mocker.patch( + "simcore_service_webserver.projects._project_document_service.list_opened_project_ids", + return_value=known_opened_projects, + ) + + # Execute the function being tested + await _project_document_service.remove_project_documents_as_admin(client.app) + + # Verify results: Projects 0 and 1 should be preserved, Project 2 should be removed + assert ( + await redis_document_client.redis.exists( + f"projects:{sample_project_uuids[0]}:version" + ) + == 1 + ), "Project 0 should be kept because it's in known opened projects" + + assert ( + await redis_document_client.redis.exists( + f"projects:{sample_project_uuids[1]}:version" + ) + == 1 + ), "Project 1 should be kept because it's in known opened projects" + + assert ( + await redis_document_client.redis.exists( + f"projects:{sample_project_uuids[2]}:version" + ) + == 0 + ), "Project 2 should be removed because it's not in known opened projects and has no socket connections" + + +@pytest.mark.parametrize( + "user_role", + [ + UserRole.USER, + ], +) +async def test_remove_project_documents_as_admin_mixed_state( + client: TestClient, + logged_user: UserInfoDict, + redis_document_client: Any, + sample_project_uuids: list[ProjectID], + create_project_socketio_connections, +): + """Test mixed state: some projects have documents, some have connections without documents.""" + # Setup: Create document only for first project + test_key = f"projects:{sample_project_uuids[0]}:version" + await 
increment_and_return_project_document_version( + redis_client=redis_document_client, project_uuid=sample_project_uuids[0] + ) + + # Create SocketIO connection for second project (no document) + await create_project_socketio_connections( + project_uuids=sample_project_uuids[1:2], # Only second project + connected_project_indices=[0], # Connect it to room + ) + + # Execute the function + await _project_document_service.remove_project_documents_as_admin(client.app) + + # Verify: First project document should be removed (no connections) + assert ( + await redis_document_client.redis.exists(test_key) == 0 + ), "Project 0 document should be removed (no active connections)" + + # Cleanup + await redis_document_client.redis.delete(test_key) diff --git a/services/web/server/tests/unit/with_dbs/04/garbage_collector/test_resource_manager.py b/services/web/server/tests/unit/with_dbs/04/garbage_collector/test_resource_manager.py index 2c9d421e4f40..629e1a80c689 100644 --- a/services/web/server/tests/unit/with_dbs/04/garbage_collector/test_resource_manager.py +++ b/services/web/server/tests/unit/with_dbs/04/garbage_collector/test_resource_manager.py @@ -5,161 +5,31 @@ # pylint: disable=unused-variable -import asyncio from asyncio import Future -from collections.abc import AsyncIterator, Awaitable, Callable -from pathlib import Path +from collections.abc import Awaitable, Callable from typing import Any from unittest import mock -from unittest.mock import call import pytest import socketio import socketio.exceptions -import sqlalchemy as sa from aiohttp.test_utils import TestClient -from aioresponses import aioresponses -from models_library.api_schemas_directorv2.dynamic_services import DynamicServiceGet -from models_library.api_schemas_dynamic_scheduler.dynamic_services import ( - DynamicServiceStop, -) -from models_library.utils.fastapi_encoders import jsonable_encoder from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict -from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.webserver_login import UserInfoDict -from pytest_simcore.helpers.webserver_parametrizations import MockedStorageSubsystem -from pytest_simcore.helpers.webserver_projects import NewProject -from redis.asyncio import Redis +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status -from servicelib.aiohttp.application import create_safe_application -from servicelib.aiohttp.application_setup import is_setup_completed -from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE from simcore_postgres_database.models.users import UserRole -from simcore_service_webserver.application_settings import setup_settings -from simcore_service_webserver.db.plugin import setup_db -from simcore_service_webserver.director_v2.plugin import setup_director_v2 -from simcore_service_webserver.garbage_collector import _core as gc_core -from simcore_service_webserver.login.plugin import setup_login -from simcore_service_webserver.notifications.plugin import setup_notifications -from simcore_service_webserver.products.plugin import setup_products -from simcore_service_webserver.projects._projects_service import ( - remove_project_dynamic_services, - submit_delete_project_task, -) -from simcore_service_webserver.projects.plugin import setup_projects -from simcore_service_webserver.rabbitmq import setup_rabbitmq -from 
simcore_service_webserver.resource_manager.plugin import setup_resource_manager from simcore_service_webserver.resource_manager.registry import ( RedisResourceRegistry, - UserSessionDict, + UserSession, get_registry, ) -from simcore_service_webserver.rest.plugin import setup_rest -from simcore_service_webserver.security.plugin import setup_security -from simcore_service_webserver.session.plugin import setup_session -from simcore_service_webserver.socketio.messages import SOCKET_IO_PROJECT_UPDATED_EVENT -from simcore_service_webserver.socketio.plugin import setup_socketio -from simcore_service_webserver.users.api import delete_user_without_projects -from simcore_service_webserver.users.exceptions import UserNotFoundError -from simcore_service_webserver.users.plugin import setup_users from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type from tenacity.stop import stop_after_delay from tenacity.wait import wait_fixed from yarl import URL -SERVICE_DELETION_DELAY = 1 - - -async def close_project(client, project_uuid: str, client_session_id: str) -> None: - url = client.app.router["close_project"].url_for(project_id=project_uuid) - resp = await client.post(url, json=client_session_id) - await assert_status(resp, status.HTTP_204_NO_CONTENT) - - -@pytest.fixture -async def open_project() -> AsyncIterator[Callable[..., Awaitable[None]]]: - _opened_projects = [] - - async def _open_project(client, project_uuid: str, client_session_id: str) -> None: - url = client.app.router["open_project"].url_for(project_id=project_uuid) - resp = await client.post(url, json=client_session_id) - await assert_status(resp, status.HTTP_200_OK) - _opened_projects.append((client, project_uuid, client_session_id)) - - yield _open_project - # cleanup, if we cannot close that is because the user_role might not allow it - await asyncio.gather( - *( - close_project(client, project_uuid, client_session_id) - for client, project_uuid, client_session_id in _opened_projects - ), - return_exceptions=True, - ) - - -@pytest.fixture -def app_environment( - monkeypatch: pytest.MonkeyPatch, - app_environment: EnvVarsDict, -) -> EnvVarsDict: - # NOTE: undos some app_environment settings - monkeypatch.delenv("WEBSERVER_GARBAGE_COLLECTOR", raising=False) - app_environment.pop("WEBSERVER_GARBAGE_COLLECTOR", None) - - return app_environment | setenvs_from_dict( - monkeypatch, - { - "WEBSERVER_COMPUTATION": "1", - "WEBSERVER_NOTIFICATIONS": "1", - # sets TTL of a resource after logout - "RESOURCE_MANAGER_RESOURCE_TTL_S": f"{SERVICE_DELETION_DELAY}", - "GARBAGE_COLLECTOR_INTERVAL_S": "30", - }, - ) - - -@pytest.fixture -async def client( - aiohttp_client: Callable, - app_environment: EnvVarsDict, - postgres_db: sa.engine.Engine, - mock_orphaned_services, - redis_client: Redis, - mock_dynamic_scheduler_rabbitmq: None, -) -> TestClient: - app = create_safe_application() - - assert "WEBSERVER_GARBAGE_COLLECTOR" not in app_environment - - settings = setup_settings(app) - assert settings.WEBSERVER_GARBAGE_COLLECTOR is not None - assert settings.WEBSERVER_PROJECTS is not None - - setup_db(app) - setup_session(app) - setup_security(app) - setup_rest(app) - setup_login(app) - setup_users(app) - setup_socketio(app) - assert setup_projects(app) - setup_director_v2(app) - assert setup_resource_manager(app) - setup_rabbitmq(app) - setup_notifications(app) - setup_products(app) - - assert is_setup_completed("simcore_service_webserver.resource_manager", app) - - # NOTE: garbage_collector is disabled and instead 
explicitly called using - # garbage_collectorgc_core.collect_garbage - assert not is_setup_completed("simcore_service_webserver.garbage_collector", app) - - return await aiohttp_client(app) - @pytest.fixture def mock_storage_delete_data_folders(mocker: MockerFixture) -> mock.Mock: @@ -179,48 +49,6 @@ def socket_registry(client: TestClient) -> RedisResourceRegistry: return get_registry(app) -@pytest.fixture -async def empty_user_project( - client, - empty_project, - logged_user, - tests_data_dir: Path, - osparc_product_name: str, -) -> AsyncIterator[dict[str, Any]]: - project = empty_project() - async with NewProject( - project, - client.app, - user_id=logged_user["id"], - tests_data_dir=tests_data_dir, - product_name=osparc_product_name, - ) as project: - print("-----> added project", project["name"]) - yield project - print("<----- removed project", project["name"]) - - -@pytest.fixture -async def empty_user_project2( - client, - empty_project, - logged_user, - tests_data_dir: Path, - osparc_product_name: str, -) -> AsyncIterator[dict[str, Any]]: - project = empty_project() - async with NewProject( - project, - client.app, - user_id=logged_user["id"], - tests_data_dir=tests_data_dir, - product_name=osparc_product_name, - ) as project: - print("-----> added project", project["name"]) - yield project - print("<----- removed project", project["name"]) - - async def test_anonymous_websocket_connection( client_session_id_factory: Callable[[], str], socketio_url_factory: Callable, @@ -261,16 +89,17 @@ async def test_anonymous_websocket_connection( ], ) async def test_websocket_resource_management( - logged_user, + logged_user: UserInfoDict, + client: TestClient, socket_registry: RedisResourceRegistry, - socketio_client_factory: Callable, - client_session_id_factory: Callable[[], str], + create_socketio_connection: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] + ], ): - cur_client_session_id = client_session_id_factory() - sio = await socketio_client_factory(cur_client_session_id) + sio, cur_client_session_id = await create_socketio_connection(None, client) sid = sio.get_sid() - resource_key = UserSessionDict( - user_id=f"{logged_user['id']}", client_session_id=cur_client_session_id + resource_key = UserSession( + user_id=logged_user["id"], client_session_id=cur_client_session_id ) assert await socket_registry.find_keys(("socket_id", sio.get_sid())) == [ @@ -307,20 +136,21 @@ async def test_websocket_resource_management( ) async def test_websocket_multiple_connections( socket_registry: RedisResourceRegistry, - logged_user, - socketio_client_factory: Callable, - client_session_id_factory: Callable[[], str], + logged_user: UserInfoDict, + client: TestClient, + create_socketio_connection: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] + ], ): NUMBER_OF_SOCKETS = 5 - resource_keys: list[UserSessionDict] = [] + resource_keys: list[UserSession] = [] # connect multiple clients clients = [] for socket_count in range(1, NUMBER_OF_SOCKETS + 1): - cur_client_session_id = client_session_id_factory() - sio = await socketio_client_factory(cur_client_session_id) - resource_key = UserSessionDict( - user_id=f"{logged_user['id']}", client_session_id=cur_client_session_id + sio, cur_client_session_id = await create_socketio_connection(None, client) + resource_key = UserSession( + user_id=logged_user["id"], client_session_id=cur_client_session_id ) assert await socket_registry.find_keys(("socket_id", sio.get_sid())) == [ 
resource_key @@ -331,7 +161,7 @@ async def test_websocket_multiple_connections( assert ( len( await socket_registry.find_resources( - {"user_id": str(logged_user["id"]), "client_session_id": "*"}, + UserSession(user_id=logged_user["id"], client_session_id="*"), "socket_id", ) ) @@ -378,9 +208,11 @@ async def test_websocket_multiple_connections( async def test_asyncio_task_pending_on_close( client: TestClient, logged_user: dict[str, Any], - socketio_client_factory: Callable, + create_socketio_connection: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] + ], ): - sio = await socketio_client_factory() + sio, *_ = await create_socketio_connection(None, client) assert sio # this test generates warnings on its own @@ -397,8 +229,9 @@ async def test_asyncio_task_pending_on_close( async def test_websocket_disconnected_after_logout( client: TestClient, logged_user: dict[str, Any], - socketio_client_factory: Callable, - client_session_id_factory: Callable[[], str], + create_socketio_connection: Callable[ + [str | None, TestClient | None], Awaitable[tuple[socketio.AsyncClient, str]] + ], expected, mocker: MockerFixture, ): @@ -408,20 +241,17 @@ async def test_websocket_disconnected_after_logout( assert socket_registry # connect first socket - cur_client_session_id1 = client_session_id_factory() - sio = await socketio_client_factory(cur_client_session_id1) + sio, *_ = await create_socketio_connection(None, client) socket_logout_mock_callable = mocker.Mock() sio.on("logout", handler=socket_logout_mock_callable) # connect second socket - cur_client_session_id2 = client_session_id_factory() - sio2 = await socketio_client_factory(cur_client_session_id2) + sio2, cur_client_session_id2 = await create_socketio_connection(None, client) socket_logout_mock_callable2 = mocker.Mock() sio2.on("logout", handler=socket_logout_mock_callable2) # connect third socket - cur_client_session_id3 = client_session_id_factory() - sio3 = await socketio_client_factory(cur_client_session_id3) + sio3, *_ = await create_socketio_connection(None, client) socket_logout_mock_callable3 = mocker.Mock() sio3.on("logout", handler=socket_logout_mock_callable3) @@ -450,198 +280,6 @@ async def test_websocket_disconnected_after_logout( assert not sio3.sid -@pytest.mark.parametrize( - "user_role, expected_save_state", - [ - (UserRole.GUEST, False), - (UserRole.USER, True), - (UserRole.TESTER, True), - ], -) -async def test_interactive_services_removed_after_logout( - client: TestClient, - logged_user: dict[str, Any], - empty_user_project: dict[str, Any], - mocked_dynamic_services_interface: dict[str, mock.MagicMock], - create_dynamic_service_mock: Callable[..., Awaitable[DynamicServiceGet]], - client_session_id_factory: Callable[[], str], - socketio_client_factory: Callable, - storage_subsystem_mock: MockedStorageSubsystem, # when guest user logs out garbage is collected - director_v2_service_mock: aioresponses, - expected_save_state: bool, - open_project: Callable, - mocked_notifications_plugin: dict[str, mock.Mock], -): - assert client.app - user_id = logged_user["id"] - service = await create_dynamic_service_mock( - user_id=user_id, project_id=empty_user_project["uuid"] - ) - # create websocket - client_session_id1 = client_session_id_factory() - sio = await socketio_client_factory(client_session_id1) - assert sio - # open project in client 1 - await open_project(client, empty_user_project["uuid"], client_session_id1) - # logout - logout_url = client.app.router["auth_logout"].url_for() - r = await 
client.post( - f"{logout_url}", json={"client_session_id": client_session_id1} - ) - assert r.url.path == logout_url.path - await assert_status(r, status.HTTP_200_OK) - - # check result perfomed by background task - await asyncio.sleep(SERVICE_DELETION_DELAY + 1) - await gc_core.collect_garbage(client.app) - - # assert dynamic service is removed *this is done in a fire/forget way so give a bit of leeway - async for attempt in AsyncRetrying(**_TENACITY_ASSERT_RETRY): - with attempt: - print( - f"--> Waiting for stop_dynamic_service with: {service.node_uuid}, {expected_save_state=}", - ) - mocked_dynamic_services_interface[ - "dynamic_scheduler.api.stop_dynamic_service" - ].assert_awaited_with( - app=client.app, - dynamic_service_stop=DynamicServiceStop( - user_id=user_id, - project_id=service.project_id, - node_id=service.node_uuid, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - save_state=expected_save_state, - ), - progress=mock.ANY, - ) - - -@pytest.mark.parametrize( - "user_role, expected_save_state", - [ - (UserRole.GUEST, False), - (UserRole.USER, True), - (UserRole.TESTER, True), - ], -) -async def test_interactive_services_remain_after_websocket_reconnection_from_2_tabs( - director_v2_service_mock: aioresponses, - client: TestClient, - logged_user: UserInfoDict, - empty_user_project, - mocked_dynamic_services_interface, - create_dynamic_service_mock: Callable[..., Awaitable[DynamicServiceGet]], - socketio_client_factory: Callable, - client_session_id_factory: Callable[[], str], - storage_subsystem_mock, # when guest user logs out garbage is collected - expected_save_state: bool, - mocker: MockerFixture, - open_project: Callable, - mocked_notifications_plugin: dict[str, mock.Mock], -): - assert client.app - user_id = logged_user["id"] - service = await create_dynamic_service_mock( - user_id=user_id, project_id=empty_user_project["uuid"] - ) - # create first websocket - client_session_id1 = client_session_id_factory() - sio = await socketio_client_factory(client_session_id1) - # open project in client 1 - await open_project(client, empty_user_project["uuid"], client_session_id1) - - # create second websocket - client_session_id2 = client_session_id_factory() - sio2 = await socketio_client_factory(client_session_id2) - assert sio.sid != sio2.sid - socket_project_state_update_mock_callable = mocker.Mock() - sio2.on( - SOCKET_IO_PROJECT_UPDATED_EVENT, - handler=socket_project_state_update_mock_callable, - ) - # disconnect first websocket - # NOTE: since the service deletion delay is set to 1 second for the test, we should not sleep as long here, or the user will be deleted - # We have no mock-up for the heatbeat... 
- await sio.disconnect() - assert not sio.sid - async for attempt in AsyncRetrying( - **(_TENACITY_ASSERT_RETRY | {"wait": wait_fixed(0.1)}) - ): - with attempt: - socket_project_state_update_mock_callable.assert_called_with( - jsonable_encoder( - { - "project_uuid": empty_user_project["uuid"], - "data": { - "locked": { - "value": False, - "owner": { - "user_id": user_id, - "first_name": logged_user.get("first_name", None), - "last_name": logged_user.get("last_name", None), - }, - "status": "OPENED", - }, - "state": {"value": "NOT_STARTED"}, - }, - } - ) - ) - # open project in second client - await open_project(client, empty_user_project["uuid"], client_session_id2) - # ensure sufficient time is wasted here - await asyncio.sleep(SERVICE_DELETION_DELAY + 1) - await gc_core.collect_garbage(client.app) - - # assert dynamic service is still around - mocked_dynamic_services_interface[ - "dynamic_scheduler.api.stop_dynamic_service" - ].assert_not_called() - # disconnect second websocket - await sio2.disconnect() - assert not sio2.sid - # assert dynamic service is still around for now - mocked_dynamic_services_interface[ - "dynamic_scheduler.api.stop_dynamic_service" - ].assert_not_called() - # reconnect websocket - sio2 = await socketio_client_factory(client_session_id2) - # it should still be there even after waiting for auto deletion from garbage collector - await asyncio.sleep(SERVICE_DELETION_DELAY + 1) - await gc_core.collect_garbage(client.app) - - mocked_dynamic_services_interface[ - "dynamic_scheduler.api.stop_dynamic_service" - ].assert_not_called() - # now really disconnect - await sio2.disconnect() - await sio2.wait() - assert not sio2.sid - # run the garbage collector - # event after waiting some time - await asyncio.sleep(SERVICE_DELETION_DELAY + 1) - await gc_core.collect_garbage(client.app) - - await asyncio.sleep(0) - # assert dynamic service is gone - calls = [ - call( - app=client.app, - dynamic_service_stop=DynamicServiceStop( - user_id=user_id, - project_id=service.project_id, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - save_state=expected_save_state, - node_id=service.node_uuid, - ), - progress=mock.ANY, - ) - ] - mocked_dynamic_services_interface[ - "dynamic_scheduler.api.stop_dynamic_service" - ].assert_has_calls(calls) - - @pytest.fixture async def mocked_notification_system(mocker): mocks = {} @@ -652,301 +290,3 @@ async def mocked_notification_system(mocker): mocked_notification_system.return_value.set_result("") mocks["mocked_notification_system"] = mocked_notification_system return mocks - - -@pytest.mark.parametrize( - "user_role, expected_save_state", - [ - (UserRole.GUEST, False), - (UserRole.USER, True), - (UserRole.TESTER, True), - ], -) -async def test_interactive_services_removed_per_project( - director_v2_service_mock: aioresponses, - client, - logged_user, - empty_user_project, - empty_user_project2, - mocked_dynamic_services_interface, - create_dynamic_service_mock: Callable[..., Awaitable[DynamicServiceGet]], - mocked_notification_system, - socketio_client_factory: Callable, - client_session_id_factory: Callable[[], str], - asyncpg_storage_system_mock, - storage_subsystem_mock, # when guest user logs out garbage is collected - expected_save_state: bool, - open_project: Callable, - mocked_notifications_plugin: dict[str, mock.Mock], -): - user_id = logged_user["id"] - # create server with delay set to DELAY - service1 = await create_dynamic_service_mock( - user_id=user_id, project_id=empty_user_project["uuid"] - ) - service2 = await 
create_dynamic_service_mock( - user_id=user_id, project_id=empty_user_project2["uuid"] - ) - service3 = await create_dynamic_service_mock( - user_id=user_id, project_id=empty_user_project2["uuid"] - ) - # create websocket1 from tab1 - client_session_id1 = client_session_id_factory() - sio1 = await socketio_client_factory(client_session_id1) - await open_project(client, empty_user_project["uuid"], client_session_id1) - # create websocket2 from tab2 - client_session_id2 = client_session_id_factory() - sio2 = await socketio_client_factory(client_session_id2) - await open_project(client, empty_user_project2["uuid"], client_session_id2) - # disconnect websocket1 - await sio1.disconnect() - assert not sio1.sid - # assert dynamic service is still around - mocked_dynamic_services_interface[ - "dynamic_scheduler.api.stop_dynamic_service" - ].assert_not_called() - # wait the defined delay - await asyncio.sleep(SERVICE_DELETION_DELAY + 1) - await gc_core.collect_garbage(client.app) - # assert dynamic service 1 is removed - calls = [ - call( - app=client.app, - dynamic_service_stop=DynamicServiceStop( - user_id=user_id, - project_id=service1.project_id, - node_id=service1.node_uuid, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - save_state=expected_save_state, - ), - progress=mock.ANY, - ) - ] - mocked_dynamic_services_interface[ - "dynamic_scheduler.api.stop_dynamic_service" - ].assert_has_calls(calls) - mocked_dynamic_services_interface[ - "dynamic_scheduler.api.stop_dynamic_service" - ].reset_mock() - - # disconnect websocket2 - await sio2.disconnect() - assert not sio2.sid - # assert dynamic services are still around - mocked_dynamic_services_interface[ - "dynamic_scheduler.api.stop_dynamic_service" - ].assert_not_called() - # wait the defined delay - await asyncio.sleep(SERVICE_DELETION_DELAY + 1) - await gc_core.collect_garbage(client.app) - # assert dynamic service 2,3 is removed - calls = [ - call( - app=client.server.app, - dynamic_service_stop=DynamicServiceStop( - user_id=user_id, - project_id=service2.project_id, - node_id=service2.node_uuid, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - save_state=expected_save_state, - ), - progress=mock.ANY, - ), - call( - app=client.server.app, - dynamic_service_stop=DynamicServiceStop( - user_id=user_id, - project_id=service3.project_id, - node_id=service3.node_uuid, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - save_state=expected_save_state, - ), - progress=mock.ANY, - ), - ] - mocked_dynamic_services_interface[ - "dynamic_scheduler.api.stop_dynamic_service" - ].assert_has_calls(calls) - mocked_dynamic_services_interface[ - "dynamic_scheduler.api.stop_dynamic_service" - ].reset_mock() - - -@pytest.mark.xfail( - reason="it is currently not permitted to open the same project from 2 different tabs" -) -@pytest.mark.parametrize( - "user_role, expected_save_state", - [ - # (UserRole.ANONYMOUS), - # (UserRole.GUEST), - (UserRole.USER, True), - (UserRole.TESTER, True), - ], -) -async def test_services_remain_after_closing_one_out_of_two_tabs( - director_v2_service_mock: aioresponses, - client, - logged_user, - empty_user_project, - empty_user_project2, - mocked_dynamic_services_interface, - create_dynamic_service_mock: Callable[..., Awaitable[DynamicServiceGet]], - socketio_client_factory: Callable, - client_session_id_factory: Callable[[], str], - expected_save_state: bool, - open_project: Callable, -): - # create server with delay set to DELAY - service = await create_dynamic_service_mock( - 
user_id=logged_user["id"], project_id=empty_user_project["uuid"] - ) - # open project in tab1 - client_session_id1 = client_session_id_factory() - sio1 = await socketio_client_factory(client_session_id1) - assert sio1 - await open_project(client, empty_user_project["uuid"], client_session_id1) - # open project in tab2 - client_session_id2 = client_session_id_factory() - sio2 = await socketio_client_factory(client_session_id2) - assert sio2 - await open_project(client, empty_user_project["uuid"], client_session_id2) - # close project in tab1 - await close_project(client, empty_user_project["uuid"], client_session_id1) - # wait the defined delay - await asyncio.sleep(SERVICE_DELETION_DELAY + 1) - await gc_core.collect_garbage(client.app) - # assert dynamic service is still around - mocked_dynamic_services_interface[ - "dynamic_scheduler.api.stop_dynamic_service" - ].assert_not_called() - # close project in tab2 - await close_project(client, empty_user_project["uuid"], client_session_id2) - # wait the defined delay - await asyncio.sleep(SERVICE_DELETION_DELAY + 1) - await gc_core.collect_garbage(client.app) - mocked_dynamic_services_interface[ - "dynamic_scheduler.api.stop_dynamic_service" - ].assert_has_calls( - [call(client.server.app, service.node_uuid, expected_save_state)] - ) - - -@pytest.mark.parametrize( - "user_role, expect_call, expected_save_state", - [ - (UserRole.USER, False, True), - (UserRole.TESTER, False, True), - (UserRole.GUEST, True, False), - ], -) -async def test_websocket_disconnected_remove_or_maintain_files_based_on_role( - director_v2_service_mock: aioresponses, - client, - logged_user, - empty_user_project, - mocked_dynamic_services_interface, - create_dynamic_service_mock: Callable[..., Awaitable[DynamicServiceGet]], - client_session_id_factory: Callable[[], str], - socketio_client_factory: Callable, - # asyncpg_storage_system_mock, - storage_subsystem_mock, # when guest user logs out garbage is collected - expect_call: bool, - expected_save_state: bool, - open_project: Callable, - mocked_notifications_plugin: dict[str, mock.Mock], -): - user_id = logged_user["id"] - service = await create_dynamic_service_mock( - user_id=user_id, project_id=empty_user_project["uuid"] - ) - # create websocket - client_session_id1 = client_session_id_factory() - sio: socketio.AsyncClient = await socketio_client_factory(client_session_id1) - assert sio - # open project in client 1 - await open_project(client, empty_user_project["uuid"], client_session_id1) - # logout - logout_url = client.app.router["auth_logout"].url_for() - r = await client.post(logout_url, json={"client_session_id": client_session_id1}) - assert r.url.path == logout_url.path - await assert_status(r, status.HTTP_200_OK) - - # ensure sufficient time is wasted here - await asyncio.sleep(SERVICE_DELETION_DELAY + 1) - await gc_core.collect_garbage(client.app) - - # assert dynamic service is removed - calls = [ - call( - app=client.server.app, - dynamic_service_stop=DynamicServiceStop( - user_id=user_id, - project_id=service.project_id, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - save_state=expected_save_state, - node_id=service.node_uuid, - ), - progress=mock.ANY, - ) - ] - mocked_dynamic_services_interface[ - "dynamic_scheduler.api.stop_dynamic_service" - ].assert_has_calls(calls) - - # this call is done async, so wait a bit here to ensure it is correctly done - async for attempt in AsyncRetrying(**_TENACITY_ASSERT_RETRY): - with attempt: - if expect_call: - # make sure `delete_project` is called 
- storage_subsystem_mock[1].assert_called_once() - # make sure `delete_user` is called - # asyncpg_storage_system_mock.assert_called_once() - else: - # make sure `delete_project` not called - storage_subsystem_mock[1].assert_not_called() - # make sure `delete_user` not called - # asyncpg_storage_system_mock.assert_not_called() - - -@pytest.mark.parametrize("user_role", [UserRole.USER, UserRole.TESTER, UserRole.GUEST]) -async def test_regression_removing_unexisting_user( - director_v2_service_mock: aioresponses, - client: TestClient, - logged_user: dict[str, Any], - empty_user_project: dict[str, Any], - user_role: UserRole, - mock_storage_delete_data_folders: mock.Mock, -) -> None: - # regression test for https://github.com/ITISFoundation/osparc-simcore/issues/2504 - assert client.app - # remove project - user_id = logged_user["id"] - delete_task = await submit_delete_project_task( - app=client.app, - project_uuid=empty_user_project["uuid"], - user_id=user_id, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - ) - await delete_task - # remove user - await delete_user_without_projects(app=client.app, user_id=user_id) - - with pytest.raises(UserNotFoundError): - await remove_project_dynamic_services( - user_id=user_id, - project_uuid=empty_user_project["uuid"], - app=client.app, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - ) - await remove_project_dynamic_services( - user_id=user_id, - project_uuid=empty_user_project["uuid"], - app=client.app, - user_name={"first_name": "my name is", "last_name": "pytest"}, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - ) - # since the call to delete is happening as fire and forget task, let's wait until it is done - async for attempt in AsyncRetrying(**_TENACITY_ASSERT_RETRY): - with attempt: - mock_storage_delete_data_folders.assert_called() diff --git a/services/web/server/tests/unit/with_dbs/01/test_resource_manager_user_sessions.py b/services/web/server/tests/unit/with_dbs/04/garbage_collector/test_resource_manager_user_sessions.py similarity index 59% rename from services/web/server/tests/unit/with_dbs/01/test_resource_manager_user_sessions.py rename to services/web/server/tests/unit/with_dbs/04/garbage_collector/test_resource_manager_user_sessions.py index a82b2e5fd3f5..e6f9435cf1da 100644 --- a/services/web/server/tests/unit/with_dbs/01/test_resource_manager_user_sessions.py +++ b/services/web/server/tests/unit/with_dbs/04/garbage_collector/test_resource_manager_user_sessions.py @@ -3,7 +3,7 @@ # pylint: disable=too-many-arguments # pylint: disable=unused-argument # pylint: disable=unused-variable -import time +import asyncio from collections.abc import Callable from random import randint from uuid import uuid4 @@ -12,20 +12,24 @@ import redis.asyncio as aioredis from aiohttp import web from faker import Faker +from models_library.users import UserID from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from servicelib.aiohttp.application import create_safe_application from servicelib.aiohttp.application_setup import is_setup_completed from simcore_service_webserver.application_settings import setup_settings +from simcore_service_webserver.resource_manager.models import ( + ALIVE_SUFFIX, + RESOURCE_SUFFIX, + RedisHashKey, + UserSession, +) from simcore_service_webserver.resource_manager.plugin import setup_resource_manager from simcore_service_webserver.resource_manager.registry import ( - _ALIVE_SUFFIX, - _RESOURCE_SUFFIX, RedisResourceRegistry, get_registry, ) from 
simcore_service_webserver.resource_manager.settings import get_plugin_settings from simcore_service_webserver.resource_manager.user_sessions import ( - UserSessionID, managed_resource, ) from tenacity import AsyncRetrying, stop_after_delay, wait_fixed @@ -78,128 +82,162 @@ def redis_registry(redis_enabled_app: web.Application) -> RedisResourceRegistry: @pytest.fixture -def create_user_ids(): - def _do(number: int) -> list[int]: - return list(range(number)) +def create_user_ids(faker: Faker) -> Callable[[int], list[UserID]]: + def _do(number: int) -> list[UserID]: + unique_ids = set() + while len(unique_ids) < number: + unique_ids.add(faker.pyint(min_value=1)) + return list(unique_ids) return _do @pytest.mark.parametrize( - "key, hash_key", + "user_session, key", [ - ({"some_key": "some_value"}, "some_key=some_value"), ( - {"some_key": "some_value", "another_key": "another_value"}, - "some_key=some_value:another_key=another_value", + UserSession(user_id=456, client_session_id="some_value"), + "user_id=456:client_session_id=some_value", + ), + ( + UserSession(user_id=123, client_session_id="*"), + "user_id=123:client_session_id=*", ), ], ) -async def test_redis_registry_hashes(redis_enabled_app: web.Application, key, hash_key): - # pylint: disable=protected-access - assert RedisResourceRegistry._hash_key(key) == hash_key - assert ( - RedisResourceRegistry._decode_hash_key(f"{hash_key}:{_RESOURCE_SUFFIX}") == key - ) - assert RedisResourceRegistry._decode_hash_key(f"{hash_key}:{_ALIVE_SUFFIX}") == key +async def test_redis_registry_hashes( + redis_enabled_app: web.Application, user_session: UserSession, key: RedisHashKey +): + assert user_session.to_redis_hash_key() == key + assert UserSession.from_redis_hash_key(key) == user_session + assert UserSession.from_redis_hash_key(f"{key}:{RESOURCE_SUFFIX}") == user_session + assert UserSession.from_redis_hash_key(f"{key}:{RESOURCE_SUFFIX}") == user_session + assert UserSession.from_redis_hash_key(f"{key}:{ALIVE_SUFFIX}") == user_session + + +@pytest.fixture +def create_user_session(faker: Faker) -> Callable[[], UserSession]: + def _() -> UserSession: + return UserSession( + user_id=faker.pyint(), + client_session_id=faker.uuid4(), + ) + return _ -async def test_redis_registry(redis_registry: RedisResourceRegistry): - random_value = randint(1, 10) - key = {f"key_{x}": f"value_{x}" for x in range(random_value)} - second_key = {f"sec_key_{x}": f"sec_value_{x}" for x in range(random_value)} - invalid_key = {"invalid_key": "invalid_value"} + +async def test_redis_registry( + redis_registry: RedisResourceRegistry, + create_user_session: Callable[[], UserSession], +): + user_session = create_user_session() + user_session2 = create_user_session() + + invalid_user_session = create_user_session() NUM_RESOURCES = 7 resources = [(f"res_key{x}", f"res_value{x}") for x in range(NUM_RESOURCES)] invalid_resource = ("invalid_res_key", "invalid_res_value") # create resources for res in resources: - await redis_registry.set_resource(key, res) - assert len(await redis_registry.get_resources(key)) == resources.index(res) + 1 + await redis_registry.set_resource(user_session, res) + assert ( + len(await redis_registry.get_resources(user_session)) + == resources.index(res) + 1 + ) # get them - assert await redis_registry.get_resources(key) == {x[0]: x[1] for x in resources} - assert not await redis_registry.get_resources(invalid_key) + assert await redis_registry.get_resources(user_session) == { + x[0]: x[1] for x in resources + } + assert not await 
redis_registry.get_resources(invalid_user_session) # find them for res in resources: - assert await redis_registry.find_resources(key, res[0]) == [res[1]] - assert not await redis_registry.find_resources(invalid_key, res[0]) - assert not await redis_registry.find_resources(key, invalid_resource[0]) - assert await redis_registry.find_keys(res) == [key] + assert await redis_registry.find_resources(user_session, res[0]) == [res[1]] + assert not await redis_registry.find_resources(invalid_user_session, res[0]) + assert not await redis_registry.find_resources( + user_session, invalid_resource[0] + ) + assert await redis_registry.find_keys(res) == [user_session] assert not await redis_registry.find_keys(invalid_resource) # add second key for res in resources: - await redis_registry.set_resource(second_key, res) + await redis_registry.set_resource(user_session2, res) assert ( - len(await redis_registry.get_resources(second_key)) + len(await redis_registry.get_resources(user_session2)) == resources.index(res) + 1 ) # find them for res in resources: - assert await redis_registry.find_resources(key, res[0]) == [res[1]] - assert not await redis_registry.find_resources(invalid_key, res[0]) - assert not await redis_registry.find_resources(key, invalid_resource[0]) - assert not await redis_registry.find_resources(second_key, invalid_resource[0]) + assert await redis_registry.find_resources(user_session, res[0]) == [res[1]] + assert not await redis_registry.find_resources(invalid_user_session, res[0]) + assert not await redis_registry.find_resources( + user_session, invalid_resource[0] + ) + assert not await redis_registry.find_resources( + user_session2, invalid_resource[0] + ) found_keys = await redis_registry.find_keys(res) - assert all(x in found_keys for x in [key, second_key]) - assert all(x in [key, second_key] for x in found_keys) + assert all(x in found_keys for x in [user_session, user_session2]) + assert all(x in [user_session, user_session2] for x in found_keys) assert not await redis_registry.find_keys(invalid_resource) DEAD_KEY_TIMEOUT = 1 STILL_ALIVE_KEY_TIMEOUT = DEAD_KEY_TIMEOUT + 1 # create a key which will be alive when testing - await redis_registry.set_key_alive(key, STILL_ALIVE_KEY_TIMEOUT) - assert await redis_registry.is_key_alive(key) is True + await redis_registry.set_key_alive( + user_session, expiration_time=STILL_ALIVE_KEY_TIMEOUT + ) + assert await redis_registry.is_key_alive(user_session) is True # create soon to be dead key - await redis_registry.set_key_alive(second_key, DEAD_KEY_TIMEOUT) + await redis_registry.set_key_alive(user_session2, expiration_time=DEAD_KEY_TIMEOUT) alive_keys, dead_keys = await redis_registry.get_all_resource_keys() assert not dead_keys - assert all(x in alive_keys for x in [key, second_key]) - assert all(x in [key, second_key] for x in alive_keys) + assert all(x in alive_keys for x in [user_session, user_session2]) + assert all(x in [user_session, user_session2] for x in alive_keys) - time.sleep(DEAD_KEY_TIMEOUT) + await asyncio.sleep(DEAD_KEY_TIMEOUT) - assert await redis_registry.is_key_alive(second_key) is False + assert await redis_registry.is_key_alive(user_session2) is False alive_keys, dead_keys = await redis_registry.get_all_resource_keys() - assert alive_keys == [key] - assert dead_keys == [second_key] + assert alive_keys == [user_session] + assert dead_keys == [user_session2] # clean up - await redis_registry.remove_key(key) - assert await redis_registry.is_key_alive(key) is False + await redis_registry.remove_key(user_session) + assert 
await redis_registry.is_key_alive(user_session) is False for res in resources: - assert await redis_registry.find_keys(res) == [second_key] - await redis_registry.remove_resource(second_key, res[0]) - assert len(await redis_registry.get_resources(second_key)) == len(resources) - ( - resources.index(res) + 1 - ) + assert await redis_registry.find_keys(res) == [user_session2] + await redis_registry.remove_resource(user_session2, res[0]) + assert len(await redis_registry.get_resources(user_session2)) == len( + resources + ) - (resources.index(res) + 1) async def test_redis_registry_key_will_always_expire( redis_registry: RedisResourceRegistry, + create_user_session: Callable[[], UserSession], ): def get_random_int(): - return randint(1, 10) + return randint(1, 10) # noqa: S311 - first_key = {f"key_{x}": f"value_{x}" for x in range(get_random_int())} - second_key = {f"sec_key_{x}": f"sec_value_{x}" for x in range(get_random_int())} + first_key = create_user_session() + second_key = create_user_session() resources = [(f"res_key{x}", f"res_value{x}") for x in range(get_random_int())] for resource in resources: await redis_registry.set_resource(first_key, resource) await redis_registry.set_resource(second_key, resource) - await redis_registry.set_key_alive(first_key, 0) - await redis_registry.set_key_alive(second_key, -3000) + await redis_registry.set_key_alive(first_key, expiration_time=0) + await redis_registry.set_key_alive(second_key, expiration_time=-3000) async for attempt in AsyncRetrying( wait=wait_fixed(0.1), stop=stop_after_delay(5), reraise=True, ): - with attempt: print( f"checking redis registry for keys alive, [attempt {attempt.retry_state.attempt_number}]..." @@ -215,7 +253,7 @@ def get_random_int(): async def test_users_sessions_resources_registry( redis_enabled_app: web.Application, redis_registry: RedisResourceRegistry, - create_user_ids: Callable, + create_user_ids: Callable[[int], list[UserID]], ): # create some user ids and socket ids NUM_USER_IDS = 5 @@ -236,11 +274,10 @@ async def test_users_sessions_resources_registry( tabs[socket_id] = client_session_id with managed_resource(user_id, client_session_id, redis_enabled_app) as rt: - user_session_key = { - "user_id": f"{user_id}", - "client_session_id": client_session_id, - } - assert rt._resource_key() == user_session_key + user_session_key = UserSession( + user_id=user_id, client_session_id=client_session_id + ) + assert rt.resource_key == user_session_key # set the socket id and check it is rightfully there await rt.set_socket_id(socket_id) @@ -263,18 +300,12 @@ async def test_users_sessions_resources_registry( # resource key shall be filled assert await rt.find(res_key) == [res_value] - list_of_same_resource_users: list[ - UserSessionID - ] = await rt.find_users_of_resource( + list_of_same_resource_users = await rt.find_users_of_resource( redis_enabled_app, res_key, res_value ) - assert list_user_ids[: (list_user_ids.index(user_id) + 1)] == sorted( - { - user_session.user_id - for user_session in list_of_same_resource_users - } - ) - + assert set(list_user_ids[: (list_user_ids.index(user_id) + 1)]) == { + user_session.user_id for user_session in list_of_same_resource_users + } # remove sockets (<=> disconnecting user sessions) for user_id in list_user_ids: user = f"user id {user_id}" diff --git a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_checkouts_rest.py b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_checkouts_rest.py index 1a6a81e76f4c..0ca8dbbada04 100644 --- 
a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_checkouts_rest.py +++ b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_checkouts_rest.py @@ -16,7 +16,7 @@ ) from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver.db.models import UserRole diff --git a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_purchases_rest.py b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_purchases_rest.py index 7abbd37b2961..4bb0c1e3fd09 100644 --- a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_purchases_rest.py +++ b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_purchases_rest.py @@ -16,7 +16,7 @@ ) from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver.db.models import UserRole diff --git a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_repository.py b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_repository.py index 01df2519fe68..c8d7fb9ef80c 100644 --- a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_repository.py +++ b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_repository.py @@ -14,7 +14,7 @@ LicensedResourceType, ) from models_library.rest_ordering import OrderBy -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from simcore_postgres_database.models.licensed_item_to_resource import ( licensed_item_to_resource, ) diff --git a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_rest.py b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_rest.py index 914187e5d4c1..4586c106a4c5 100644 --- a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_rest.py +++ b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_items_rest.py @@ -23,7 +23,7 @@ from models_library.licenses import VIP_DETAILS_EXAMPLE, LicensedResourceType from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.licensed_item_to_resource import ( licensed_item_to_resource, diff --git a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_resources_repository.py b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_resources_repository.py index 22069d929149..2614189b139c 100644 --- a/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_resources_repository.py +++ b/services/web/server/tests/unit/with_dbs/04/licenses/test_licensed_resources_repository.py @@ -11,7 +11,7 @@ LicensedResourcePatchDB, LicensedResourceType, ) -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from 
simcore_service_webserver.db.models import UserRole from simcore_service_webserver.licenses import _licensed_resources_repository from simcore_service_webserver.projects.models import ProjectDict diff --git a/services/web/server/tests/unit/with_dbs/04/licenses/test_licenses_rpc.py b/services/web/server/tests/unit/with_dbs/04/licenses/test_licenses_rpc.py index 65bb4ae66c46..ceb4487c2daf 100644 --- a/services/web/server/tests/unit/with_dbs/04/licenses/test_licenses_rpc.py +++ b/services/web/server/tests/unit/with_dbs/04/licenses/test_licenses_rpc.py @@ -16,7 +16,7 @@ from pytest_mock import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.rabbitmq import RabbitMQRPCClient from servicelib.rabbitmq.rpc_interfaces.webserver.licenses.licensed_items import ( checkout_licensed_item_for_wallet, diff --git a/services/web/server/tests/unit/with_dbs/04/notifications/test_notifications__db_comp_tasks_listening_task.py b/services/web/server/tests/unit/with_dbs/04/notifications/test_notifications__db_comp_tasks_listening_task.py index 1d787e86b937..9c360f74ae72 100644 --- a/services/web/server/tests/unit/with_dbs/04/notifications/test_notifications__db_comp_tasks_listening_task.py +++ b/services/web/server/tests/unit/with_dbs/04/notifications/test_notifications__db_comp_tasks_listening_task.py @@ -5,11 +5,14 @@ # pylint:disable=too-many-arguments # pylint:disable=protected-access +import asyncio import json import logging import secrets from collections.abc import AsyncIterator, Awaitable, Callable from dataclasses import dataclass +from datetime import timedelta +from pathlib import Path from typing import Any from unittest import mock @@ -18,11 +21,15 @@ import simcore_service_webserver.db_listener import simcore_service_webserver.db_listener._db_comp_tasks_listening_task from aiohttp.test_utils import TestClient +from aioresponses import aioresponses as AioResponsesMock +from common_library.async_tools import delayed_start from faker import Faker from models_library.projects import ProjectAtDB +from models_library.projects_nodes import InputsDict from pytest_mock import MockType from pytest_mock.plugin import MockerFixture -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.logging_tools import log_context +from pytest_simcore.helpers.webserver_users import UserInfoDict from simcore_postgres_database.models.comp_pipeline import StateType from simcore_postgres_database.models.comp_tasks import NodeClass, comp_tasks from simcore_postgres_database.models.users import UserRole @@ -30,6 +37,7 @@ create_comp_tasks_listening_task, ) from sqlalchemy.ext.asyncio import AsyncEngine +from tenacity import stop_after_attempt from tenacity.asyncio import AsyncRetrying from tenacity.before_sleep import before_sleep_log from tenacity.retry import retry_if_exception_type @@ -168,20 +176,20 @@ async def test_db_listener_triggers_on_event_with_multiple_tasks( mock_project_subsystem: dict[str, mock.Mock], spied_get_changed_comp_task_row: MockType, logged_user: UserInfoDict, - project: Callable[..., Awaitable[ProjectAtDB]], - pipeline: Callable[..., dict[str, Any]], - comp_task: Callable[..., dict[str, Any]], + create_project: Callable[..., Awaitable[ProjectAtDB]], + create_pipeline: Callable[..., Awaitable[dict[str, Any]]], + create_comp_task: 
Callable[..., Awaitable[dict[str, Any]]], with_started_listening_task: None, params: _CompTaskChangeParams, task_class: NodeClass, faker: Faker, mocker: MockerFixture, ): - some_project = await project(logged_user) - pipeline(project_id=f"{some_project.uuid}") + some_project = await create_project(logged_user) + await create_pipeline(project_id=f"{some_project.uuid}") # Create 3 tasks with different node_ids tasks = [ - comp_task( + await create_comp_task( project_id=f"{some_project.uuid}", node_id=faker.uuid4(), outputs=json.dumps({}), @@ -208,3 +216,169 @@ async def test_db_listener_triggers_on_event_with_multiple_tasks( ), f"_get_changed_comp_task_row was not called with task_id={updated_task_id}. Calls: {spied_get_changed_comp_task_row.call_args_list}" else: spied_get_changed_comp_task_row.assert_not_called() + + +@pytest.fixture +def fake_2connected_jupyterlabs_workbench(tests_data_dir: Path) -> dict[str, Any]: + fpath = tests_data_dir / "workbench_2connected_jupyterlabs.json" + assert fpath.exists() + return json.loads(fpath.read_text()) + + +@pytest.fixture +async def mock_dynamic_service_rpc( + mocker: MockerFixture, +) -> mock.AsyncMock: + """ + Mocks the dynamic service RPC calls to avoid actual service calls during tests. + """ + import servicelib.rabbitmq.rpc_interfaces.dynamic_scheduler.services + + return mocker.patch.object( + servicelib.rabbitmq.rpc_interfaces.dynamic_scheduler.services, + "retrieve_inputs", + autospec=True, + ) + + +async def _check_for_stability( + function: Callable[..., Awaitable[None]], *args, **kwargs +) -> None: + async for attempt in AsyncRetrying( + stop=stop_after_attempt(5), + wait=wait_fixed(1), + retry=retry_if_exception_type(), + reraise=True, + ): + with attempt: # noqa: SIM117 + with log_context( + logging.INFO, + msg=f"check stability of {function.__name__} {attempt.retry_state.retry_object.statistics}", + ) as log_ctx: + await function(*args, **kwargs) + log_ctx.logger.info( + "stable for %s...", attempt.retry_state.seconds_since_start + ) + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test_db_listener_upgrades_projects_row_correctly( + with_started_listening_task: None, + director_v2_service_mock: AioResponsesMock, + mocked_dynamic_services_interface: dict[str, mock.MagicMock], + mock_dynamic_service_rpc: mock.AsyncMock, + sqlalchemy_async_engine: AsyncEngine, + logged_user: UserInfoDict, + create_project: Callable[..., Awaitable[ProjectAtDB]], + fake_2connected_jupyterlabs_workbench: dict[str, Any], + create_pipeline: Callable[..., Awaitable[dict[str, Any]]], + create_comp_task: Callable[..., Awaitable[dict[str, Any]]], + spied_get_changed_comp_task_row: MockType, + faker: Faker, +): + some_project = await create_project( + logged_user, workbench=fake_2connected_jupyterlabs_workbench + ) + + # create the corresponding comp_task entries for the project workbench + await create_pipeline(project_id=f"{some_project.uuid}") + tasks = [ + await create_comp_task( + project_id=f"{some_project.uuid}", + node_id=node_id, + outputs=node_data.get("outputs", {}), + node_class=( + NodeClass.INTERACTIVE + if "dynamic" in node_data["key"] + else NodeClass.COMPUTATIONAL + ), + inputs=node_data.get("inputs", InputsDict()), + ) + for node_id, node_data in fake_2connected_jupyterlabs_workbench.items() + ] + assert len(tasks) == 2, "Expected two tasks for the two JupyterLab nodes" + first_jupyter_task = tasks[0] + second_jupyter_task = tasks[1] + assert ( + len(second_jupyter_task["inputs"]) > 0 + ), "Expected inputs for the second 
JupyterLab task"
+ number_of_inputs_linked = len(second_jupyter_task["inputs"])
+
+ # simulate a concurrent change in all the outputs of the first JupyterLab
+ async def _update_first_jupyter_task_output(
+ port_index: int, data: dict[str, Any]
+ ) -> None:
+ with log_context(logging.INFO, msg=f"Updating output {port_index + 1}"):
+ async with sqlalchemy_async_engine.begin() as conn:
+ result = await conn.execute(
+ comp_tasks.select()
+ .with_only_columns(comp_tasks.c.outputs)
+ .where(comp_tasks.c.task_id == first_jupyter_task["task_id"])
+ .with_for_update()
+ )
+ row = result.first()
+ current_outputs = row[0] if row and row[0] else {}
+
+ # Update/add the new key while preserving existing keys
+ current_outputs[f"output_{port_index + 1}"] = data
+
+ # Write back the updated outputs
+ await conn.execute(
+ comp_tasks.update()
+ .values(outputs=current_outputs)
+ .where(comp_tasks.c.task_id == first_jupyter_task["task_id"])
+ )
+
+ @delayed_start(timedelta(seconds=2))
+ async def _change_outputs_sequentially(sleep: float) -> None:
+ """
+ Sequentially updates the outputs of the first JupyterLab task, which triggers the dynamic service RPC to retrieve the inputs of the second task.
+ """
+ for i in range(number_of_inputs_linked):
+ await _update_first_jupyter_task_output(i, {"data": i})
+ await asyncio.sleep(sleep)
+
+ # this runs in a task
+ sequential_task = asyncio.create_task(_change_outputs_sequentially(5))
+ assert sequential_task is not None, "Failed to create the sequential task"
+
+ async def _check_retrieve_rpc_called(expected_ports_retrieved: int) -> None:
+ async for attempt in AsyncRetrying(
+ stop=stop_after_delay(60),
+ wait=wait_fixed(1),
+ retry=retry_if_exception_type(AssertionError),
+ reraise=True,
+ ):
+ with attempt: # noqa: SIM117
+ with log_context(
+ logging.INFO,
+ msg=f"Checking if dynamic service retrieve RPC was called and "
+ f"all expected ports were retrieved {expected_ports_retrieved} "
+ f"times, {attempt.retry_state.retry_object.statistics}",
+ ) as log_ctx:
+ if mock_dynamic_service_rpc.call_count > 0:
+ log_ctx.logger.info(
+ "call arguments: %s",
+ mock_dynamic_service_rpc.call_args_list,
+ )
+ # Assert that the dynamic service RPC was called
+ assert (
+ mock_dynamic_service_rpc.call_count > 0
+ ), "Dynamic service retrieve RPC was not called"
+ # now check which ports were retrieved; we expect all of them
+ all_ports = set()
+ for call in mock_dynamic_service_rpc.call_args_list:
+ retrieved_ports = call[1]["port_keys"]
+ all_ports.update(retrieved_ports)
+ assert len(all_ports) == expected_ports_retrieved, (
+ f"Expected {expected_ports_retrieved} ports to be retrieved, "
+ f"but got {len(all_ports)}: {all_ports}"
+ )
+ log_ctx.logger.info(
+ "Dynamic service retrieve RPC was called with all expected ports!"
+ ) + + await _check_for_stability(_check_retrieve_rpc_called, number_of_inputs_linked) + await asyncio.wait_for(sequential_task, timeout=60) + assert sequential_task.done(), "Sequential task did not complete" + assert not sequential_task.cancelled(), "Sequential task was cancelled unexpectedly" diff --git a/services/web/server/tests/unit/with_dbs/04/products/test_products_rest.py b/services/web/server/tests/unit/with_dbs/04/products/test_products_rest.py index f9a047ef50eb..35d228cfda86 100644 --- a/services/web/server/tests/unit/with_dbs/04/products/test_products_rest.py +++ b/services/web/server/tests/unit/with_dbs/04/products/test_products_rest.py @@ -13,7 +13,7 @@ from models_library.api_schemas_webserver.products import ProductGet, ProductUIGet from models_library.products import ProductName from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from servicelib.rest_constants import X_PRODUCT_NAME_HEADER from servicelib.status_codes_utils import is_2xx_success @@ -51,7 +51,7 @@ async def test_get_product_price_when_undefined( (UserRole.USER, status.HTTP_403_FORBIDDEN), (UserRole.TESTER, status.HTTP_403_FORBIDDEN), (UserRole.PRODUCT_OWNER, status.HTTP_200_OK), - (UserRole.ADMIN, status.HTTP_403_FORBIDDEN), + (UserRole.ADMIN, status.HTTP_200_OK), ], ) async def test_get_product_access_rights( @@ -135,7 +135,7 @@ async def test_get_current_product_ui( user_role: UserRole, expected_status_code: int, ): - assert logged_user["role"] == user_role.value + assert logged_user["role"] == user_role assert product_name in app_products_names # give access to user to this product diff --git a/services/web/server/tests/unit/with_dbs/04/products/test_products_rpc.py b/services/web/server/tests/unit/with_dbs/04/products/test_products_rpc.py index 08763afefa21..70db1f0e841a 100644 --- a/services/web/server/tests/unit/with_dbs/04/products/test_products_rpc.py +++ b/services/web/server/tests/unit/with_dbs/04/products/test_products_rpc.py @@ -7,16 +7,13 @@ from decimal import Decimal import pytest -from models_library.api_schemas_webserver import WEBSERVER_RPC_NAMESPACE -from models_library.api_schemas_webserver.products import CreditResultRpcGet from models_library.products import ProductName -from models_library.rabbitmq_basic_types import RPCMethodName -from pydantic import TypeAdapter from pytest_mock import MockerFixture from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.rabbitmq import RabbitMQRPCClient, RPCServerError +from servicelib.rabbitmq.rpc_interfaces.webserver.products import get_credit_amount from settings_library.rabbit import RabbitSettings from simcore_postgres_database.models.users import UserRole from simcore_service_webserver.application_settings import ApplicationSettings @@ -69,28 +66,26 @@ async def test_get_credit_amount( osparc_product_name: ProductName, logged_user: UserInfoDict, ): - result = await rpc_client.request( - WEBSERVER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python("get_credit_amount"), + # Using the new client function for s4l + credit_result = await get_credit_amount( + rpc_client, dollar_amount=Decimal(900), product_name="s4l", ) - credit_result = 
CreditResultRpcGet.model_validate(result) assert credit_result.credit_amount == 100 - result = await rpc_client.request( - WEBSERVER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python("get_credit_amount"), + # Using the new client function for tis + credit_result = await get_credit_amount( + rpc_client, dollar_amount=Decimal(900), product_name="tis", ) - credit_result = CreditResultRpcGet.model_validate(result) assert credit_result.credit_amount == 180 + # Testing the error case with pytest.raises(RPCServerError) as exc_info: - await rpc_client.request( - WEBSERVER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python("get_credit_amount"), + await get_credit_amount( + rpc_client, dollar_amount=Decimal(900), product_name="osparc", ) diff --git a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/conftest.py b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/conftest.py index cdae2960c743..47279c6f2dd2 100644 --- a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/conftest.py +++ b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/conftest.py @@ -1,22 +1,38 @@ +# pylint: disable=protected-access # pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments # pylint: disable=unused-argument # pylint: disable=unused-variable -# pylint: disable=too-many-arguments - -import logging +from collections.abc import Iterator +from contextlib import ExitStack import pytest +import sqlalchemy as sa +from pytest_simcore.helpers.faker_factories import ( + random_service_access_rights, + random_service_consume_filetype, + random_service_meta_data, +) from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.postgres_tools import sync_insert_and_get_row_lifespan from pytest_simcore.helpers.typing_env import EnvVarsDict -from simcore_service_webserver.log import setup_logging +from simcore_postgres_database.models.services import ( + services_access_rights, + services_meta_data, +) +from simcore_postgres_database.models.services_consume_filetypes import ( + services_consume_filetypes, +) from simcore_service_webserver.studies_dispatcher.settings import ( StudiesDispatcherSettings, ) @pytest.fixture -def app_environment(app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch): +def app_environment( + app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch +) -> EnvVarsDict: envs_plugins = setenvs_from_dict( monkeypatch, { @@ -50,15 +66,221 @@ def app_environment(app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatc }, ) - # NOTE: To see logs, use pytest -s --log-cli-level=DEBUG - setup_logging( - level=logging.DEBUG, - log_format_local_dev_enabled=True, - logger_filter_mapping={}, - tracing_settings=None, - ) - plugin_settings = StudiesDispatcherSettings.create_from_envs() print(plugin_settings.model_dump_json(indent=1)) return {**app_environment, **envs_plugins, **envs_studies_dispatcher} + + +@pytest.fixture(scope="module") +def services_metadata_in_db( + postgres_db: sa.engine.Engine, +) -> Iterator[list[dict]]: + """Pre-populate services metadata table with test data maintaining original structure.""" + services_data = [ + random_service_meta_data( + key="simcore/services/dynamic/raw-graphs", + version="2.11.1", + name="2D plot", + description="2D plots powered by RAW Graphs", + thumbnail=None, + ), + random_service_meta_data( + key="simcore/services/dynamic/bio-formats-web", + version="1.0.1", + name="bio-formats", + description="Bio-Formats image viewer", + 
thumbnail="https://www.openmicroscopy.org/img/logos/bio-formats.svg", + ), + random_service_meta_data( + key="simcore/services/dynamic/jupyter-octave-python-math", + version="1.6.9", + name="JupyterLab Math", + description="JupyterLab Math with octave and python", + thumbnail=None, + ), + random_service_meta_data( + key="simcore/services/dynamic/s4l-ui-modeling", + version="3.2.300", + name="Hornet Flow", + description="Hornet Flow UI for Sim4Life", + thumbnail=None, + ), + ] + + with ExitStack() as stack: + created_services = [] + for service_data in services_data: + row = stack.enter_context( + sync_insert_and_get_row_lifespan( + postgres_db, + table=services_meta_data, + values=service_data, + pk_cols=[services_meta_data.c.key, services_meta_data.c.version], + ) + ) + created_services.append(row) + + yield created_services + + +@pytest.fixture(scope="module") +def services_consume_filetypes_in_db( + postgres_db: sa.engine.Engine, services_metadata_in_db: list[dict] +) -> Iterator[list[dict]]: + """Pre-populate services consume filetypes table with test data.""" + filetypes_data = [ + random_service_consume_filetype( + service_key="simcore/services/dynamic/bio-formats-web", + service_version="1.0.1", + service_display_name="bio-formats", + service_input_port="input_1", + filetype="PNG", + preference_order=0, + is_guest_allowed=True, + ), + random_service_consume_filetype( + service_key="simcore/services/dynamic/raw-graphs", + service_version="2.11.1", + service_display_name="RAWGraphs", + service_input_port="input_1", + filetype="CSV", + preference_order=0, + is_guest_allowed=True, + ), + random_service_consume_filetype( + service_key="simcore/services/dynamic/bio-formats-web", + service_version="1.0.1", + service_display_name="bio-formats", + service_input_port="input_1", + filetype="JPEG", + preference_order=0, + is_guest_allowed=True, + ), + random_service_consume_filetype( + service_key="simcore/services/dynamic/raw-graphs", + service_version="2.11.1", + service_display_name="RAWGraphs", + service_input_port="input_1", + filetype="TSV", + preference_order=0, + is_guest_allowed=True, + ), + random_service_consume_filetype( + service_key="simcore/services/dynamic/raw-graphs", + service_version="2.11.1", + service_display_name="RAWGraphs", + service_input_port="input_1", + filetype="XLSX", + preference_order=0, + is_guest_allowed=True, + ), + random_service_consume_filetype( + service_key="simcore/services/dynamic/raw-graphs", + service_version="2.11.1", + service_display_name="RAWGraphs", + service_input_port="input_1", + filetype="JSON", + preference_order=0, + is_guest_allowed=True, + ), + random_service_consume_filetype( + service_key="simcore/services/dynamic/jupyter-octave-python-math", + service_version="1.6.9", + service_display_name="JupyterLab Math", + service_input_port="input_1", + filetype="PY", + preference_order=0, + is_guest_allowed=False, + ), + random_service_consume_filetype( + service_key="simcore/services/dynamic/jupyter-octave-python-math", + service_version="1.6.9", + service_display_name="JupyterLab Math", + service_input_port="input_1", + filetype="IPYNB", + preference_order=0, + is_guest_allowed=False, + ), + random_service_consume_filetype( + service_key="simcore/services/dynamic/s4l-ui-modeling", + service_version="3.2.300", + service_display_name="Hornet Flow", + service_input_port="input_1", + filetype="HORNET_REPO", + preference_order=0, + is_guest_allowed=False, + ), + ] + + with ExitStack() as stack: + created_filetypes = [] + for filetype_data in 
filetypes_data: + row = stack.enter_context( + sync_insert_and_get_row_lifespan( + postgres_db, + table=services_consume_filetypes, + values=filetype_data, + pk_cols=[ + services_consume_filetypes.c.service_key, + services_consume_filetypes.c.service_version, + services_consume_filetypes.c.filetype, + ], + ) + ) + created_filetypes.append(row) + + yield created_filetypes + + +@pytest.fixture(scope="module") +def services_access_rights_in_db( + postgres_db: sa.engine.Engine, services_metadata_in_db: list[dict] +) -> Iterator[list[dict]]: + """Pre-populate services access rights table with test data.""" + access_rights_data = [ + random_service_access_rights( + key="simcore/services/dynamic/raw-graphs", + version="2.11.1", + gid=1, # everyone group + execute_access=True, + write_access=False, + product_name="osparc", + ), + random_service_access_rights( + key="simcore/services/dynamic/jupyter-octave-python-math", + version="1.6.9", + gid=1, # everyone group + execute_access=True, + write_access=False, + product_name="osparc", + ), + random_service_access_rights( + key="simcore/services/dynamic/s4l-ui-modeling", + version="3.2.300", + gid=1, # everyone group + execute_access=True, + write_access=False, + product_name="osparc", + ), + ] + + with ExitStack() as stack: + created_access_rights = [] + for access_data in access_rights_data: + row = stack.enter_context( + sync_insert_and_get_row_lifespan( + postgres_db, + table=services_access_rights, + values=access_data, + pk_cols=[ + services_access_rights.c.key, + services_access_rights.c.version, + services_access_rights.c.gid, + services_access_rights.c.product_name, + ], + ) + ) + created_access_rights.append(row) + + yield created_access_rights diff --git a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_projects.py b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_projects.py index b05757d70bcd..3f4c8e8e9c6a 100644 --- a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_projects.py +++ b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_projects.py @@ -10,13 +10,18 @@ import pytest from aiohttp.test_utils import TestClient +from common_library.json_serialization import json_dumps +from common_library.serialization import model_dump_with_secrets from faker import Faker from models_library.projects import Project, ProjectID from models_library.projects_nodes_io import NodeID from pytest_mock import MockerFixture +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict from pytest_simcore.helpers.webserver_fake_services_data import list_fake_file_consumers from pytest_simcore.helpers.webserver_login import NewUser from pytest_simcore.helpers.webserver_projects import delete_all_projects +from settings_library.rabbit import RabbitSettings from simcore_service_webserver.groups.api import auto_add_user_to_groups from simcore_service_webserver.projects._projects_service import get_project_for_user from simcore_service_webserver.studies_dispatcher._models import ServiceInfo @@ -27,11 +32,32 @@ _create_project_with_filepicker_and_service, _create_project_with_service, ) -from simcore_service_webserver.users.api import get_user +from simcore_service_webserver.users.users_service import get_user + +pytest_simcore_core_services_selection = [ + "rabbit", +] + FAKE_FILE_VIEWS = list_fake_file_consumers() +@pytest.fixture +def 
app_environment( + app_environment: EnvVarsDict, + monkeypatch: pytest.MonkeyPatch, + rabbit_service: RabbitSettings, +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + { + "WEBSERVER_RABBITMQ": json_dumps( + model_dump_with_secrets(rabbit_service, show_secrets=True) + ) + }, + ) + + @pytest.fixture async def user(client: TestClient) -> AsyncIterator[UserInfo]: async with NewUser(app=client.app) as user_db: @@ -94,8 +120,11 @@ async def test_add_new_project_from_model_instance( ): assert client.app - mock_directorv2_api = mocker.patch( - "simcore_service_webserver.director_v2.director_v2_service.create_or_update_pipeline", + import simcore_service_webserver.director_v2.director_v2_service + + mock_directorv2_api = mocker.patch.object( + simcore_service_webserver.director_v2.director_v2_service, + "create_or_update_pipeline", return_value=None, ) diff --git a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_repository.py b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_repository.py new file mode 100644 index 000000000000..8a508f360179 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_repository.py @@ -0,0 +1,203 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +from collections.abc import AsyncIterator + +import pytest +from pytest_simcore.helpers.faker_factories import ( + random_service_consume_filetype, + random_service_meta_data, +) +from pytest_simcore.helpers.postgres_tools import insert_and_get_row_lifespan +from simcore_postgres_database.models.services import services_meta_data +from simcore_postgres_database.models.services_consume_filetypes import ( + services_consume_filetypes, +) +from simcore_service_webserver.studies_dispatcher._models import ViewerInfo +from simcore_service_webserver.studies_dispatcher._repository import ( + StudiesDispatcherRepository, +) +from sqlalchemy.ext.asyncio import AsyncEngine + + +@pytest.fixture +async def service_metadata_in_db(asyncpg_engine: AsyncEngine) -> AsyncIterator[dict]: + """Pre-populate services metadata table with test data.""" + service_data = random_service_meta_data( + key="simcore/services/dynamic/viewer", + version="1.0.0", + name="Test Viewer Service", + ) + # pylint: disable=contextmanager-generator-missing-cleanup + async with insert_and_get_row_lifespan( + asyncpg_engine, + table=services_meta_data, + values=service_data, + pk_col=services_meta_data.c.key, + pk_value=service_data["key"], + ) as row: + yield row + # cleanup happens automatically + + +@pytest.fixture +async def consume_filetypes_in_db( + asyncpg_engine: AsyncEngine, service_metadata_in_db: dict +): + """Pre-populate services consume filetypes table with test data.""" + consume_data = random_service_consume_filetype( + service_key=service_metadata_in_db["key"], + service_version=service_metadata_in_db["version"], + filetype="CSV", + service_display_name="CSV Viewer", + service_input_port="input_1", + preference_order=1, + is_guest_allowed=True, + ) + + # pylint: disable=contextmanager-generator-missing-cleanup + async with insert_and_get_row_lifespan( + asyncpg_engine, + table=services_consume_filetypes, + values=consume_data, + pk_col=services_consume_filetypes.c.service_key, + pk_value=consume_data["service_key"], + ) as row: + yield row + + +@pytest.fixture +def 
studies_dispatcher_repository( + asyncpg_engine: AsyncEngine, +) -> StudiesDispatcherRepository: + """Create StudiesDispatcherRepository instance.""" + return StudiesDispatcherRepository(engine=asyncpg_engine) + + +async def test_list_viewers_info_all( + studies_dispatcher_repository: StudiesDispatcherRepository, + consume_filetypes_in_db: dict, +): + """Test listing all viewer services.""" + # Act + viewers = await studies_dispatcher_repository.list_viewers_info() + + # Assert + assert len(viewers) == 1 + viewer = viewers[0] + assert isinstance(viewer, ViewerInfo) + assert viewer.key == consume_filetypes_in_db["service_key"] + assert viewer.version == consume_filetypes_in_db["service_version"] + assert viewer.filetype == consume_filetypes_in_db["filetype"] + assert viewer.label == consume_filetypes_in_db["service_display_name"] + assert viewer.input_port_key == consume_filetypes_in_db["service_input_port"] + assert viewer.is_guest_allowed == consume_filetypes_in_db["is_guest_allowed"] + + +async def test_list_viewers_info_filtered_by_filetype( + studies_dispatcher_repository: StudiesDispatcherRepository, + consume_filetypes_in_db: dict, +): + """Test listing viewer services filtered by file type.""" + # Act + viewers = await studies_dispatcher_repository.list_viewers_info(file_type="CSV") + + # Assert + assert len(viewers) == 1 + assert viewers[0].filetype == "CSV" + + # Test with non-existent filetype + viewers_empty = await studies_dispatcher_repository.list_viewers_info( + file_type="NONEXISTENT" + ) + assert len(viewers_empty) == 0 + + +async def test_list_viewers_info_only_default( + studies_dispatcher_repository: StudiesDispatcherRepository, + consume_filetypes_in_db: dict, +): + """Test listing only default viewer services.""" + # Act + viewers = await studies_dispatcher_repository.list_viewers_info( + file_type="CSV", only_default=True + ) + + # Assert + assert len(viewers) == 1 + assert viewers[0].filetype == "CSV" + + +async def test_get_default_viewer_for_filetype( + studies_dispatcher_repository: StudiesDispatcherRepository, + consume_filetypes_in_db: dict, +): + """Test getting the default viewer for a specific file type.""" + # Act + viewer = await studies_dispatcher_repository.get_default_viewer_for_filetype( + file_type="CSV" + ) + + # Assert + assert viewer is not None + assert isinstance(viewer, ViewerInfo) + assert viewer.key == consume_filetypes_in_db["service_key"] + assert viewer.version == consume_filetypes_in_db["service_version"] + assert viewer.filetype == "CSV" + assert viewer.label == consume_filetypes_in_db["service_display_name"] + + # Test with non-existent filetype + viewer_none = await studies_dispatcher_repository.get_default_viewer_for_filetype( + file_type="NONEXISTENT" + ) + assert viewer_none is None + + +async def test_find_compatible_viewer_found( + studies_dispatcher_repository: StudiesDispatcherRepository, + consume_filetypes_in_db: dict, +): + """Test finding a compatible viewer service that exists.""" + # Act + viewer = await studies_dispatcher_repository.find_compatible_viewer( + file_type="CSV", + service_key=consume_filetypes_in_db["service_key"], + service_version="1.0.0", + ) + + # Assert + assert viewer is not None + assert isinstance(viewer, ViewerInfo) + assert viewer.key == consume_filetypes_in_db["service_key"] + assert viewer.version == "1.0.0" # Should use the requested version + assert viewer.filetype == "CSV" + assert viewer.label == consume_filetypes_in_db["service_display_name"] + + +async def 
test_find_compatible_viewer_not_found( + studies_dispatcher_repository: StudiesDispatcherRepository, + consume_filetypes_in_db: dict, +): + """Test finding a compatible viewer service that doesn't exist.""" + # Act - test with non-existent service key + viewer = await studies_dispatcher_repository.find_compatible_viewer( + file_type="CSV", + service_key="simcore/services/dynamic/nonexistent", + service_version="1.0.0", + ) + + # Assert + assert viewer is None + + # Act - test with incompatible filetype + viewer_wrong_filetype = await studies_dispatcher_repository.find_compatible_viewer( + file_type="NONEXISTENT", + service_key=consume_filetypes_in_db["service_key"], + service_version="1.0.0", + ) + + # Assert + assert viewer_wrong_filetype is None diff --git a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_rest_nih.py b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_rest_nih.py new file mode 100644 index 000000000000..77b9e41f091f --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_rest_nih.py @@ -0,0 +1,169 @@ +# pylint: disable=protected-access +# pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + + +import pytest +from aiohttp.test_utils import TestClient, TestServer +from common_library.json_serialization import json_dumps +from common_library.serialization import model_dump_with_secrets +from pydantic import TypeAdapter +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict +from servicelib.aiohttp import status +from settings_library.rabbit import RabbitSettings +from settings_library.redis import RedisSettings +from simcore_service_webserver.studies_dispatcher._controller.rest.nih_schemas import ( + ServiceGet, +) +from yarl import URL + +pytest_simcore_core_services_selection = [ + "rabbit", +] + + +@pytest.fixture +def app_environment( + app_environment: EnvVarsDict, + monkeypatch: pytest.MonkeyPatch, + rabbit_service: RabbitSettings, +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + { + "WEBSERVER_RABBITMQ": json_dumps( + model_dump_with_secrets(rabbit_service, show_secrets=True) + ) + }, + ) + + +@pytest.fixture +def web_server( + redis_service: RedisSettings, + rabbit_service: RabbitSettings, + web_server: TestServer, + # Add dependencies to ensure database is populated before app starts + services_metadata_in_db: list[dict], + services_consume_filetypes_in_db: list[dict], + services_access_rights_in_db: list[dict], +) -> TestServer: + # + # Extends web_server to start redis_service and ensure DB is populated + # + print( + "Redis service started with settings: ", redis_service.model_dump_json(indent=1) + ) + return web_server + + +def _get_base_url(client: TestClient) -> str: + s = client.server + assert isinstance(s.scheme, str) + url = URL.build(scheme=s.scheme, host=s.host, port=s.port) + return f"{url}" + + +async def test_api_get_viewer_for_file(client: TestClient): + resp = await client.get("/v0/viewers/default?file_type=JPEG") + data, _ = await assert_status(resp, status.HTTP_200_OK) + + base_url = _get_base_url(client) + assert data == [ + { + "file_type": "JPEG", + "title": "Bio-formats v1.0.1", + "view_url": 
f"{base_url}/view?file_type=JPEG&viewer_key=simcore/services/dynamic/bio-formats-web&viewer_version=1.0.1", + }, + ] + + +async def test_api_get_viewer_for_unsupported_type(client: TestClient): + resp = await client.get("/v0/viewers/default?file_type=UNSUPPORTED_TYPE") + data, error = await assert_status(resp, status.HTTP_200_OK) + assert data == [] + assert error is None + + +async def test_api_list_supported_filetypes(client: TestClient): + resp = await client.get("/v0/viewers/default") + data, _ = await assert_status(resp, status.HTTP_200_OK) + + base_url = _get_base_url(client) + assert data == [ + { + "title": "Rawgraphs v2.11.1", + "file_type": "CSV", + "view_url": f"{base_url}/view?file_type=CSV&viewer_key=simcore/services/dynamic/raw-graphs&viewer_version=2.11.1", + }, + { + "file_type": "HORNET_REPO", + "title": "Hornet flow v3.2.300", + "view_url": f"{base_url}/view?file_type=HORNET_REPO&viewer_key=simcore/services/dynamic/s4l-ui-modeling&viewer_version=3.2.300", + }, + { + "title": "Jupyterlab math v1.6.9", + "file_type": "IPYNB", + "view_url": f"{base_url}/view?file_type=IPYNB&viewer_key=simcore/services/dynamic/jupyter-octave-python-math&viewer_version=1.6.9", + }, + { + "title": "Bio-formats v1.0.1", + "file_type": "JPEG", + "view_url": f"{base_url}/view?file_type=JPEG&viewer_key=simcore/services/dynamic/bio-formats-web&viewer_version=1.0.1", + }, + { + "title": "Rawgraphs v2.11.1", + "file_type": "JSON", + "view_url": f"{base_url}/view?file_type=JSON&viewer_key=simcore/services/dynamic/raw-graphs&viewer_version=2.11.1", + }, + { + "title": "Bio-formats v1.0.1", + "file_type": "PNG", + "view_url": f"{base_url}/view?file_type=PNG&viewer_key=simcore/services/dynamic/bio-formats-web&viewer_version=1.0.1", + }, + { + "title": "Jupyterlab math v1.6.9", + "file_type": "PY", + "view_url": f"{base_url}/view?file_type=PY&viewer_key=simcore/services/dynamic/jupyter-octave-python-math&viewer_version=1.6.9", + }, + { + "title": "Rawgraphs v2.11.1", + "file_type": "TSV", + "view_url": f"{base_url}/view?file_type=TSV&viewer_key=simcore/services/dynamic/raw-graphs&viewer_version=2.11.1", + }, + { + "title": "Rawgraphs v2.11.1", + "file_type": "XLSX", + "view_url": f"{base_url}/view?file_type=XLSX&viewer_key=simcore/services/dynamic/raw-graphs&viewer_version=2.11.1", + }, + ] + + +async def test_api_list_services(client: TestClient): + assert client.app + + url = client.app.router["list_latest_services"].url_for() + response = await client.get(f"{url}") + + data, error = await assert_status(response, status.HTTP_200_OK) + + services = TypeAdapter(list[ServiceGet]).validate_python(data) + assert services + + # latest versions of services with everyone + ospar-product (see services_access_rights_in_db) + assert services[0].key == "simcore/services/dynamic/raw-graphs" + assert services[0].file_extensions == ["CSV", "JSON", "TSV", "XLSX"] + + assert services[0].view_url.query + assert "2.11.1" in services[0].view_url.query + + assert services[2].key == "simcore/services/dynamic/jupyter-octave-python-math" + assert services[2].file_extensions == ["IPYNB", "PY"] + assert services[2].view_url.query + assert "1.6.9" in services[2].view_url.query + + assert error is None diff --git a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_handlers.py b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_rest_redirects.py similarity index 61% rename from 
services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_handlers.py rename to services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_rest_redirects.py index b2a885451602..de3cbeec3fe1 100644 --- a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_handlers.py +++ b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_rest_redirects.py @@ -8,76 +8,62 @@ import re import urllib.parse from typing import Any +from unittest import mock import pytest -import simcore_service_webserver.studies_dispatcher -import sqlalchemy as sa from aiohttp import ClientResponse, ClientSession from aiohttp.test_utils import TestClient, TestServer from aioresponses import aioresponses -from models_library.projects_state import ProjectLocked, ProjectStatus -from pydantic import BaseModel, ByteSize, TypeAdapter +from common_library.json_serialization import json_dumps +from common_library.serialization import model_dump_with_secrets +from common_library.users_enums import UserRole +from models_library.projects_state import ProjectShareState, ProjectStatus +from pydantic import ByteSize, TypeAdapter from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict, UserRole -from pytest_simcore.pydantic_models import ( - assert_validation_model, - walk_model_examples_in_package, -) +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status +from settings_library.rabbit import RabbitSettings from settings_library.redis import RedisSettings from settings_library.utils_session import DEFAULT_SESSION_COOKIE_NAME -from simcore_service_webserver.studies_dispatcher._core import ViewerInfo -from simcore_service_webserver.studies_dispatcher._rest_handlers import ServiceGet -from sqlalchemy.sql import text +from simcore_service_webserver.studies_dispatcher._models import ViewerInfo from yarl import URL -# -# FIXTURES OVERRIDES -# - +pytest_simcore_core_services_selection = [ + "rabbit", +] -@pytest.fixture(scope="module") -def postgres_db(postgres_db: sa.engine.Engine) -> sa.engine.Engine: - # - # Extends postgres_db fixture (called with web_server) to inject tables and start redis - # - stmt_create_services = text( - 'INSERT INTO "services_meta_data" ("key", "version", "owner", "name", "description", "thumbnail", "classifiers", "created", "modified", "quality") VALUES' - "('simcore/services/dynamic/raw-graphs', '2.11.1', NULL, '2D plot', '2D plots powered by RAW Graphs', NULL, '{}', '2021-03-02 16:08:28.655207', '2021-03-02 16:08:28.655207', '{}')," - "('simcore/services/dynamic/bio-formats-web', '1.0.1', NULL, 'bio-formats', 'Bio-Formats image viewer', 'https://www.openmicroscopy.org/img/logos/bio-formats.svg', '{}', '2021-03-02 16:08:28.420722', '2021-03-02 16:08:28.420722', '{}')," - "('simcore/services/dynamic/jupyter-octave-python-math', '1.6.9', NULL, 'JupyterLab Math', 'JupyterLab Math with octave and python', NULL, '{}', '2021-03-02 16:08:28.420722', '2021-03-02 16:08:28.420722', '{}');" - ) - stmt_create_services_consume_filetypes = text( - 'INSERT INTO "services_consume_filetypes" ("service_key", "service_version", "service_display_name", "service_input_port", "filetype", "preference_order", "is_guest_allowed") VALUES' - 
"('simcore/services/dynamic/bio-formats-web', '1.0.1', 'bio-formats', 'input_1', 'PNG', 0, '1')," - "('simcore/services/dynamic/raw-graphs', '2.11.1', 'RAWGraphs', 'input_1', 'CSV', 0, '1')," - "('simcore/services/dynamic/bio-formats-web', '1.0.1', 'bio-formats', 'input_1', 'JPEG', 0, '1')," - "('simcore/services/dynamic/raw-graphs', '2.11.1', 'RAWGraphs', 'input_1', 'TSV', 0, '1')," - "('simcore/services/dynamic/raw-graphs', '2.11.1', 'RAWGraphs', 'input_1', 'XLSX', 0, '1')," - "('simcore/services/dynamic/raw-graphs', '2.11.1', 'RAWGraphs', 'input_1', 'JSON', 0, '1')," - "('simcore/services/dynamic/jupyter-octave-python-math', '1.6.9', 'JupyterLab Math', 'input_1', 'PY', 0, '0')," - "('simcore/services/dynamic/jupyter-octave-python-math', '1.6.9', 'JupyterLab Math', 'input_1', 'IPYNB',0, '0');" - ) - # NOTE: users default osparc project and everyone group (which should be by default already in tables) - stmt_create_services_access_rights = text( - ' INSERT INTO "services_access_rights" ("key", "version", "gid", "execute_access", "write_access", "created", "modified", "product_name") VALUES' - "('simcore/services/dynamic/raw-graphs', '2.11.1', 1, 't', 'f', '2022-05-23 08:44:45.418376', '2022-05-23 08:44:45.418376', 'osparc')," - "('simcore/services/dynamic/jupyter-octave-python-math', '1.6.9', 1, 't', 'f', '2022-05-23 08:44:45.418376', '2022-05-23 08:44:45.418376', 'osparc');" +@pytest.fixture +def app_environment( + app_environment: EnvVarsDict, + monkeypatch: pytest.MonkeyPatch, + rabbit_service: RabbitSettings, +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + { + "WEBSERVER_RABBITMQ": json_dumps( + model_dump_with_secrets(rabbit_service, show_secrets=True) + ) + }, ) - with postgres_db.connect() as conn: - conn.execute(stmt_create_services) - conn.execute(stmt_create_services_consume_filetypes) - conn.execute(stmt_create_services_access_rights) - - return postgres_db @pytest.fixture -def web_server(redis_service: RedisSettings, web_server: TestServer) -> TestServer: +def web_server( + redis_service: RedisSettings, + rabbit_service: RabbitSettings, + web_server: TestServer, + # Add dependencies to ensure database is populated before app starts + services_metadata_in_db: list[dict], + services_consume_filetypes_in_db: list[dict], + services_access_rights_in_db: list[dict], +) -> TestServer: # - # Extends web_server to start redis_service + # Extends web_server to start redis_service and ensure DB is populated # print( "Redis service started with settings: ", redis_service.model_dump_json(indent=1) @@ -152,123 +138,6 @@ async def director_v2_automock( ] -# REST-API -# Samples taken from trials on http://127.0.0.1:9081/dev/doc#/viewer/get_viewer_for_file -# - - -def _get_base_url(client: TestClient) -> str: - s = client.server - assert isinstance(s.scheme, str) - url = URL.build(scheme=s.scheme, host=s.host, port=s.port) - return f"{url}" - - -async def test_api_get_viewer_for_file(client: TestClient): - resp = await client.get("/v0/viewers/default?file_type=JPEG") - data, _ = await assert_status(resp, status.HTTP_200_OK) - - base_url = _get_base_url(client) - assert data == [ - { - "file_type": "JPEG", - "title": "Bio-formats v1.0.1", - "view_url": f"{base_url}/view?file_type=JPEG&viewer_key=simcore/services/dynamic/bio-formats-web&viewer_version=1.0.1", - }, - ] - - -async def test_api_get_viewer_for_unsupported_type(client: TestClient): - resp = await client.get("/v0/viewers/default?file_type=UNSUPPORTED_TYPE") - data, error = await assert_status(resp, status.HTTP_200_OK) 
- assert data == [] - assert error is None - - -async def test_api_list_supported_filetypes(client: TestClient): - resp = await client.get("/v0/viewers/default") - data, _ = await assert_status(resp, status.HTTP_200_OK) - - base_url = _get_base_url(client) - assert data == [ - { - "title": "Rawgraphs v2.11.1", - "file_type": "CSV", - "view_url": f"{base_url}/view?file_type=CSV&viewer_key=simcore/services/dynamic/raw-graphs&viewer_version=2.11.1", - }, - { - "title": "Jupyterlab math v1.6.9", - "file_type": "IPYNB", - "view_url": f"{base_url}/view?file_type=IPYNB&viewer_key=simcore/services/dynamic/jupyter-octave-python-math&viewer_version=1.6.9", - }, - { - "title": "Bio-formats v1.0.1", - "file_type": "JPEG", - "view_url": f"{base_url}/view?file_type=JPEG&viewer_key=simcore/services/dynamic/bio-formats-web&viewer_version=1.0.1", - }, - { - "title": "Rawgraphs v2.11.1", - "file_type": "JSON", - "view_url": f"{base_url}/view?file_type=JSON&viewer_key=simcore/services/dynamic/raw-graphs&viewer_version=2.11.1", - }, - { - "title": "Bio-formats v1.0.1", - "file_type": "PNG", - "view_url": f"{base_url}/view?file_type=PNG&viewer_key=simcore/services/dynamic/bio-formats-web&viewer_version=1.0.1", - }, - { - "title": "Jupyterlab math v1.6.9", - "file_type": "PY", - "view_url": f"{base_url}/view?file_type=PY&viewer_key=simcore/services/dynamic/jupyter-octave-python-math&viewer_version=1.6.9", - }, - { - "title": "Rawgraphs v2.11.1", - "file_type": "TSV", - "view_url": f"{base_url}/view?file_type=TSV&viewer_key=simcore/services/dynamic/raw-graphs&viewer_version=2.11.1", - }, - { - "title": "Rawgraphs v2.11.1", - "file_type": "XLSX", - "view_url": f"{base_url}/view?file_type=XLSX&viewer_key=simcore/services/dynamic/raw-graphs&viewer_version=2.11.1", - }, - ] - - -@pytest.mark.parametrize( - "model_cls, example_name, example_data", - walk_model_examples_in_package(simcore_service_webserver.studies_dispatcher), -) -def test_model_examples( - model_cls: type[BaseModel], example_name: str, example_data: Any -): - assert_validation_model( - model_cls, example_name=example_name, example_data=example_data - ) - - -async def test_api_list_services(client: TestClient): - assert client.app - - url = client.app.router["list_latest_services"].url_for() - response = await client.get(f"{url}") - - data, error = await assert_status(response, status.HTTP_200_OK) - - services = TypeAdapter(list[ServiceGet]).validate_python(data) - assert services - - # latest versions of services with everyone + ospar-product (see stmt_create_services_access_rights) - assert services[0].key == "simcore/services/dynamic/raw-graphs" - assert services[0].file_extensions == ["CSV", "JSON", "TSV", "XLSX"] - assert "2.11.1" in services[0].view_url.query - - assert services[1].key == "simcore/services/dynamic/jupyter-octave-python-math" - assert services[1].file_extensions == ["IPYNB", "PY"] - assert "1.6.9" in services[1].view_url.query - - assert error is None - - # REDIRECT ROUTES -------------------------------------------------------------------------------- @@ -298,8 +167,10 @@ def mocks_on_projects_api(mocker) -> None: All projects in this module are UNLOCKED """ mocker.patch( - "simcore_service_webserver.projects._projects_service._get_project_lock_state", - return_value=ProjectLocked(value=False, status=ProjectStatus.CLOSED), + "simcore_service_webserver.projects._projects_service._get_project_share_state", + return_value=ProjectShareState( + locked=False, status=ProjectStatus.CLOSED, current_user_groupids=[] + ), ) @@ -385,12 
+256,12 @@ def redirect_url(redirect_type: str, client: TestClient) -> URL: async def test_dispatch_study_anonymously( + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, redirect_url: URL, redirect_type: str, mocker: MockerFixture, storage_subsystem_mock, - catalog_subsystem_mock: None, mocks_on_projects_api, ): assert client.app @@ -399,7 +270,7 @@ async def test_dispatch_study_anonymously( return_value=None, ) mock_dynamic_scheduler_update_project_networks = mocker.patch( - "simcore_service_webserver.studies_dispatcher._redirects_handlers.dynamic_scheduler_service.update_projects_networks", + "simcore_service_webserver.studies_dispatcher._controller.rest.redirects.dynamic_scheduler_service.update_projects_networks", return_value=None, ) @@ -424,7 +295,7 @@ async def test_dispatch_study_anonymously( # guest user only a copy of the template project url = client.app.router["list_projects"].url_for() - response = await client.get(f'{url.with_query(type="user")}') + response = await client.get(f"{url.with_query(type='user')}") payload = await response.json() assert response.status == 200, payload @@ -449,6 +320,7 @@ async def test_dispatch_study_anonymously( ], ) async def test_dispatch_logged_in_user( + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, redirect_url: URL, redirect_type: str, @@ -456,7 +328,6 @@ async def test_dispatch_logged_in_user( mocker: MockerFixture, mock_dynamic_scheduler: None, storage_subsystem_mock, - catalog_subsystem_mock: None, mocks_on_projects_api: None, ): assert client.app @@ -465,7 +336,7 @@ async def test_dispatch_logged_in_user( return_value=None, ) mock_dynamic_scheduler_update_project_networks = mocker.patch( - "simcore_service_webserver.studies_dispatcher._redirects_handlers.dynamic_scheduler_service.update_projects_networks", + "simcore_service_webserver.studies_dispatcher._controller.rest.redirects.dynamic_scheduler_service.update_projects_networks", return_value=None, ) @@ -482,7 +353,7 @@ async def test_dispatch_logged_in_user( # guest user only a copy of the template project url = client.app.router["list_projects"].url_for() - response = await client.get(f'{url.with_query(type="user")}') + response = await client.get(f"{url.with_query(type='user')}") payload = await response.json() assert response.status == 200, payload @@ -549,7 +420,7 @@ async def test_viewer_redirect_with_file_type_errors(client: TestClient): message, status_code = assert_error_in_fragment(resp) assert status_code == status.HTTP_422_UNPROCESSABLE_ENTITY - assert "type" in message.lower() + assert "link" in message.lower() async def test_viewer_redirect_with_client_errors(client: TestClient): diff --git a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_studies_access.py b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_studies_access.py index 6f4e923a203b..056a64254ad7 100644 --- a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_studies_access.py +++ b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_studies_access.py @@ -12,40 +12,55 @@ from copy import deepcopy from pathlib import Path from pprint import pformat +from typing import Any +from unittest import mock import pytest import redis.asyncio as aioredis from aiohttp import ClientResponse, ClientSession, web from aiohttp.test_utils import TestClient, TestServer +from common_library.json_serialization import 
json_dumps +from common_library.serialization import model_dump_with_secrets +from common_library.users_enums import UserRole from faker import Faker from models_library.api_schemas_rpc_async_jobs.async_jobs import AsyncJobStatus from models_library.progress_bar import ProgressReport -from models_library.projects_state import ProjectLocked, ProjectStatus +from models_library.projects_state import ( + ProjectShareState, + ProjectStatus, +) from models_library.users import UserID from pytest_mock import MockerFixture from pytest_simcore.aioresponses_mocker import AioResponsesMock from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict, UserRole +from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict +from pytest_simcore.helpers.typing_env import EnvVarsDict from pytest_simcore.helpers.webserver_parametrizations import MockedStorageSubsystem from pytest_simcore.helpers.webserver_projects import NewProject, delete_all_projects +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE from servicelib.rabbitmq.rpc_interfaces.async_jobs.async_jobs import ( AsyncJobComposedResult, ) from servicelib.rest_responses import unwrap_envelope +from settings_library.rabbit import RabbitSettings from settings_library.utils_session import DEFAULT_SESSION_COOKIE_NAME from simcore_service_webserver.projects._projects_service import ( submit_delete_project_task, ) from simcore_service_webserver.projects.models import ProjectDict from simcore_service_webserver.projects.utils import NodesMap -from simcore_service_webserver.users.api import ( +from simcore_service_webserver.users.users_service import ( delete_user_without_projects, get_user_role, ) from tenacity import retry, stop_after_attempt, wait_fixed +pytest_simcore_core_services_selection = [ + "rabbit", +] + async def _get_user_projects(client) -> list[ProjectDict]: url = client.app.router["list_projects"].url_for() @@ -60,7 +75,7 @@ async def _get_user_projects(client) -> list[ProjectDict]: return projects -def _assert_same_projects(got: dict, expected: dict): +def _assert_same_projects(got: dict[str, Any], expected: dict[str, Any]): exclude = { "accessRights", "creationDate", @@ -75,15 +90,32 @@ def _assert_same_projects(got: dict, expected: dict): "type", "templateType", } - for key in expected: - if key not in exclude: - assert got[key] == expected[key], f"Failed in {key}" + expected_values = {k: v for k, v in expected.items() if k not in exclude} + got_values = {k: got[k] for k in expected if k not in exclude} + + assert got_values == expected_values def _is_user_authenticated(session: ClientSession) -> bool: return DEFAULT_SESSION_COOKIE_NAME in [c.key for c in session.cookie_jar] +@pytest.fixture +def app_environment( + app_environment: EnvVarsDict, + monkeypatch: pytest.MonkeyPatch, + rabbit_service: RabbitSettings, +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + { + "WEBSERVER_RABBITMQ": json_dumps( + model_dump_with_secrets(rabbit_service, show_secrets=True) + ) + }, + ) + + @pytest.fixture async def published_project( client: TestClient, @@ -142,9 +174,14 @@ def mocks_on_projects_api(mocker: MockerFixture) -> None: """ All projects in this module are UNLOCKED """ - mocker.patch( - "simcore_service_webserver.projects._projects_service._get_project_lock_state", - return_value=ProjectLocked(value=False, 
status=ProjectStatus.CLOSED), + import simcore_service_webserver.projects._projects_service + + mocker.patch.object( + simcore_service_webserver.projects._projects_service, + "_get_project_share_state", + return_value=ProjectShareState( + locked=False, status=ProjectStatus.CLOSED, current_user_groupids=[] + ), ) @@ -274,7 +311,8 @@ async def _assert_redirected_to_study( async def test_access_to_invalid_study(client: TestClient, faker: Faker): - response = await client.get(f"/study/{faker.uuid4()}") + invalid_project_id = faker.uuid4() + response = await client.get(f"/study/{invalid_project_id}") _assert_redirected_to_error_page( response, @@ -296,18 +334,16 @@ async def test_access_to_forbidden_study( async def test_access_study_anonymously( + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, published_project: ProjectDict, storage_subsystem_mock_override: None, - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], mock_dynamic_scheduler: None, director_v2_service_mock: AioResponsesMock, mocks_on_projects_api: None, # needed to cleanup the locks between parametrizations redis_locks_client: AsyncIterator[aioredis.Redis], ): - catalog_subsystem_mock([published_project]) - assert not _is_user_authenticated(client.session), "Is anonymous" assert client.app study_url = client.app.router["get_redirection_to_study_page"].url_for( @@ -346,11 +382,11 @@ async def auto_delete_projects(client: TestClient) -> AsyncIterator[None]: @pytest.mark.parametrize("user_role", [UserRole.USER, UserRole.TESTER]) async def test_access_study_by_logged_user( + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, published_project: ProjectDict, storage_subsystem_mock_override: None, - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], mock_dynamic_scheduler: None, director_v2_service_mock: AioResponsesMock, mocks_on_projects_api: None, @@ -359,7 +395,6 @@ async def test_access_study_by_logged_user( redis_locks_client: AsyncIterator[aioredis.Redis], ): assert client.app - catalog_subsystem_mock([published_project]) assert _is_user_authenticated(client.session), "Is already logged-in" study_url = client.app.router["get_redirection_to_study_page"].url_for( @@ -381,17 +416,16 @@ async def test_access_study_by_logged_user( async def test_access_cookie_of_expired_user( + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, published_project: ProjectDict, storage_subsystem_mock_override: None, - catalog_subsystem_mock: Callable[[list[ProjectDict]], None], director_v2_service_mock: AioResponsesMock, mock_dynamic_scheduler: None, mocks_on_projects_api: None, # needed to cleanup the locks between parametrizations redis_locks_client: AsyncIterator[aioredis.Redis], ): - catalog_subsystem_mock([published_project]) # emulates issue #1570 assert client.app # nosec app: web.Application = client.app @@ -427,7 +461,7 @@ async def enforce_garbage_collect_guest(uid): ) await delete_task - await delete_user_without_projects(app, uid) + await delete_user_without_projects(app, user_id=uid) return uid user_id = await enforce_garbage_collect_guest(uid=data["id"]) @@ -464,19 +498,18 @@ async def enforce_garbage_collect_guest(uid): ], ) async def test_guest_user_is_not_garbage_collected( + mocked_dynamic_services_interface: dict[str, mock.MagicMock], number_of_simultaneous_requests: int, web_server: TestServer, aiohttp_client: Callable, published_project: ProjectDict, storage_subsystem_mock_override: None, - 
catalog_subsystem_mock: Callable[[list[ProjectDict]], None],
mock_dynamic_scheduler: None,
director_v2_service_mock: AioResponsesMock,
mocks_on_projects_api: None,
# needed to cleanup the locks between parametrizations
redis_locks_client: AsyncIterator[aioredis.Redis],
):
- catalog_subsystem_mock([published_project])
## NOTE: use pytest -s --log-cli-level=DEBUG to see GC logs
async def _test_guest_user_workflow(request_index):
diff --git a/services/web/server/tests/unit/with_dbs/04/test_fogbugz_client.py b/services/web/server/tests/unit/with_dbs/04/test_fogbugz_client.py new file mode 100644 index 000000000000..6f105e79f16d --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/04/test_fogbugz_client.py @@ -0,0 +1,125 @@
+# pylint: disable=redefined-outer-name
+# pylint: disable=unused-argument
+# pylint: disable=unused-variable
+# pylint: disable=too-many-arguments
+# pylint: disable=too-many-statements
+
+import json
+from collections.abc import Iterator
+
+import httpx
+import pytest
+import respx
+from aiohttp.test_utils import TestClient
+from pytest_mock import MockerFixture
+from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
+from pytest_simcore.helpers.typing_env import EnvVarsDict
+from simcore_service_webserver.fogbugz._client import (
+ FogbugzCaseCreate,
+ get_fogbugz_rest_client,
+)
+from simcore_service_webserver.fogbugz.settings import FogbugzSettings
+
+
+@pytest.fixture
+def fake_api_base_url() -> str:
+ return "https://dummy.com"
+
+
+@pytest.fixture
+def app_environment(
+ monkeypatch: pytest.MonkeyPatch,
+ app_environment: EnvVarsDict,
+ fake_api_base_url: str,
+ mocker: MockerFixture,
+):
+ return app_environment | setenvs_from_dict(
+ monkeypatch,
+ {
+ "FOGBUGZ_URL": fake_api_base_url,
+ "FOGBUGZ_API_TOKEN": "asdf",
+ },
+ )
+
+
+_IXBUG_DUMMY = "12345"
+
+
+@pytest.fixture
+def mock_fogbugz_api(fake_api_base_url: str) -> Iterator[respx.MockRouter]:
+ """Mock Fogbugz API responses in sequence for the test flow"""
+
+ # Define responses in the order they will be called during the test
+ responses = [
+ # 1. create_case response
+ {"data": {"case": {"ixBug": _IXBUG_DUMMY}}},
+ # 2. get_case_status response (after creation)
+ {"data": {"cases": [{"ixBug": _IXBUG_DUMMY, "sStatus": "Active"}]}},
+ # 3. resolve_case response
+ {"data": {}},
+ # 4. get_case_status response (after resolve)
+ {
+ "data": {
+ "cases": [{"ixBug": _IXBUG_DUMMY, "sStatus": "Resolved (Completed)"}]
+ }
+ },
+ # 5. reopen_case responses (reopen first fetches the current status, then posts the reopen)
+ {
+ "data": {
+ "cases": [{"ixBug": _IXBUG_DUMMY, "sStatus": "Resolved (Completed)"}]
+ }
+ },
+ {"data": {}},
+ # 6. get_case_status response (after reopen)
+ {"data": {"cases": [{"ixBug": _IXBUG_DUMMY, "sStatus": "Active"}]}},
+ ]
+
+ with respx.mock(base_url=fake_api_base_url) as mock:
+ # Create a side_effect that returns responses in sequence
+ mock.post(path="/f/api/0/jsonapi").mock(
+ side_effect=[httpx.Response(200, json=response) for response in responses]
+ )
+ yield mock
+
+
+async def test_fogbugz_client(
+ app_environment: EnvVarsDict,
+ client: TestClient,
+ mock_fogbugz_api: respx.MockRouter,
+):
+ assert client.app
+
+ settings = FogbugzSettings.create_from_envs()
+ assert settings.FOGBUGZ_API_TOKEN
+
+ fogbugz_client = get_fogbugz_rest_client(client.app)
+ assert fogbugz_client
+
+ _json = {"first_key": "test", "second_key": "test2"}
+ _description = f"""
+ Dear Support Team,
+
+ We have received a support request.
+ + Extra content: {json.dumps(_json)} + """ + + case_id = await fogbugz_client.create_case( + data=FogbugzCaseCreate( + fogbugz_project_id=45, + title="Matus Test Automatic Creation of Fogbugz Case", + description=_description, + ) + ) + assert case_id == _IXBUG_DUMMY + + status = await fogbugz_client.get_case_status(case_id) + assert status == "Active" + + await fogbugz_client.resolve_case(case_id) + status = await fogbugz_client.get_case_status(case_id) + assert status == "Resolved (Completed)" + + await fogbugz_client.reopen_case(case_id, assigned_fogbugz_person_id="281") + status = await fogbugz_client.get_case_status(case_id) + assert status == "Active" diff --git a/services/web/server/tests/unit/with_dbs/04/wallets/conftest.py b/services/web/server/tests/unit/with_dbs/04/wallets/conftest.py index 56ffb85ebf6c..f004044c34ff 100644 --- a/services/web/server/tests/unit/with_dbs/04/wallets/conftest.py +++ b/services/web/server/tests/unit/with_dbs/04/wallets/conftest.py @@ -13,8 +13,8 @@ from faker import Faker from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.helpers.webserver_projects import NewProject, delete_all_projects +from pytest_simcore.helpers.webserver_users import UserInfoDict from simcore_postgres_database.models.wallets import wallets from simcore_service_webserver.application_settings import ApplicationSettings diff --git a/services/web/server/tests/unit/with_dbs/04/wallets/payments/conftest.py b/services/web/server/tests/unit/with_dbs/04/wallets/payments/conftest.py index 5fce5fad9cb6..4eb4f8f01ca9 100644 --- a/services/web/server/tests/unit/with_dbs/04/wallets/payments/conftest.py +++ b/services/web/server/tests/unit/with_dbs/04/wallets/payments/conftest.py @@ -32,13 +32,14 @@ from pydantic import EmailStr, HttpUrl from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.payments_transactions import payments_transactions from simcore_postgres_database.models.users_details import ( users_pre_registration_details, ) from simcore_service_webserver.db.models import UserRole +from simcore_service_webserver.models import PhoneNumberStr from simcore_service_webserver.payments._methods_api import ( _fake_cancel_creation_of_wallet_payment_method, _fake_delete_wallet_payment_method, @@ -320,7 +321,10 @@ async def _get_invoice_url( @pytest.fixture def setup_user_pre_registration_details_db( - postgres_db: sa.engine.Engine, logged_user: UserInfoDict, faker: Faker + postgres_db: sa.engine.Engine, + logged_user: UserInfoDict, + faker: Faker, + user_phone_number: PhoneNumberStr, ) -> Iterator[int]: with postgres_db.connect() as con: result = con.execute( @@ -330,7 +334,7 @@ def setup_user_pre_registration_details_db( pre_email=faker.email(), pre_first_name=faker.first_name(), pre_last_name=faker.last_name(), - pre_phone=faker.phone_number(), + pre_phone=user_phone_number, institution=faker.company(), address=faker.address().replace("\n", ", "), city=faker.city(), @@ -338,6 +342,8 @@ def setup_user_pre_registration_details_db( country=faker.random_element([c.name for c in pycountry.countries]), postal_code=faker.postcode(), created_by=None, + # NOTE: here product is not specified 
(i.e. product_name=None) on purpose to check + # backwards compatibility ) .returning(sa.literal_column("*")) ) diff --git a/services/web/server/tests/unit/with_dbs/04/wallets/payments/test_payments.py b/services/web/server/tests/unit/with_dbs/04/wallets/payments/test_payments.py index ade4a9d58c1f..890e15e5709a 100644 --- a/services/web/server/tests/unit/with_dbs/04/wallets/payments/test_payments.py +++ b/services/web/server/tests/unit/with_dbs/04/wallets/payments/test_payments.py @@ -20,7 +20,8 @@ from pydantic import TypeAdapter from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import LoggedUser, NewUser, UserInfoDict +from pytest_simcore.helpers.webserver_login import LoggedUser +from pytest_simcore.helpers.webserver_users import NewUser, UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.payments_transactions import ( PaymentTransactionState, @@ -333,6 +334,7 @@ async def test_billing_info_missing_error( assert not data assert MSG_BILLING_DETAILS_NOT_DEFINED_ERROR in error["message"] + assert error["supportId"] is not None async def test_payment_not_found( diff --git a/services/web/server/tests/unit/with_dbs/04/wallets/payments/test_payments_rpc.py b/services/web/server/tests/unit/with_dbs/04/wallets/payments/test_payments_rpc.py index af0f7d304cae..bbbc194a4281 100644 --- a/services/web/server/tests/unit/with_dbs/04/wallets/payments/test_payments_rpc.py +++ b/services/web/server/tests/unit/with_dbs/04/wallets/payments/test_payments_rpc.py @@ -9,14 +9,11 @@ import pytest from aiohttp.test_utils import TestClient -from models_library.api_schemas_webserver import WEBSERVER_RPC_NAMESPACE -from models_library.payments import InvoiceDataGet -from models_library.rabbitmq_basic_types import RPCMethodName -from pydantic import TypeAdapter from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.rabbitmq import RabbitMQRPCClient +from servicelib.rabbitmq.rpc_interfaces.webserver.payments import get_invoice_data from settings_library.rabbit import RabbitSettings from simcore_service_webserver.application_settings import ApplicationSettings from simcore_service_webserver.payments.settings import ( @@ -75,13 +72,11 @@ async def test_one_time_payment_worfklow( assert settings.PAYMENTS_FAKE_COMPLETION is False - result = await rpc_client.request( - WEBSERVER_RPC_NAMESPACE, - TypeAdapter(RPCMethodName).validate_python("get_invoice_data"), + invoice_data_get = await get_invoice_data( + rpc_client, user_id=logged_user["id"], dollar_amount=Decimal(900), product_name="osparc", ) - invoice_data_get = InvoiceDataGet.model_validate(result) assert invoice_data_get assert len(invoice_data_get.user_invoice_address.country) == 2 diff --git a/services/web/server/tests/unit/with_dbs/04/wallets/test_wallets.py b/services/web/server/tests/unit/with_dbs/04/wallets/test_wallets.py index d35be5074aee..80fd0aa0029d 100644 --- a/services/web/server/tests/unit/with_dbs/04/wallets/test_wallets.py +++ b/services/web/server/tests/unit/with_dbs/04/wallets/test_wallets.py @@ -30,7 +30,7 @@ from simcore_service_webserver.login._login_service import notify_user_confirmation from simcore_service_webserver.products.products_service import get_product from
simcore_service_webserver.projects.models import ProjectDict -from simcore_service_webserver.users.api import UserDisplayAndIdNamesTuple +from simcore_service_webserver.users.models import UserDisplayAndIdNamesTuple from simcore_service_webserver.wallets._events import ( _WALLET_DESCRIPTION_TEMPLATE, _WALLET_NAME_TEMPLATE, diff --git a/services/web/server/tests/unit/with_dbs/04/wallets/test_wallets_groups.py b/services/web/server/tests/unit/with_dbs/04/wallets/test_wallets_groups.py index cd21bfea509d..b9187aa7040a 100644 --- a/services/web/server/tests/unit/with_dbs/04/wallets/test_wallets_groups.py +++ b/services/web/server/tests/unit/with_dbs/04/wallets/test_wallets_groups.py @@ -10,7 +10,7 @@ import pytest from aiohttp.test_utils import TestClient from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict +from pytest_simcore.helpers.webserver_users import NewUser, UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver.db.models import UserRole from simcore_service_webserver.projects.models import ProjectDict diff --git a/services/web/server/tests/unit/with_dbs/04/workspaces/conftest.py b/services/web/server/tests/unit/with_dbs/04/workspaces/conftest.py index 2f2e06af8fb1..fa008269aaff 100644 --- a/services/web/server/tests/unit/with_dbs/04/workspaces/conftest.py +++ b/services/web/server/tests/unit/with_dbs/04/workspaces/conftest.py @@ -5,7 +5,6 @@ import pytest import sqlalchemy as sa -from pytest_mock import MockerFixture from simcore_postgres_database.models.projects import projects from simcore_postgres_database.models.workspaces import workspaces @@ -16,17 +15,3 @@ def workspaces_clean_db(postgres_db: sa.engine.Engine) -> Iterator[None]: yield con.execute(workspaces.delete()) con.execute(projects.delete()) - - -@pytest.fixture -def mock_catalog_api_get_services_for_user_in_product(mocker: MockerFixture): - mocker.patch( - "simcore_service_webserver.projects._crud_api_read.catalog_service.get_services_for_user_in_product", - spec=True, - return_value=[], - ) - mocker.patch( - "simcore_service_webserver.projects._controller.projects_rest.project_uses_available_services", - spec=True, - return_value=True, - ) diff --git a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces.py b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces.py index 88b684ddb562..256cf617fa3b 100644 --- a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces.py +++ b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces.py @@ -11,13 +11,12 @@ from aiohttp.test_utils import TestClient from models_library.api_schemas_webserver.workspaces import WorkspaceGet from models_library.rest_ordering import OrderDirection -from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.helpers.webserver_parametrizations import ( ExpectedResponse, standard_role_response, ) +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver.db.models import UserRole from simcore_service_webserver.projects.models import ProjectDict @@ -64,7 +63,6 @@ async def test_workspaces_workflow( logged_user: UserInfoDict, user_project: ProjectDict, expected: HTTPStatus, - mock_catalog_api_get_services_for_user_in_product: MockerFixture, workspaces_clean_db: AsyncIterator[None], ): 
assert client.app diff --git a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__delete_workspace_with_content.py b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__delete_workspace_with_content.py index 35f98a79f854..38487e61070e 100644 --- a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__delete_workspace_with_content.py +++ b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__delete_workspace_with_content.py @@ -14,8 +14,8 @@ from models_library.api_schemas_webserver.workspaces import WorkspaceGet from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.helpers.webserver_projects import create_project +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver.db.models import UserRole from simcore_service_webserver.projects.models import ProjectDict @@ -23,10 +23,6 @@ @pytest.fixture def mock_storage_delete_data_folders(mocker: MockerFixture) -> mock.Mock: - mocker.patch( - "simcore_service_webserver.dynamic_scheduler.api.list_dynamic_services", - autospec=True, - ) mocker.patch( "simcore_service_webserver.projects._projects_service.remove_project_dynamic_services", autospec=True, @@ -43,11 +39,11 @@ def mock_storage_delete_data_folders(mocker: MockerFixture) -> mock.Mock: @pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)]) async def test_workspaces_full_workflow_deletion( + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, expected: HTTPStatus, - mock_catalog_api_get_services_for_user_in_product: MockerFixture, fake_project: ProjectDict, workspaces_clean_db: None, mock_storage_delete_data_folders: mock.Mock, diff --git a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py index b536ef5bf01b..73f89dde70c8 100644 --- a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py +++ b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py @@ -12,6 +12,8 @@ import pytest from aiohttp.test_utils import TestClient from models_library.api_schemas_webserver.workspaces import WorkspaceGet +from models_library.groups import GroupID +from pydantic import TypeAdapter from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status from pytest_simcore.helpers.webserver_login import LoggedUser, UserInfoDict @@ -23,12 +25,12 @@ @pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)]) -async def test_workspaces_full_workflow_with_folders_and_projects( +async def test_workspaces_full_workflow_with_folders_and_projects( # noqa: PLR0915 + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, expected: HTTPStatus, - mock_catalog_api_get_services_for_user_in_product: MockerFixture, fake_project: ProjectDict, workspaces_clean_db: None, ): @@ -76,7 +78,7 @@ async def test_workspaces_full_workflow_with_folders_and_projects( data, _ = await assert_status(resp, status.HTTP_200_OK) assert data["uuid"] == project["uuid"] assert 
data["workspaceId"] == added_workspace.workspace_id - assert data["folderId"] is None + assert data.get("folderId") is None # Create folder in workspace url = client.app.router["create_folder"].url_for() @@ -140,7 +142,9 @@ async def test_workspaces_full_workflow_with_folders_and_projects( await update_or_insert_workspace_group( client.app, workspace_id=added_workspace.workspace_id, - group_id=new_logged_user["primary_gid"], + group_id=TypeAdapter(GroupID).validate_python( + new_logged_user["primary_gid"] + ), read=True, write=True, delete=False, @@ -204,7 +208,9 @@ async def test_workspaces_full_workflow_with_folders_and_projects( await update_or_insert_workspace_group( client.app, workspace_id=added_workspace.workspace_id, - group_id=new_logged_user["primary_gid"], + group_id=TypeAdapter(GroupID).validate_python( + new_logged_user["primary_gid"] + ), read=True, write=False, delete=False, @@ -233,10 +239,6 @@ async def test_workspaces_full_workflow_with_folders_and_projects( @pytest.fixture def mock_storage_delete_data_folders(mocker: MockerFixture) -> mock.Mock: - mocker.patch( - "simcore_service_webserver.dynamic_scheduler.api.list_dynamic_services", - autospec=True, - ) mocker.patch( "simcore_service_webserver.projects._projects_service.remove_project_dynamic_services", autospec=True, @@ -253,11 +255,11 @@ def mock_storage_delete_data_folders(mocker: MockerFixture) -> mock.Mock: @pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)]) async def test_workspaces_delete_folders( + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, expected: HTTPStatus, - mock_catalog_api_get_services_for_user_in_product: MockerFixture, fake_project: ProjectDict, workspaces_clean_db: None, mock_storage_delete_data_folders: mock.Mock, @@ -363,11 +365,11 @@ async def test_workspaces_delete_folders( @pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)]) async def test_listing_folders_and_projects_in_workspace__multiple_workspaces_created( request: pytest.FixtureRequest, + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, expected: HTTPStatus, - mock_catalog_api_get_services_for_user_in_product: MockerFixture, fake_project: ProjectDict, workspaces_clean_db: None, ): diff --git a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_folders_full_search.py b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_folders_full_search.py index 3cfc1a78842f..e95f2126d780 100644 --- a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_folders_full_search.py +++ b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_folders_full_search.py @@ -6,17 +6,19 @@ from http import HTTPStatus +from unittest import mock import pytest from aiohttp.test_utils import TestClient from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver.db.models import UserRole @pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)]) async def test_workspaces__list_folders_full_search( + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, expected: 
HTTPStatus, diff --git a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_projects_full_search.py b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_projects_full_search.py index 3ee21f6d55fa..04dddceaa593 100644 --- a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_projects_full_search.py +++ b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_projects_full_search.py @@ -8,15 +8,18 @@ import json from copy import deepcopy from http import HTTPStatus +from unittest import mock import pytest from aiohttp.test_utils import TestClient -from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.helpers.webserver_projects import create_project +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver.db.models import UserRole +from simcore_service_webserver.projects import ( + _projects_repository as projects_service_repository, +) from simcore_service_webserver.projects.models import ProjectDict _SEARCH_NAME_1 = "Quantum Solutions" @@ -25,12 +28,12 @@ @pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)]) -async def test_workspaces__list_projects_full_search( +async def test_workspaces__list_projects_full_search( # noqa: PLR0915 + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, expected: HTTPStatus, - mock_catalog_api_get_services_for_user_in_product: MockerFixture, fake_project: ProjectDict, workspaces_clean_db: None, ): @@ -147,10 +150,10 @@ async def test_workspaces__list_projects_full_search( @pytest.mark.parametrize("user_role", [UserRole.USER]) async def test__list_projects_full_search_with_query_parameters( + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, - mock_catalog_api_get_services_for_user_in_product: MockerFixture, fake_project: ProjectDict, workspaces_clean_db: None, ): @@ -218,3 +221,220 @@ async def test__list_projects_full_search_with_query_parameters( # data, _ = await assert_status(resp, status.HTTP_200_OK) # assert len(data) == 1 # assert data[0]["uuid"] == project["uuid"] + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test__list_projects_full_search_with_type_filter( + mocked_dynamic_services_interface: dict[str, mock.MagicMock], + client: TestClient, + logged_user: UserInfoDict, + user_project: ProjectDict, + fake_project: ProjectDict, + workspaces_clean_db: None, +): + """Test the list_projects_full_search endpoint with type query parameter.""" + assert client.app + + # Create a regular user project + user_project_data = deepcopy(fake_project) + user_project_data["name"] = "User Project Test" + user_project_created = await create_project( + client.app, + user_project_data, + user_id=logged_user["id"], + product_name="osparc", + ) + + # Create a template project + template_project_data = deepcopy(fake_project) + template_project_data["name"] = "Template Project Test" + template_project_created = await create_project( + client.app, + template_project_data, + user_id=logged_user["id"], + product_name="osparc", + as_template=True, + ) + + base_url = client.app.router["list_projects_full_search"].url_for() + + # Test: Filter by type="user" + 
url = base_url.with_query({"text": "Project Test", "type": "user"}) + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + user_project_uuids = [p["uuid"] for p in data] + assert user_project_created["uuid"] in user_project_uuids + assert template_project_created["uuid"] not in user_project_uuids + + # Test: Filter by type="template" + url = base_url.with_query({"text": "Project Test", "type": "template"}) + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + template_project_uuids = [p["uuid"] for p in data] + assert user_project_created["uuid"] not in template_project_uuids + assert template_project_created["uuid"] in template_project_uuids + + # Test: Filter by type="all" + url = base_url.with_query({"text": "Project Test", "type": "all"}) + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + all_project_uuids = [p["uuid"] for p in data] + assert user_project_created["uuid"] in all_project_uuids + assert template_project_created["uuid"] in all_project_uuids + + # Test: Default behavior (no type parameter) + url = base_url.with_query({"text": "Project Test"}) + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + default_project_uuids = [p["uuid"] for p in data] + assert user_project_created["uuid"] in default_project_uuids + assert template_project_created["uuid"] in default_project_uuids + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test__list_projects_full_search_with_template_type_hypertool_and_tutorial( + mocked_dynamic_services_interface: dict[str, mock.MagicMock], + client: TestClient, + logged_user: UserInfoDict, + user_project: ProjectDict, + fake_project: ProjectDict, + workspaces_clean_db: None, +): + """Test the list_projects_full_search endpoint with template_type hypertool and tutorial.""" + assert client.app + + # Create a hypertool template project + hypertool_project_data = deepcopy(fake_project) + hypertool_project_data["name"] = "Hypertool Project Test" + hypertool_project_created = await create_project( + client.app, + hypertool_project_data, + user_id=logged_user["id"], + product_name="osparc", + as_template=True, + ) + # Patch the hypertool project to set template_type to "HYPERTOOL" + await projects_service_repository.patch_project( + client.app, + project_uuid=hypertool_project_created["uuid"], + new_partial_project_data={"template_type": "HYPERTOOL"}, + ) + # Create a tutorial template project + tutorial_project_data = deepcopy(fake_project) + tutorial_project_data["name"] = "Tutorial Project Test" + tutorial_project_created = await create_project( + client.app, + tutorial_project_data, + user_id=logged_user["id"], + product_name="osparc", + as_template=True, + ) + # Patch the tutorial project to set template_type to "TUTORIAL" + await projects_service_repository.patch_project( + client.app, + project_uuid=tutorial_project_created["uuid"], + new_partial_project_data={"template_type": "TUTORIAL"}, + ) + + base_url = client.app.router["list_projects_full_search"].url_for() + + # Test: Filter by template_type="hypertool" + url = base_url.with_query( + {"text": "Project Test", "type": "template", "template_type": "HYPERTOOL"} + ) + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + hypertool_uuids = [p["uuid"] for p in data] + assert hypertool_project_created["uuid"] in hypertool_uuids + assert tutorial_project_created["uuid"] not in 
hypertool_uuids + + # Test: Filter by template_type="tutorial" + url = base_url.with_query( + {"text": "Project Test", "type": "template", "template_type": "TUTORIAL"} + ) + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + tutorial_uuids = [p["uuid"] for p in data] + assert hypertool_project_created["uuid"] not in tutorial_uuids + assert tutorial_project_created["uuid"] in tutorial_uuids + + +@pytest.mark.parametrize("user_role", [UserRole.USER]) +async def test__list_projects_full_search_with_template_type_regular_and_none( + mocked_dynamic_services_interface: dict[str, mock.MagicMock], + client: TestClient, + logged_user: UserInfoDict, + user_project: ProjectDict, + fake_project: ProjectDict, + workspaces_clean_db: None, +): + """Test the list_projects_full_search endpoint with template_type TEMPLATE and None.""" + assert client.app + + # Create a regular user project + user_project_data = deepcopy(fake_project) + user_project_data["name"] = "User Project Test" + user_project_created = await create_project( + client.app, + user_project_data, + user_id=logged_user["id"], + product_name="osparc", + ) + + # Create a regular template project + template_project_data = deepcopy(fake_project) + template_project_data["name"] = "Template Project Test" + template_project_created = await create_project( + client.app, + template_project_data, + user_id=logged_user["id"], + product_name="osparc", + as_template=True, + ) + + # Create a hypertool template project for comparison + hypertool_project_data = deepcopy(fake_project) + hypertool_project_data["name"] = "Hypertool Project Test" + hypertool_project_created = await create_project( + client.app, + hypertool_project_data, + user_id=logged_user["id"], + product_name="osparc", + as_template=True, + ) + # Patch the hypertool project to set template_type to "HYPERTOOL" + await projects_service_repository.patch_project( + client.app, + project_uuid=hypertool_project_created["uuid"], + new_partial_project_data={"template_type": "HYPERTOOL"}, + ) + + base_url = client.app.router["list_projects_full_search"].url_for() + + # Test: Filter by template_type="template" --> Default type is "all" + url = base_url.with_query({"text": "Project Test", "template_type": "TEMPLATE"}) + resp = await client.get(f"{url}") + await assert_status(resp, status.HTTP_422_UNPROCESSABLE_ENTITY) + + # Test: Filter by type="all" & template_type="null" + url = base_url.with_query( + {"text": "Project Test", "type": "all", "template_type": "null"} + ) + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + none_template_uuids = [p["uuid"] for p in data] + # NOTE: type "all" takes precedence over template_type "null" (in practice this combination is not used) + assert user_project_created["uuid"] in none_template_uuids + assert template_project_created["uuid"] in none_template_uuids + assert hypertool_project_created["uuid"] in none_template_uuids + + # Test: Filter by type="user" & template_type="None" + url = base_url.with_query( + {"text": "Project Test", "type": "user", "template_type": "None"} + ) + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + none_template_uuids = [p["uuid"] for p in data] + assert user_project_created["uuid"] in none_template_uuids + assert template_project_created["uuid"] not in none_template_uuids + assert hypertool_project_created["uuid"] not in none_template_uuids diff --git
a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__moving_folders_between_workspaces.py b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__moving_folders_between_workspaces.py index ad01ef5b947c..e4f45506fe66 100644 --- a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__moving_folders_between_workspaces.py +++ b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__moving_folders_between_workspaces.py @@ -7,13 +7,13 @@ from copy import deepcopy from http.client import NO_CONTENT +from unittest import mock import pytest from aiohttp.test_utils import TestClient -from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.helpers.webserver_projects import create_project +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver.db.models import UserRole from simcore_service_webserver.db.plugin import setup_db @@ -27,6 +27,7 @@ def user_role() -> UserRole: @pytest.fixture async def moving_folder_id( + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, fake_project: ProjectDict, @@ -184,9 +185,9 @@ async def _move_folder_to_workspace_and_assert( async def test_moving_between_private_and_shared_workspaces( + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, - mock_catalog_api_get_services_for_user_in_product: MockerFixture, fake_project: ProjectDict, moving_folder_id: str, workspaces_clean_db: None, diff --git a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__moving_projects_between_workspaces.py b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__moving_projects_between_workspaces.py index af37341eda56..23a1fe9fc801 100644 --- a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__moving_projects_between_workspaces.py +++ b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__moving_projects_between_workspaces.py @@ -7,18 +7,18 @@ from copy import deepcopy from http import HTTPStatus +from unittest import mock import pytest import sqlalchemy as sa from aiohttp.test_utils import TestClient -from pytest_mock import MockerFixture from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.helpers.webserver_parametrizations import ( ExpectedResponse, standard_role_response, ) from pytest_simcore.helpers.webserver_projects import create_project +from pytest_simcore.helpers.webserver_users import UserInfoDict from servicelib.aiohttp import status from simcore_postgres_database.models.projects_to_folders import projects_to_folders from simcore_service_webserver.db.models import UserRole @@ -31,7 +31,6 @@ async def test_moving_between_workspaces_user_role_permissions( logged_user: UserInfoDict, user_project: ProjectDict, expected: ExpectedResponse, - mock_catalog_api_get_services_for_user_in_product: MockerFixture, fake_project: ProjectDict, workspaces_clean_db: None, ): @@ -45,11 +44,11 @@ async def test_moving_between_workspaces_user_role_permissions( @pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)]) async def test_moving_between_private_and_shared_workspaces( + mocked_dynamic_services_interface: dict[str, 
mock.MagicMock], client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, expected: HTTPStatus, - mock_catalog_api_get_services_for_user_in_product: MockerFixture, fake_project: ProjectDict, workspaces_clean_db: None, ): @@ -94,7 +93,7 @@ async def test_moving_between_private_and_shared_workspaces( base_url = client.app.router["get_project"].url_for(project_id=project["uuid"]) resp = await client.get(f"{base_url}") data, _ = await assert_status(resp, status.HTTP_200_OK) - assert data["workspaceId"] is None # <-- Workspace ID is None + assert data.get("workspaceId") is None # <-- Workspace ID is None # Move project from your private workspace to shared workspace base_url = client.app.router["move_project_to_workspace"].url_for( @@ -112,11 +111,11 @@ async def test_moving_between_private_and_shared_workspaces( @pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)]) async def test_moving_between_shared_and_shared_workspaces( + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, expected: HTTPStatus, - mock_catalog_api_get_services_for_user_in_product: MockerFixture, fake_project: ProjectDict, workspaces_clean_db: None, ): @@ -178,11 +177,11 @@ async def test_moving_between_shared_and_shared_workspaces( @pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)]) async def test_moving_between_workspaces_check_removed_from_folder( + mocked_dynamic_services_interface: dict[str, mock.MagicMock], client: TestClient, logged_user: UserInfoDict, user_project: ProjectDict, expected: HTTPStatus, - mock_catalog_api_get_services_for_user_in_product: MockerFixture, fake_project: ProjectDict, workspaces_clean_db: None, postgres_db: sa.engine.Engine, @@ -253,7 +252,7 @@ async def test_moving_between_workspaces_check_removed_from_folder( base_url = client.app.router["get_project"].url_for(project_id=project["uuid"]) resp = await client.get(f"{base_url}") data, _ = await assert_status(resp, status.HTTP_200_OK) - assert data["workspaceId"] is None # <-- Workspace ID is None + assert data.get("workspaceId") is None # <-- Workspace ID is None # Check project_to_folders DB is empty with postgres_db.connect() as con: diff --git a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces_groups.py b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces_groups.py index af19129d88ca..8dc7b59ab9ef 100644 --- a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces_groups.py +++ b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces_groups.py @@ -10,7 +10,7 @@ import pytest from aiohttp.test_utils import TestClient from pytest_simcore.helpers.assert_checks import assert_status -from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict +from pytest_simcore.helpers.webserver_users import NewUser, UserInfoDict from servicelib.aiohttp import status from simcore_service_webserver.db.models import UserRole from simcore_service_webserver.projects.models import ProjectDict diff --git a/services/web/server/tests/unit/with_dbs/conftest.py b/services/web/server/tests/unit/with_dbs/conftest.py index 710bf07215a5..f15dd5421a0b 100644 --- a/services/web/server/tests/unit/with_dbs/conftest.py +++ b/services/web/server/tests/unit/with_dbs/conftest.py @@ -49,11 +49,10 @@ from pytest_simcore.helpers.faker_factories import random_product from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict from 
pytest_simcore.helpers.typing_env import EnvVarsDict -from pytest_simcore.helpers.webserver_login import UserInfoDict from pytest_simcore.helpers.webserver_parametrizations import MockedStorageSubsystem from pytest_simcore.helpers.webserver_projects import NewProject +from pytest_simcore.helpers.webserver_users import UserInfoDict from redis import Redis -from servicelib.aiohttp.application_keys import APP_AIOPG_ENGINE_KEY from servicelib.common_aiopg_utils import DSN from servicelib.rabbitmq.rpc_interfaces.async_jobs.async_jobs import ( AsyncJobComposedResult, @@ -71,8 +70,11 @@ ) from simcore_service_webserver.application import create_application from simcore_service_webserver.application_settings_utils import AppConfigDict -from simcore_service_webserver.constants import INDEX_RESOURCE_NAME -from simcore_service_webserver.db.plugin import get_database_engine +from simcore_service_webserver.constants import ( + APP_AIOPG_ENGINE_KEY, + INDEX_RESOURCE_NAME, +) +from simcore_service_webserver.db.plugin import get_database_engine_legacy from simcore_service_webserver.projects.models import ProjectDict from simcore_service_webserver.projects.utils import NodesMap from simcore_service_webserver.statics._constants import ( @@ -205,7 +207,7 @@ async def _print_mail_to_stdout( ) -@pytest_asyncio.fixture(loop_scope="function") +@pytest_asyncio.fixture(loop_scope="function", scope="function") async def web_server( app_environment: EnvVarsDict, postgres_db: sa.engine.Engine, @@ -262,7 +264,7 @@ def osparc_product_api_base_url() -> str: @pytest.fixture async def default_product_name(client: TestClient) -> ProductName: assert client.app - async with get_database_engine(client.app).acquire() as conn: + async with get_database_engine_legacy(client.app).acquire() as conn: return await get_default_product_name(conn) @@ -420,14 +422,6 @@ async def _mock_result() -> None: return MockedStorageSubsystem(mock, mock1, mock2, mock3) -@pytest.fixture -def asyncpg_storage_system_mock(mocker): - return mocker.patch( - "simcore_service_webserver.login._login_repository_legacy.AsyncpgStorage.delete_user", - return_value="", - ) - - @pytest.fixture async def mocked_dynamic_services_interface( mocker: MockerFixture, diff --git a/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml b/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml index 15c36b031366..e8275c3afe77 100644 --- a/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml +++ b/services/web/server/tests/unit/with_dbs/docker-compose-devel.yml @@ -63,7 +63,7 @@ services: "--loglevel", "verbose", "--databases", - "8", + "11", "--appendonly", "yes", "--requirepass", @@ -71,7 +71,7 @@ services: ] redis-commander: init: true - image: rediscommander/redis-commander:latest + image: ghcr.io/joeferner/redis-commander:latest restart: always environment: - >- @@ -82,13 +82,17 @@ services: scheduled_maintenance:redis:6379:3:${TEST_REDIS_PASSWORD}, user_notifications:redis:6379:4:${TEST_REDIS_PASSWORD}, announcements:redis:6379:5:${TEST_REDIS_PASSWORD}, - distributed_identifiers:redis:6379:6:${TEST_REDIS_PASSWORD}, - deferred_tasks:redis:6379:7:${TEST_REDIS_PASSWORD} + long_running_tasks:redis:6379:6:${TEST_REDIS_PASSWORD}, + deferred_tasks:redis:6379:7:${TEST_REDIS_PASSWORD}, + dynamic_services:redis:6379:8:${TEST_REDIS_PASSWORD}, + celery_tasks:redis:6379:9:${TEST_REDIS_PASSWORD}, + documents:redis:6379:10:${TEST_REDIS_PASSWORD} ports: - "18081:8081" + user: redis rabbit: - image: itisfoundation/rabbitmq:3.13.7-management + image: 
itisfoundation/rabbitmq:4.1.2-management init: true environment: - RABBITMQ_DEFAULT_USER=admin diff --git a/services/web/server/tests/unit/with_dbs/docker-compose.yml b/services/web/server/tests/unit/with_dbs/docker-compose.yml index f2cba772f463..6482ab57d4ea 100644 --- a/services/web/server/tests/unit/with_dbs/docker-compose.yml +++ b/services/web/server/tests/unit/with_dbs/docker-compose.yml @@ -46,12 +46,12 @@ services: "--loglevel", "verbose", "--databases", - "8", + "11", "--appendonly", "yes", "--requirepass", "${TEST_REDIS_PASSWORD}" ] rabbit: - image: itisfoundation/rabbitmq:3.13.7-management + image: itisfoundation/rabbitmq:4.1.2-management init: true diff --git a/tests/e2e-playwright/Makefile b/tests/e2e-playwright/Makefile index 4b684cfa8230..bc31b63ce953 100644 --- a/tests/e2e-playwright/Makefile +++ b/tests/e2e-playwright/Makefile @@ -139,9 +139,10 @@ SLEEPERS_INPUT_FILE := .e2e-playwright-sleepers-env.txt S4L_INPUT_FILE := .e2e-playwright-sim4life-env.txt JUPYTER_LAB_INPUT_FILE := .e2e-playwright-jupyterlab-env.txt CLASSIC_TIP_INPUT_FILE := .e2e-playwright-classictip-env.txt +RSM_INPUT_FILE := .e2e-playwright-rsm-env.txt # Prompt the user for input and store it into variables -$(SLEEPERS_INPUT_FILE) $(JUPYTER_LAB_INPUT_FILE) $(CLASSIC_TIP_INPUT_FILE) $(S4L_INPUT_FILE): +$(SLEEPERS_INPUT_FILE) $(JUPYTER_LAB_INPUT_FILE) $(CLASSIC_TIP_INPUT_FILE) $(S4L_INPUT_FILE) $(RSM_INPUT_FILE): @read -p "Enter your product URL: " PRODUCT_URL; \ read -p "Is the product billable [y/n]: " BILLABLE; \ read -p "Is the product lite [y/n]: " IS_LITE; \ @@ -189,6 +190,20 @@ $(SLEEPERS_INPUT_FILE) $(JUPYTER_LAB_INPUT_FILE) $(CLASSIC_TIP_INPUT_FILE) $(S4L elif [ "$@" = "$(SLEEPERS_INPUT_FILE)" ]; then \ read -p "Enter the number of sleepers: " NUM_SLEEPERS; \ echo "--num-sleepers=$$NUM_SLEEPERS" >> $@; \ + elif [ "$@" = "$(RSM_INPUT_FILE)" ]; then \ + read -p "Enter the service key (default to mmux-vite-app-sumo-write): " SERVICE_KEY; \ + if [ -z "$$SERVICE_KEY" ]; then \ + echo "No service key specified, using default."; \ + echo "--service-key=mmux-vite-app-sumo-write" >> $@; \ + else \ + echo "--service-key=$$SERVICE_KEY" >> $@; \ + fi; \ + read -p "Enter the service version (default to latest): " SERVICE_VERSION; \ + if [ -z "$$SERVICE_VERSION" ]; then \ + echo "No service version specified, using default."; \ + else \ + echo "--service-version=$$SERVICE_VERSION" >> $@; \ + fi; \ fi # Run the tests @@ -202,7 +217,10 @@ test-jupyterlab-anywhere: _check_venv_active $(JUPYTER_LAB_INPUT_FILE) ## run ju @$(call run_test, $(JUPYTER_LAB_INPUT_FILE), tests/jupyterlabs/test_jupyterlab.py) test-tip-anywhere: _check_venv_active $(CLASSIC_TIP_INPUT_FILE) ## run classic tip test and cache settings - $(call run_test, $(CLASSIC_TIP_INPUT_FILE), tests/tip/test_ti_plan.py) + @$(call run_test, $(CLASSIC_TIP_INPUT_FILE), tests/tip/test_ti_plan.py) + +test-response-surface-modeling-anywhere: _check_venv_active $(RSM_INPUT_FILE) ## run response surface modeling test and cache settings + @$(call run_test, $(RSM_INPUT_FILE), tests/metamodeling/test_response_surface_modeling.py) # Define the common test running function define run_test diff --git a/tests/e2e-playwright/requirements/_test.txt b/tests/e2e-playwright/requirements/_test.txt index 43eb1c9e8d01..cf94941b953f 100644 --- a/tests/e2e-playwright/requirements/_test.txt +++ b/tests/e2e-playwright/requirements/_test.txt @@ -30,11 +30,11 @@ faker==36.1.1 # via -r requirements/_test_wo_playwright.txt greenlet==3.1.1 # via playwright -h11==0.14.0 +h11==0.16.0 # via # 
-r requirements/_test_wo_playwright.txt # httpcore -httpcore==1.0.7 +httpcore==1.0.9 # via # -r requirements/_test_wo_playwright.txt # httpx @@ -51,7 +51,7 @@ iniconfig==2.0.0 # via # -r requirements/_test_wo_playwright.txt # pytest -jinja2==3.1.5 +jinja2==3.1.6 # via # -r requirements/_test_wo_playwright.txt # pytest-html @@ -70,15 +70,19 @@ pluggy==1.5.0 # via # -r requirements/_test_wo_playwright.txt # pytest -pydantic==2.10.6 +pydantic==2.11.7 # via -r requirements/_test_wo_playwright.txt -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via # -r requirements/_test_wo_playwright.txt # pydantic pyee==12.1.1 # via playwright -pytest==8.3.5 +pygments==2.19.2 + # via + # -r requirements/_test_wo_playwright.txt + # pytest +pytest==8.4.1 # via # -r requirements/_test_wo_playwright.txt # pytest-base-url @@ -111,7 +115,7 @@ python-slugify==8.0.4 # via pytest-playwright pyyaml==6.0.2 # via -r requirements/_test_wo_playwright.txt -requests==2.32.3 +requests==2.32.4 # via # -r requirements/_test_wo_playwright.txt # docker @@ -136,18 +140,23 @@ types-python-dateutil==2.9.0.20241206 # via # -r requirements/_test_wo_playwright.txt # arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -r requirements/_test_wo_playwright.txt # anyio # pydantic # pydantic-core # pyee + # typing-inspection +typing-inspection==0.4.1 + # via + # -r requirements/_test_wo_playwright.txt + # pydantic tzdata==2025.1 # via # -r requirements/_test_wo_playwright.txt # faker -urllib3==2.3.0 +urllib3==2.5.0 # via # -r requirements/_test_wo_playwright.txt # docker diff --git a/tests/e2e-playwright/requirements/_test_wo_playwright.txt b/tests/e2e-playwright/requirements/_test_wo_playwright.txt index 6bb18aa518fe..c1b02485018d 100644 --- a/tests/e2e-playwright/requirements/_test_wo_playwright.txt +++ b/tests/e2e-playwright/requirements/_test_wo_playwright.txt @@ -19,9 +19,9 @@ email-validator==2.2.0 # via pydantic faker==36.1.1 # via -r requirements/_test_wo_playwright.in -h11==0.14.0 +h11==0.16.0 # via httpcore -httpcore==1.0.7 +httpcore==1.0.9 # via httpx httpx==0.28.1 # via -r requirements/_test_wo_playwright.in @@ -33,7 +33,7 @@ idna==3.10 # requests iniconfig==2.0.0 # via pytest -jinja2==3.1.5 +jinja2==3.1.6 # via pytest-html markupsafe==3.0.2 # via jinja2 @@ -43,11 +43,13 @@ packaging==24.2 # pytest-sugar pluggy==1.5.0 # via pytest -pydantic==2.10.6 +pydantic==2.11.7 # via -r requirements/_test_wo_playwright.in -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via pydantic -pytest==8.3.5 +pygments==2.19.2 + # via pytest +pytest==8.4.1 # via # pytest-html # pytest-instafail @@ -67,7 +69,7 @@ python-dateutil==2.9.0.post0 # via arrow pyyaml==6.0.2 # via -r requirements/_test_wo_playwright.in -requests==2.32.3 +requests==2.32.4 # via docker six==1.17.0 # via python-dateutil @@ -79,14 +81,17 @@ termcolor==2.5.0 # via pytest-sugar types-python-dateutil==2.9.0.20241206 # via arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # anyio # pydantic # pydantic-core + # typing-inspection +typing-inspection==0.4.1 + # via pydantic tzdata==2025.1 # via faker -urllib3==2.3.0 +urllib3==2.5.0 # via # docker # requests diff --git a/tests/e2e-playwright/requirements/_tools.txt b/tests/e2e-playwright/requirements/_tools.txt index 853cda1d8ca0..7346e8591146 100644 --- a/tests/e2e-playwright/requirements/_tools.txt +++ b/tests/e2e-playwright/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # black 
# pip-tools @@ -26,9 +26,9 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # black # mypy @@ -40,7 +40,9 @@ packaging==24.2 # black # build pathspec==0.12.1 - # via black + # via + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -65,11 +67,11 @@ pyyaml==6.0.2 # pre-commit ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.2 +setuptools==80.9.0 # via pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_test.txt # mypy diff --git a/tests/e2e-playwright/tests/conftest.py b/tests/e2e-playwright/tests/conftest.py index dbafc9474705..ee86f70953cb 100644 --- a/tests/e2e-playwright/tests/conftest.py +++ b/tests/e2e-playwright/tests/conftest.py @@ -479,7 +479,7 @@ def create_new_project_and_delete( # noqa: C901, PLR0915 api_request_context: APIRequestContext, product_url: AnyUrl, ) -> Iterator[ - Callable[[tuple[RunningState], bool, str | None, str | None], dict[str, Any]] + Callable[[tuple[RunningState, ...], bool, str | None, str | None], dict[str, Any]] ]: """The first available service currently displayed in the dashboard will be opened NOTE: cannot be used multiple times or going back to dashboard will fail!! @@ -487,17 +487,13 @@ def create_new_project_and_delete( # noqa: C901, PLR0915 created_project_uuids = [] def _( # noqa: C901 - expected_states: tuple[RunningState], + expected_states: tuple[RunningState, ...], press_open: bool, template_id: str | None, service_version: str | None, ) -> dict[str, Any]: - assert ( - len(created_project_uuids) == 0 - ), "misuse of this fixture! only 1 study can be opened at a time. 
Otherwise please modify the fixture" with log_context( - logging.INFO, - f"Open project in {product_url=} as {is_product_billable=}", + logging.INFO, f"Open project in {product_url=} as {is_product_billable=}" ) as ctx: waiter = SocketIOProjectStateUpdatedWaiter(expected_states=expected_states) timeout = ( @@ -505,6 +501,16 @@ def _( # noqa: C901 if template_id is not None else _OPENING_NEW_EMPTY_PROJECT_MAX_WAIT_TIME ) + + # Enhanced context for better debugging when timeout occurs + operation_type = "template" if template_id is not None else "new project" + ctx.logger.info( + "Waiting for project to open: %s (timeout: %s seconds, expected_states: %s)", + operation_type, + (timeout + 10 * SECOND) / 1000, + expected_states, + ) + with log_in_and_out.expect_event( "framereceived", waiter, timeout=timeout + 10 * SECOND ): @@ -588,22 +594,29 @@ def wait_for_done(response): yield _ # go back to dashboard and wait for project to close - with ExitStack() as stack: - for project_uuid in created_project_uuids: - ctx = stack.enter_context( - log_context(logging.INFO, f"Wait for closed project {project_uuid=}") - ) - stack.enter_context( - log_in_and_out.expect_event( - "framereceived", - SocketIOProjectClosedWaiter(ctx.logger), - timeout=_PROJECT_CLOSING_TIMEOUT, - ) + with log_context(logging.INFO, "Go back to dashboard") as ctx1: + if page.get_by_test_id("dashboardBtn").is_visible(): + with ExitStack() as stack: + for project_uuid in created_project_uuids: + ctx = stack.enter_context( + log_context( + logging.INFO, f"Wait for closed project {project_uuid=}" + ) + ) + stack.enter_context( + log_in_and_out.expect_event( + "framereceived", + SocketIOProjectClosedWaiter(ctx.logger), + timeout=_PROJECT_CLOSING_TIMEOUT, + ) + ) + if created_project_uuids: + page.get_by_test_id("dashboardBtn").click() + page.get_by_test_id("confirmDashboardBtn").click() + else: + ctx1.logger.warning( + "Cannot go back to dashboard, 'dashboard' button is not visible, we are probably already there" ) - if created_project_uuids: - with log_context(logging.INFO, "Go back to dashboard"): - page.get_by_test_id("dashboardBtn").click() - page.get_by_test_id("confirmDashboardBtn").click() for project_uuid in created_project_uuids: with log_context( @@ -685,8 +698,13 @@ def create_project_from_new_button( ) -> Callable[[str], dict[str, Any]]: def _(plus_button_test_id: str) -> dict[str, Any]: start_study_from_plus_button(plus_button_test_id) - expected_states = (RunningState.UNKNOWN,) - return create_new_project_and_delete(expected_states, False, None, None) + expected_states = (RunningState.NOT_STARTED,) + return create_new_project_and_delete( + expected_states, + False, # noqa: FBT003 + None, + None, + ) return _ @@ -700,8 +718,13 @@ def create_project_from_template_dashboard( ) -> Callable[[str], dict[str, Any]]: def _(template_id: str) -> dict[str, Any]: find_and_click_template_in_dashboard(template_id) - expected_states = (RunningState.UNKNOWN,) - return create_new_project_and_delete(expected_states, True, template_id, None) + expected_states = (RunningState.NOT_STARTED,) + return create_new_project_and_delete( + expected_states, + True, # noqa: FBT003 + template_id, + None, + ) return _ @@ -722,11 +745,15 @@ def _( find_and_start_service_in_dashboard( service_type, service_name, service_key_prefix ) - expected_states = (RunningState.UNKNOWN,) + expected_states = (RunningState.NOT_STARTED,) if service_type is ServiceType.COMPUTATIONAL: expected_states = (RunningState.NOT_STARTED,) + # press_open=True, template_id=None, 
service_version=service_version return create_new_project_and_delete( - expected_states, True, None, service_version + expected_states, + True, + None, + service_version, ) return _ diff --git a/tests/e2e-playwright/tests/jupyterlabs/test_jupyterlab.py b/tests/e2e-playwright/tests/jupyterlabs/test_jupyterlab.py index 23b33a7ad8aa..c49e2e6c931b 100644 --- a/tests/e2e-playwright/tests/jupyterlabs/test_jupyterlab.py +++ b/tests/e2e-playwright/tests/jupyterlabs/test_jupyterlab.py @@ -154,7 +154,8 @@ def test_jupyterlab( expected_message_type="stdout", expected_message_contents="copied" ), timeout=_WAITING_TIME_FILE_CREATION_PER_GB_IN_TERMINAL - * max(int(large_file_size.to("GiB")), 1), + * max(int(large_file_size.to("GiB")), 1) + * 3, # avoids flakiness since the timeout is determined based on size ): terminal.fill( f"dd if=/dev/urandom of=output.txt bs={large_file_block_size} count={blocks_count} iflag=fullblock" ) diff --git a/tests/e2e-playwright/tests/metamodeling/test_response_surface_modeling.py b/tests/e2e-playwright/tests/metamodeling/test_response_surface_modeling.py new file mode 100644 index 000000000000..f67b8b078dec --- /dev/null +++ b/tests/e2e-playwright/tests/metamodeling/test_response_surface_modeling.py @@ -0,0 +1,228 @@ +# pylint: disable=logging-fstring-interpolation +# pylint:disable=no-value-for-parameter +# pylint:disable=protected-access +# pylint:disable=redefined-outer-name +# pylint:disable=too-many-arguments +# pylint:disable=too-many-statements +# pylint:disable=unused-argument +# pylint:disable=unused-variable + +import json +import logging +import re +from collections.abc import Callable, Iterator +from typing import Any, Final + +import pytest +from playwright.sync_api import APIRequestContext, Page +from pydantic import AnyUrl +from pytest_simcore.helpers.logging_tools import log_context +from pytest_simcore.helpers.playwright import ( + MINUTE, + RobustWebSocket, + ServiceType, + wait_for_service_running, +) + +_WAITING_FOR_SERVICE_TO_START: Final[int] = 5 * MINUTE +_WAITING_FOR_SERVICE_TO_APPEAR: Final[int] = 2 * MINUTE +_DEFAULT_RESPONSE_TO_WAIT_FOR: Final[re.Pattern] = re.compile( + r"/flask/list_function_job_collections_for_functionid" +) + +_STUDY_FUNCTION_NAME: Final[str] = "playwright_test_study_for_rsm" +_FUNCTION_NAME: Final[str] = "playwright_test_function" + + +@pytest.fixture +def create_function_from_project( + api_request_context: APIRequestContext, + is_product_billable: bool, + product_url: AnyUrl, +) -> Iterator[Callable[[Page, str], dict[str, Any]]]: + created_function_uuids: list[str] = [] + + def _create_function_from_project( + page: Page, + project_uuid: str, + ) -> dict[str, Any]: + with log_context( + logging.INFO, + f"Convert {project_uuid=} / {_STUDY_FUNCTION_NAME} to a function", + ) as ctx: + with page.expect_response(re.compile(rf"/projects/{project_uuid}")): + page.get_by_test_id(f"studyBrowserListItem_{project_uuid}").click() + page.wait_for_timeout(2000) + page.get_by_text("create function").first.click() + page.wait_for_timeout(2000) + + with page.expect_response( + lambda response: re.compile(r"/functions").search(response.url) + is not None + and response.request.method == "POST" + ) as create_function_response: + page.get_by_test_id("create_function_page_btn").click() + assert ( + create_function_response.value.ok + ), f"Failed to create function: {create_function_response.value.status}" + function_data = create_function_response.value.json() + + ctx.logger.info( + "Created function: %s", f"{json.dumps(function_data['data'],
indent=2)}" + ) + + page.keyboard.press("Escape") + created_function_uuids.append(function_data["data"]["uuid"]) + return function_data["data"] + + yield _create_function_from_project + + # cleanup the functions + for function_uuid in created_function_uuids: + with log_context( + logging.INFO, + f"Delete function with {function_uuid=} in {product_url=} as {is_product_billable=}", + ): + response = api_request_context.delete( + f"{product_url}v0/functions/{function_uuid}" + ) + assert ( + response.status == 204 + ), f"Unexpected error while deleting project: '{response.json()}'" + + +def test_response_surface_modeling( + page: Page, + create_project_from_service_dashboard: Callable[ + [ServiceType, str, str | None, str | None], dict[str, Any] + ], + log_in_and_out: RobustWebSocket, + service_key: str, + service_version: str | None, + product_url: AnyUrl, + is_service_legacy: bool, + create_function_from_project: Callable[[Page, str], dict[str, Any]], +): + # 1. create the initial study with jsonifier + with log_context(logging.INFO, "Create new study for function"): + jsonifier_project_data = create_project_from_service_dashboard( + ServiceType.COMPUTATIONAL, "jsonifier", None, service_version + ) + assert ( + "workbench" in jsonifier_project_data + ), "Expected workbench to be in project data!" + assert isinstance( + jsonifier_project_data["workbench"], dict + ), "Expected workbench to be a dict!" + node_ids: list[str] = list(jsonifier_project_data["workbench"]) + assert len(node_ids) == 1, "Expected 1 node in the workbench!" + + # select the jsonifier, it's the second one as the study has the same name + page.get_by_test_id("nodeTreeItem").filter(has_text="jsonifier").all()[ + 1 + ].click() + + # create the probe + with page.expect_response( + lambda response: re.compile( + rf"/projects/{jsonifier_project_data['uuid']}" + ).search(response.url) + is not None + and response.request.method == "PATCH" + ): + page.get_by_test_id("connect_probe_btn_number_3").click() + + # # create the parameter + page.get_by_test_id("connect_input_btn_number_1").click() + with page.expect_response( + lambda response: re.compile( + rf"/projects/{jsonifier_project_data['uuid']}" + ).search(response.url) + is not None + and response.request.method == "PATCH" + ): + page.get_by_text("new parameter").click() + + # rename the project to identify it + page.get_by_test_id("studyTitleRenamer").click() + with page.expect_response( + lambda response: re.compile( + rf"/projects/{jsonifier_project_data['uuid']}" + ).search(response.url) + is not None + and response.request.method == "PATCH" + ): + page.get_by_test_id("studyTitleRenamer").locator("input").fill( + _STUDY_FUNCTION_NAME + ) + + # 2. go back to dashboard + with ( + log_context(logging.INFO, "Go back to dashboard"), + page.expect_response(re.compile(r"/projects\?.+")) as list_projects_response, + ): + page.get_by_test_id("dashboardBtn").click() + page.get_by_test_id("confirmDashboardBtn").click() + assert ( + list_projects_response.value.ok + ), f"Failed to list projects: {list_projects_response.value.status}" + project_listing = list_projects_response.value.json() + assert "data" in project_listing + assert len(project_listing["data"]) > 0 + # find the project we just created, it's the first one + our_project = project_listing["data"][0] + assert ( + our_project["name"] == _STUDY_FUNCTION_NAME + ), f"Expected to find our project named {_STUDY_FUNCTION_NAME} in {project_listing}" + + # 3. 
convert it to a function + create_function_from_project(page, our_project["uuid"]) + + # 4. start an RSM with that function + + with log_context( + logging.INFO, + f"Waiting for {service_key} to be responsive (waiting for {_DEFAULT_RESPONSE_TO_WAIT_FOR})", + ): + project_data = create_project_from_service_dashboard( + ServiceType.DYNAMIC, service_key, None, service_version + ) + assert "workbench" in project_data, "Expected workbench to be in project data!" + assert isinstance( + project_data["workbench"], dict + ), "Expected workbench to be a dict!" + node_ids: list[str] = list(project_data["workbench"]) + assert len(node_ids) == 1, "Expected 1 node in the workbench!" + + wait_for_service_running( + page=page, + node_id=node_ids[0], + websocket=log_in_and_out, + timeout=_WAITING_FOR_SERVICE_TO_START, + press_start_button=False, + product_url=product_url, + is_service_legacy=is_service_legacy, + ) + + service_iframe = page.frame_locator("iframe") + with log_context(logging.INFO, "Waiting for the RSM to be ready..."): + service_iframe.get_by_role("grid").wait_for( + state="visible", timeout=_WAITING_FOR_SERVICE_TO_APPEAR + ) + + page.wait_for_timeout(10000) + + # # select the function + # service_iframe.get_by_role("gridcell", name=_FUNCTION_NAME).click() + + # # Find the first input field (textbox) in the iframe + # min_input_field = service_iframe.get_by_role("textbox").nth(0) + # min_input_field.fill("1") + # max_input_field = service_iframe.get_by_role("textbox").nth(1) + # max_input_field.fill("10") + + # # click on next + # service_iframe.get_by_role("button", name="Next").click() + + # # then we wait a long time + # page.wait_for_timeout(1 * MINUTE) diff --git a/tests/e2e-playwright/tests/sleepers/test_sleepers.py b/tests/e2e-playwright/tests/sleepers/test_sleepers.py index dfeab9cd0aef..5c6c081b43a3 100644 --- a/tests/e2e-playwright/tests/sleepers/test_sleepers.py +++ b/tests/e2e-playwright/tests/sleepers/test_sleepers.py @@ -170,6 +170,7 @@ def test_sleepers( RunningState.WAITING_FOR_CLUSTER, RunningState.WAITING_FOR_RESOURCES, RunningState.STARTED, + RunningState.SUCCESS, ), timeout_ms=_WAITING_FOR_PIPELINE_TO_CHANGE_STATE, ) @@ -182,6 +183,7 @@ def test_sleepers( expected_states=( RunningState.WAITING_FOR_RESOURCES, RunningState.STARTED, + RunningState.SUCCESS, ), timeout_ms=_WAITING_FOR_CLUSTER_MAX_WAITING_TIME, ) @@ -191,7 +193,10 @@ def test_sleepers( current_state, websocket=log_in_and_out, if_in_states=(RunningState.WAITING_FOR_RESOURCES,), - expected_states=(RunningState.STARTED,), + expected_states=( + RunningState.STARTED, + RunningState.SUCCESS, + ), timeout_ms=_WAITING_FOR_STARTED_MAX_WAITING_TIME, ) diff --git a/tests/e2e-playwright/tests/tip/conftest.py b/tests/e2e-playwright/tests/tip/conftest.py index 094c8b8f78e6..23c2765a93c1 100644 --- a/tests/e2e-playwright/tests/tip/conftest.py +++ b/tests/e2e-playwright/tests/tip/conftest.py @@ -32,7 +32,12 @@ def create_tip_plan_from_dashboard( ) -> Callable[[str], dict[str, Any]]: def _(plan_name_test_id: str) -> dict[str, Any]: find_and_start_tip_plan_in_dashboard(plan_name_test_id) - expected_states = (RunningState.UNKNOWN,) - return create_new_project_and_delete(expected_states, False, None, None) + expected_states = (RunningState.NOT_STARTED,) + return create_new_project_and_delete( + expected_states, + False, # noqa: FBT003 + None, + None, + ) return _ diff --git a/tests/e2e-playwright/tests/tip/test_ti_plan.py b/tests/e2e-playwright/tests/tip/test_ti_plan.py index 32ab5ebfe170..1d5d53ff50a9 100644 ---
a/tests/e2e-playwright/tests/tip/test_ti_plan.py +++ b/tests/e2e-playwright/tests/tip/test_ti_plan.py @@ -21,7 +21,9 @@ MINUTE, SECOND, RobustWebSocket, + SocketIOWaitNodeForOutputs, app_mode_trigger_next_app, + decode_socketio_42_message, expected_service_running, wait_for_service_running, ) @@ -165,7 +167,7 @@ def test_classic_ti_plan( # noqa: PLR0915 # NOTE: Sometimes this iframe flicks and shows a white page. This wait will avoid it page.wait_for_timeout(_ELECTRODE_SELECTOR_FLICKERING_WAIT_TIME) - with log_context(logging.INFO, "Configure selector"): + with log_context(logging.INFO, "Configure selector", logger=ctx.logger): assert ( page.get_by_test_id("settingsForm_" + node_ids[0]).count() == 0 ), "service settings should not be visible" @@ -185,22 +187,20 @@ def test_classic_ti_plan( # noqa: PLR0915 electrode_id = "Electrode_" + selection[1] electrode_selector_iframe.get_by_test_id(group_id).click() electrode_selector_iframe.get_by_test_id(electrode_id).click() - # configuration done, push and wait for output - with ( - log_context(logging.INFO, "Check outputs"), - page.expect_request( - lambda r: bool( - re.search(_GET_NODE_OUTPUTS_REQUEST_PATTERN, r.url) - and r.method.upper() == "GET" - ) - ) as request_info, - ): - electrode_selector_iframe.get_by_test_id("FinishSetUp").click() - response = request_info.value.response() - assert response - assert response.ok, f"{response.json()}" - response_body = response.json() - ctx.logger.info("the following output was generated: %s", response_body) + # configuration done, push and wait for the 1 output + with log_context(logging.INFO, "Check outputs", logger=ctx.logger): + waiter = SocketIOWaitNodeForOutputs( + ctx.logger, expected_number_of_outputs=1, node_id=node_ids[0] + ) + with log_in_and_out.expect_event( + "framereceived", waiter + ) as frame_received_event: + electrode_selector_iframe.get_by_test_id("FinishSetUp").click() + socket_io_message = decode_socketio_42_message(frame_received_event.value) + ctx.logger.info( + "the following output was generated: %s", + socket_io_message.obj["data"]["outputs"]["output_1"]["path"], + ) with log_context( logging.INFO, "Classic TI step (2/%s)", expected_number_of_steps diff --git a/tests/e2e/docker-compose.yml b/tests/e2e/docker-compose.yml index 245eb07482b6..e319d6275966 100644 --- a/tests/e2e/docker-compose.yml +++ b/tests/e2e/docker-compose.yml @@ -1,6 +1,6 @@ services: registry: - image: registry:2 + image: registry:3 restart: always ports: - "5000:5000" diff --git a/tests/e2e/jest.config.js b/tests/e2e/jest.config.js index 8c91e5eea9f9..41fbe5a2a2c2 100644 --- a/tests/e2e/jest.config.js +++ b/tests/e2e/jest.config.js @@ -6,7 +6,7 @@ module.exports = { globals: { url: "http://127.0.0.1.nip.io:9081/", // For local testing, set your deployed url here apiVersion: 'v0/', - ourTimeout: 40000, + ourTimeout: 120000, }, maxWorkers: 1, maxConcurrency: 1 diff --git a/tests/e2e/portal-files/VTK_file.js b/tests/e2e/portal-files/VTK_file.js index 77a293052d72..62c5201ef796 100644 --- a/tests/e2e/portal-files/VTK_file.js +++ b/tests/e2e/portal-files/VTK_file.js @@ -38,7 +38,12 @@ async function runTutorial () { const workbenchData = utils.extractWorkbenchData(studyData["data"]); const nodeIdViewer = workbenchData["nodeIds"][1]; - await tutorial.waitForServices(workbenchData["studyId"], [nodeIdViewer], startTimeout); + await tutorial.waitForServices( + workbenchData["studyId"], + [nodeIdViewer], + startTimeout, + false + ); await utils.takeScreenshot(page, screenshotPrefix + 'service_started'); // Some 
time for setting up service's frontend diff --git a/tests/e2e/portal/2D_Plot.js b/tests/e2e/portal/2D_Plot.js index f7b311344e73..195e6a55e8e6 100644 --- a/tests/e2e/portal/2D_Plot.js +++ b/tests/e2e/portal/2D_Plot.js @@ -26,7 +26,11 @@ async function runTutorial () { const workbenchData = utils.extractWorkbenchData(studyData["data"]); const nodeIdViewer = workbenchData["nodeIds"][1]; - await tutorial.waitForServices(workbenchData["studyId"], [nodeIdViewer], startTimeout); + await tutorial.waitForServices( + workbenchData["studyId"], + [nodeIdViewer], + startTimeout + ); await tutorial.waitFor(5000, 'Some time for starting the service'); await utils.takeScreenshot(page, screenshotPrefix + 'service_started'); diff --git a/tests/e2e/portal/3D_Anatomical.js b/tests/e2e/portal/3D_Anatomical.js index d2f933bf9124..bd495358ab9a 100644 --- a/tests/e2e/portal/3D_Anatomical.js +++ b/tests/e2e/portal/3D_Anatomical.js @@ -25,18 +25,29 @@ async function runTutorial () { const studyData = await tutorial.openStudyLink(); const workbenchData = utils.extractWorkbenchData(studyData["data"]); - await tutorial.waitForServices(workbenchData["studyId"], [workbenchData["nodeIds"][1]], startTimeout); + const vtkNodeId = workbenchData["nodeIds"][1]; + await tutorial.waitForServices( + workbenchData["studyId"], + [vtkNodeId], + startTimeout, + false + ); await tutorial.waitFor(10000, 'Some time for starting the service'); await utils.takeScreenshot(page, screenshotPrefix + 'service_started'); - // This study opens in fullscreen mode - await tutorial.restoreIFrame(); - - const outFiles = [ - "data.zip" + const iframe = await tutorial.getIframe(vtkNodeId); + const entitiesListed = [ + "Vein.vtk", + "Artery.vtk", + "Bones.e", ]; - await tutorial.checkNodeOutputs(1, outFiles); + for (const text of entitiesListed) { + const found = await utils.waitForLabelText(iframe, text); + if (!found) { + throw new Error(`Text "${text}" not visible on the page within timeout.`); + } + } } catch(err) { await tutorial.setTutorialFailed(); diff --git a/tests/e2e/portal/3D_EM.js b/tests/e2e/portal/3D_EM.js index 0934baf30012..8473c059f87a 100644 --- a/tests/e2e/portal/3D_EM.js +++ b/tests/e2e/portal/3D_EM.js @@ -25,18 +25,28 @@ async function runTutorial () { const studyData = await tutorial.openStudyLink(); const workbenchData = utils.extractWorkbenchData(studyData["data"]); - await tutorial.waitForServices(workbenchData["studyId"], [workbenchData["nodeIds"][2]], startTimeout); + const vtkNodeId = workbenchData["nodeIds"][2]; + await tutorial.waitForServices( + workbenchData["studyId"], + [vtkNodeId], + startTimeout, + false + ); await tutorial.waitFor(10000, 'Some time for starting the service'); await utils.takeScreenshot(page, screenshotPrefix + 'service_started'); - // This study opens in fullscreen mode - await tutorial.restoreIFrame(); - - const outFiles = [ - "data.zip" + const iframe = await tutorial.getIframe(vtkNodeId); + const entitiesListed = [ + "EM_02mm.vtk", + "CellDatatoPointData1", ]; - await tutorial.checkNodeOutputs(2, outFiles); + for (const text of entitiesListed) { + const found = await utils.waitForLabelText(iframe, text); + if (!found) { + throw new Error(`Text "${text}" not visible on the page within timeout.`); + } + } } catch(err) { await tutorial.setTutorialFailed(); diff --git a/tests/e2e/portal/BIOS_VNS_Calibrator.js b/tests/e2e/portal/BIOS_VNS_Calibrator.js index db0aaabf943f..e88be891749c 100644 --- a/tests/e2e/portal/BIOS_VNS_Calibrator.js +++ b/tests/e2e/portal/BIOS_VNS_Calibrator.js @@ -27,7 +27,8 @@ 
async function runTutorial () { const workbenchData = utils.extractWorkbenchData(studyData["data"]); console.log("Workbench Data:", workbenchData); const BIOSIdViewer = workbenchData["nodeIds"][0]; - await tutorial.waitForServices(workbenchData["studyId"], + await tutorial.waitForServices( + workbenchData["studyId"], [BIOSIdViewer], startTimeout, false diff --git a/tests/e2e/portal/Bornstein.js b/tests/e2e/portal/Bornstein.js index fa823b0382e4..deab2b74bc78 100644 --- a/tests/e2e/portal/Bornstein.js +++ b/tests/e2e/portal/Bornstein.js @@ -25,7 +25,11 @@ async function runTutorial () { const studyData = await tutorial.openStudyLink(); const workbenchData = utils.extractWorkbenchData(studyData["data"]); - await tutorial.waitForServices(workbenchData["studyId"], [workbenchData["nodeIds"][0]], startTimeout); + await tutorial.waitForServices( + workbenchData["studyId"], + [workbenchData["nodeIds"][0]], + startTimeout + ); await tutorial.waitFor(60000, 'Some time for starting the service'); await utils.takeScreenshot(page, screenshotPrefix + 'service_started'); diff --git a/tests/e2e/portal/Mattward.js b/tests/e2e/portal/Mattward.js index c6cde9101dcf..78fa70f3203b 100644 --- a/tests/e2e/portal/Mattward.js +++ b/tests/e2e/portal/Mattward.js @@ -25,7 +25,11 @@ async function runTutorial () { const studyData = await tutorial.openStudyLink(); const workbenchData = utils.extractWorkbenchData(studyData["data"]); - await tutorial.waitForServices(workbenchData["studyId"], [workbenchData["nodeIds"][0]], startTimeout); + await tutorial.waitForServices( + workbenchData["studyId"], + [workbenchData["nodeIds"][0]], + startTimeout + ); await tutorial.waitFor(20000, 'Some time for starting the service'); await utils.takeScreenshot(page, screenshotPrefix + 'service_started'); diff --git a/tests/e2e/requirements/requirements.txt b/tests/e2e/requirements/requirements.txt index c5473cc73055..4b615661c46d 100644 --- a/tests/e2e/requirements/requirements.txt +++ b/tests/e2e/requirements/requirements.txt @@ -12,11 +12,11 @@ pyyaml==6.0.2 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/requirements.in -requests==2.32.3 +requests==2.32.4 # via docker tenacity==9.0.0 # via -r requirements/requirements.in -urllib3==2.3.0 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # docker diff --git a/tests/e2e/tests/startupCalls.js b/tests/e2e/tests/startupCalls.js index ae4c642c8557..ab5822033236 100644 --- a/tests/e2e/tests/startupCalls.js +++ b/tests/e2e/tests/startupCalls.js @@ -47,7 +47,7 @@ module.exports = { await auto.register(page, user, pass); console.log("Registered"); - await page.waitFor(10000); + await page.waitFor(60000); }, ourTimeout); afterAll(async () => { diff --git a/tests/e2e/tutorials/tutorialBase.js b/tests/e2e/tutorials/tutorialBase.js index 4739f1f7c330..0c6859a1829b 100644 --- a/tests/e2e/tutorials/tutorialBase.js +++ b/tests/e2e/tutorials/tutorialBase.js @@ -337,6 +337,7 @@ class TutorialBase { return appModeButtonIds; } + // the waitForConnected only works for old dynamic services async waitForServices(studyId, nodeIds, timeout = 40000, waitForConnected = true) { console.log("waitForServices timeout:", timeout); if (nodeIds.length < 1) { diff --git a/tests/e2e/utils/utils.js b/tests/e2e/utils/utils.js index 432264196151..b328da6f2402 100644 --- a/tests/e2e/utils/utils.js +++ b/tests/e2e/utils/utils.js @@ -469,8 +469,8 @@ async function isStudyUnlocked(page, studyId) { return false; } - if (resp !== null && "locked" in resp && "value" in 
resp["locked"]) { - const studyLocked = resp["locked"]["value"]; + if (resp !== null && "shareState" in resp && "value" in resp["shareState"]) { + const studyLocked = resp["shareState"]["locked"]; console.log("Study Lock Status:", studyId, studyLocked); return !studyLocked; } @@ -634,6 +634,31 @@ async function getButtonsWithText(page, text) { return buttons; } +async function waitForLabelText(page, text, timeout = 10000) { + console.log("Waiting for label text:", text); + try { + await page.waitForFunction( + (text) => { + return [...document.body.querySelectorAll('*')].some(el => { + if (typeof el.innerText !== 'string') return false; + + const lines = el.innerText.split('\n').map(line => line.trim()); + return lines.some(line => + line.includes(text) && + !!(el.offsetWidth || el.offsetHeight || el.getClientRects().length) + ); + }); + }, + { timeout }, + text + ); + return true; + } catch (err) { + console.error("waitForLabelText failed:", err); + return false; + } +} + module.exports = { makeRequest, @@ -671,4 +696,5 @@ module.exports = { isElementVisible, clickLoggerTitle, getButtonsWithText, + waitForLabelText, } diff --git a/tests/environment-setup/requirements/requirements.txt b/tests/environment-setup/requirements/requirements.txt index be1f58f4078c..66ab35d23fbf 100644 --- a/tests/environment-setup/requirements/requirements.txt +++ b/tests/environment-setup/requirements/requirements.txt @@ -8,7 +8,7 @@ packaging==24.2 # pytest-sugar pluggy==1.5.0 # via pytest -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt @@ -20,15 +20,17 @@ pydantic==2.10.6 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/requirements.in -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via pydantic -pytest==8.3.5 +pygments==2.19.2 + # via pytest +pytest==8.4.1 # via # -r requirements/requirements.in # pytest-asyncio # pytest-instafail # pytest-sugar -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/requirements.in pytest-instafail==0.5.0 # via -r requirements/requirements.in @@ -50,7 +52,10 @@ pyyaml==6.0.2 # -r requirements/requirements.in termcolor==2.5.0 # via pytest-sugar -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # pydantic # pydantic-core + # typing-inspection +typing-inspection==0.4.1 + # via pydantic diff --git a/tests/environment-setup/test_used_python.py b/tests/environment-setup/test_used_python.py index fe11f3bd6af9..fd80a0642fef 100644 --- a/tests/environment-setup/test_used_python.py +++ b/tests/environment-setup/test_used_python.py @@ -51,25 +51,6 @@ def expected_python_version(osparc_simcore_root_dir: Path) -> tuple[int, ...]: return to_version(py_version) -@pytest.fixture(scope="session") -def expected_pip_version(osparc_simcore_root_dir: Path) -> str: - version = None - ref_script = osparc_simcore_root_dir / "ci/helpers/ensure_python_pip.bash" - - found = re.search(r"PIP_VERSION=([\d\.]+)", ref_script.read_text()) - assert found - version = found.group(1) - - print( - str(ref_script.relative_to(osparc_simcore_root_dir)), - "->", - version, - ) - assert version - - return version - - PathVersionTuple: 
TypeAlias = tuple[Path, str] @@ -135,20 +116,6 @@ def test_running_python_version(expected_python_version: tuple[int, ...]): ), f"Expected python {to_str(tuple(sys.version_info))} installed, got {to_str(expected_python_version)}" -def test_all_images_have_the_same_pip_version( - expected_pip_version: str, pip_in_dockerfiles: list[PathVersionTuple] -): - for dockerfile, pip_version in pip_in_dockerfiles: - if dockerfile.parent.name in FROZEN_SERVICES: - print( - "Skipping check on {dockefile} since this service/package development was froozen " - ) - else: - assert ( - pip_version == expected_pip_version - ), f"Expected pip {expected_pip_version} in {dockerfile}, got {pip_version}" - - def test_tooling_pre_commit_config( osparc_simcore_root_dir: Path, expected_python_version: tuple[int, ...] ): diff --git a/tests/performance/locustfiles/deployment_max_rps_single_endpoint.py b/tests/performance/locustfiles/deployment_max_rps_single_endpoint.py index 0b798b6eaab5..24c578b7bd93 100644 --- a/tests/performance/locustfiles/deployment_max_rps_single_endpoint.py +++ b/tests/performance/locustfiles/deployment_max_rps_single_endpoint.py @@ -9,6 +9,10 @@ # +import json +from collections.abc import Callable + +import jsf from common.base_user import OsparcWebUserBase from locust import events, task from locust.argument_parser import LocustArgumentParser @@ -23,9 +27,49 @@ def _(parser: LocustArgumentParser) -> None: default="/", help="The endpoint to test (e.g., /v0/health)", ) + parser.add_argument( + "--http-method", + type=str, + default="GET", + help="The HTTP method to test ('GET', 'POST', 'PUT', 'PATCH' or 'DELETE')", + ) + parser.add_argument( + "--body", + type=str, + default="", + help="Optional HTTP body as json string", + ) + parser.add_argument( + "--body-json-schema", + type=str, + default="", + help="Optional JSON schema for the request body. 
If specified, the request data will be randomly generated from this schema.", + ) + parser.add_argument( + "--headers", + type=str, + default="", + help="Optional HTTP headers as json string", + ) class WebApiUser(OsparcWebUserBase): @task - def get_endpoint(self) -> None: - self.authenticated_get(self.environment.parsed_options.endpoint) + def call_endpoint(self) -> None: + http_method = self.environment.parsed_options.http_method.lower() + method = getattr(self, f"authenticated_{http_method}") + if not isinstance(method, Callable): + msg = f"Unsupported HTTP method: {http_method}" + raise TypeError(msg) + + kwargs = {} + if len(self.environment.parsed_options.body) > 0: + kwargs["json"] = json.loads(self.environment.parsed_options.body) + if len(self.environment.parsed_options.body_json_schema) > 0: + faker = jsf.JSF( + json.loads(self.environment.parsed_options.body_json_schema) + ) + kwargs["json"] = faker.generate() + if len(self.environment.parsed_options.headers) > 0: + kwargs["headers"] = json.loads(self.environment.parsed_options.headers) + method(self.environment.parsed_options.endpoint, **kwargs) diff --git a/tests/performance/locustfiles/functions/function_map_load_test.py b/tests/performance/locustfiles/functions/function_map_load_test.py new file mode 100644 index 000000000000..6f556d36ea6b --- /dev/null +++ b/tests/performance/locustfiles/functions/function_map_load_test.py @@ -0,0 +1,114 @@ +# +# SEE https://docs.locust.io/en/stable/quickstart.html +# +# This script allows testing running a function via the map endpoint +# + + +import json +import random +from datetime import timedelta +from typing import Final +from uuid import UUID + +import jsf +from common.base_user import OsparcWebUserBase +from locust import events, task +from locust.argument_parser import LocustArgumentParser +from tenacity import ( + Retrying, + retry_if_exception_type, + stop_after_delay, + wait_exponential, +) + +_MAX_NJOBS: Final[int] = 50 +_REQUEST_TIMEOUT: Final[int] = 10 * 60 # 10 minutes request timeout for map endpoint + + +# Register the custom argument with Locust's parser +@events.init_command_line_parser.add_listener +def _(parser: LocustArgumentParser) -> None: + parser.add_argument( + "--function-uuid", + type=UUID, + default=None, + help="The function UUID to test", + ) + parser.add_argument( + "--function-input-json-schema", + type=str, + default=None, + help="JSON schema for the function job inputs", + ) + parser.add_argument( + "--max-poll-time-seconds", + type=int, + default=60, + help="Maximum time to wait for the function job collection to complete", + ) + parser.add_argument( + "--n-jobs", + type=int, + default=None, + help=f"Number of jobs to run via map-endpoint. 
If not set, a random number between 1 and {_MAX_NJOBS} is selected", + ) + + +class WebApiUser(OsparcWebUserBase): + network_timeout = _REQUEST_TIMEOUT + connection_timeout = _REQUEST_TIMEOUT + + @task + def map_function(self) -> None: + + function_uuid = self.environment.parsed_options.function_uuid + if function_uuid is None: + raise ValueError("function-uuid argument is required") + if self.environment.parsed_options.function_input_json_schema is None: + raise ValueError("function-input-json-schema argument is required") + job_input_schema = json.loads( + self.environment.parsed_options.function_input_json_schema + ) + max_poll_time = timedelta( + seconds=self.environment.parsed_options.max_poll_time_seconds + ) + n_jobs = ( + int(self.environment.parsed_options.n_jobs) + if self.environment.parsed_options.n_jobs is not None + else random.randint(1, _MAX_NJOBS) + ) + + # map function + job_input_faker = jsf.JSF(job_input_schema) + response = self.authenticated_post( + url=f"/v0/functions/{function_uuid}:map", + json=[job_input_faker.generate() for _ in range(n_jobs)], + headers={ + "x-simcore-parent-project-uuid": "null", + "x-simcore-parent-node-id": "null", + }, + name="/v0/functions/[function_uuid]:map", + ) + response.raise_for_status() + job_collection_uuid = response.json().get("uid") + + # wait for the job to complete + for attempt in Retrying( + stop=stop_after_delay(max_delay=max_poll_time), + wait=wait_exponential(multiplier=1, min=1, max=10), + reraise=True, + retry=retry_if_exception_type(ValueError), + ): + with attempt: + job_status_response = self.authenticated_get( + f"/v0/function_job_collections/{job_collection_uuid}/status", + name="/v0/function_job_collections/[job_collection_uuid]/status", + ) + job_status_response.raise_for_status() + all_job_statuses = job_status_response.json().get("status") + assert isinstance(all_job_statuses, list) + if any(status != "SUCCESS" for status in all_job_statuses): + raise ValueError( + f"Function job ({job_collection_uuid=}) for function ({function_uuid=}) returned {all_job_statuses=}" + ) diff --git a/tests/performance/locustfiles/functions/function_run_load_test.py b/tests/performance/locustfiles/functions/function_run_load_test.py new file mode 100644 index 000000000000..b86c1de7e32b --- /dev/null +++ b/tests/performance/locustfiles/functions/function_run_load_test.py @@ -0,0 +1,94 @@ +# +# SEE https://docs.locust.io/en/stable/quickstart.html +# +# This script allows testing running a function via the run endpoint +# + + +import json +from datetime import timedelta +from uuid import UUID + +import jsf +from common.base_user import OsparcWebUserBase +from locust import events, task +from locust.argument_parser import LocustArgumentParser +from tenacity import ( + Retrying, + retry_if_exception_type, + stop_after_delay, + wait_exponential, +) + + +# Register the custom argument with Locust's parser +@events.init_command_line_parser.add_listener +def _(parser: LocustArgumentParser) -> None: + parser.add_argument( + "--function-uuid", + type=UUID, + default=None, + help="The function UUID to test", + ) + parser.add_argument( + "--function-input-json-schema", + type=str, + default=None, + help="JSON schema for the function job inputs", + ) + parser.add_argument( + "--max-poll-time-seconds", + type=int, + default=60, + help="Maximum time to wait for the function job to complete", + ) + + +class WebApiUser(OsparcWebUserBase): + @task + def run_function(self) -> None: + + function_uuid = self.environment.parsed_options.function_uuid +
if function_uuid is None: + raise ValueError("function-uuid argument is required") + if self.environment.parsed_options.function_input_json_schema is None: + raise ValueError("function-input-json-schema argument is required") + job_input_schema = json.loads( + self.environment.parsed_options.function_input_json_schema + ) + max_poll_time = timedelta( + seconds=self.environment.parsed_options.max_poll_time_seconds + ) + + # run function + job_input_faker = jsf.JSF(job_input_schema) + response = self.authenticated_post( + url=f"/v0/functions/{function_uuid}:run", + json=job_input_faker.generate(), + headers={ + "x-simcore-parent-project-uuid": "null", + "x-simcore-parent-node-id": "null", + }, + name="/v0/functions/[function_uuid]:run", + ) + response.raise_for_status() + job_uuid = response.json().get("uid") + + # wait for the job to complete + for attempt in Retrying( + stop=stop_after_delay(max_delay=max_poll_time), + wait=wait_exponential(multiplier=1, min=1, max=10), + reraise=True, + retry=retry_if_exception_type(ValueError), + ): + with attempt: + job_status_response = self.authenticated_get( + f"/v0/function_jobs/{job_uuid}/status", + name="/v0/function_jobs/[job_uuid]/status", + ) + job_status_response.raise_for_status() + status = job_status_response.json().get("status") + if status != "SUCCESS": + raise ValueError( + f"Function job ({job_uuid=}) for function ({function_uuid=}) returned {status=}" + ) diff --git a/tests/performance/requirements/_test.in b/tests/performance/requirements/_test.in index c2ffb26a7f91..2dd40a77bd31 100644 --- a/tests/performance/requirements/_test.in +++ b/tests/performance/requirements/_test.in @@ -1,3 +1,4 @@ +jsf locust locust-plugins[dashboards] pydantic diff --git a/tests/performance/requirements/_test.txt b/tests/performance/requirements/_test.txt index 03a956a7cd77..d0cd5d6b9ec8 100644 --- a/tests/performance/requirements/_test.txt +++ b/tests/performance/requirements/_test.txt @@ -1,5 +1,9 @@ annotated-types==0.7.0 # via pydantic +attrs==25.3.0 + # via + # jsonschema + # referencing bidict==0.23.1 # via python-socketio blinker==1.9.0 @@ -18,12 +22,14 @@ configargparse==1.7.1 # via # locust # locust-cloud +faker==37.6.0 + # via jsf flask==3.1.1 # via # flask-cors # flask-login # locust -flask-cors==6.0.0 +flask-cors==6.0.1 # via locust flask-login==0.6.3 # via locust @@ -44,6 +50,12 @@ itsdangerous==2.2.0 # via flask jinja2==3.1.6 # via flask +jsf==0.11.2 + # via -r requirements/_test.in +jsonschema==4.25.1 + # via jsf +jsonschema-specifications==2025.4.1 + # via jsonschema locust==2.37.5 # via # -r requirements/_test.in @@ -67,13 +79,14 @@ psycogreen==1.0.2 # via locust-plugins psycopg2-binary==2.9.10 # via locust-plugins -pydantic==2.11.5 +pydantic==2.11.7 # via # -r requirements/_test.in + # jsf # pydantic-settings pydantic-core==2.33.2 # via pydantic -pydantic-settings==2.9.1 +pydantic-settings==2.10.1 # via -r requirements/_test.in python-dotenv==1.1.0 # via pydantic-settings @@ -83,10 +96,21 @@ python-socketio==5.13.0 # via locust-cloud pyzmq==26.4.0 # via locust -requests==2.32.3 +referencing==0.36.2 + # via + # jsonschema + # jsonschema-specifications +requests==2.32.4 # via # locust # python-socketio + # smart-open +rpds-py==0.27.1 + # via + # jsonschema + # referencing +rstr==3.2.2 + # via jsf setuptools==80.9.0 # via # locust @@ -94,19 +118,25 @@ setuptools==80.9.0 # zope-interface simple-websocket==1.1.0 # via python-engineio +smart-open==7.3.0.post1 + # via jsf tenacity==9.1.2 # via -r requirements/_test.in -typing-extensions==4.13.2 
+typing-extensions==4.14.1 # via + # jsf # locust-plugins # pydantic # pydantic-core + # referencing # typing-inspection typing-inspection==0.4.1 # via # pydantic # pydantic-settings -urllib3==2.4.0 +tzdata==2025.2 + # via faker +urllib3==2.5.0 # via # geventhttpclient # requests @@ -118,6 +148,8 @@ werkzeug==3.1.3 # flask-cors # flask-login # locust +wrapt==1.17.3 + # via smart-open wsproto==1.2.0 # via simple-websocket zope-event==5.0 diff --git a/tests/performance/requirements/_tools.txt b/tests/performance/requirements/_tools.txt index 096d2a1e3f9d..8192b5cde099 100644 --- a/tests/performance/requirements/_tools.txt +++ b/tests/performance/requirements/_tools.txt @@ -27,7 +27,7 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via -r requirements/../../../requirements/devenv.txt mypy-extensions==1.1.0 # via @@ -40,7 +40,9 @@ packaging==25.0 # black # build pathspec==0.12.1 - # via black + # via + # black + # mypy pip==25.1.1 # via pip-tools pip-tools==7.4.1 @@ -71,7 +73,7 @@ setuptools==80.9.0 # pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.13.2 +typing-extensions==4.14.1 # via # -c requirements/_test.txt # mypy diff --git a/tests/public-api/conftest.py b/tests/public-api/conftest.py index 508b4c59a8a8..23f495ccc882 100644 --- a/tests/public-api/conftest.py +++ b/tests/public-api/conftest.py @@ -8,8 +8,9 @@ import logging import os import time -from collections.abc import Callable, Iterator +from collections.abc import Awaitable, Callable, Iterator from pprint import pformat +from typing import Any import httpx import osparc @@ -157,8 +158,10 @@ def registered_user( @pytest.fixture(scope="module") -def services_registry( - docker_registry_image_injector: Callable, +async def services_registry( + docker_registry_image_injector: Callable[ + [str, str, str | None], Awaitable[dict[str, Any]] + ], registered_user: RegisteredUserDict, env_vars_for_docker_compose: dict[str, str], ) -> dict[ServiceNameStr, ServiceInfoDict]: @@ -168,10 +171,10 @@ def services_registry( # user_email = registered_user["email"] - sleeper_service = docker_registry_image_injector( - source_image_repo="itisfoundation/sleeper", - source_image_tag="2.1.1", - owner_email=user_email, + sleeper_service = await docker_registry_image_injector( + "itisfoundation/sleeper", + "2.1.1", + user_email, ) assert sleeper_service["image"]["tag"] == "2.1.1" diff --git a/tests/public-api/requirements/_base.txt b/tests/public-api/requirements/_base.txt index 79d2ca83c91f..dd082852acac 100644 --- a/tests/public-api/requirements/_base.txt +++ b/tests/public-api/requirements/_base.txt @@ -9,11 +9,11 @@ certifi==2025.1.31 # httpcore # httpx # osparc-client -click==8.1.8 +click==8.2.1 # via typer -h11==0.14.0 +h11==0.16.0 # via httpcore -httpcore==1.0.7 +httpcore==1.0.9 # via httpx httpx==0.28.1 # via @@ -41,7 +41,7 @@ osparc-client==0.8.3 # via osparc packaging==24.2 # via osparc -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt @@ -50,9 +50,9 @@ pydantic==2.10.6 # osparc # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.2 +pydantic-extra-types==2.10.5 # via -r 
requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/_base.in pydantic-settings==2.7.0 # via @@ -60,13 +60,13 @@ pydantic-settings==2.7.0 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/settings-library/requirements/_base.in # osparc -pygments==2.19.1 +pygments==2.19.2 # via rich python-dateutil==2.9.0.post0 # via osparc-client python-dotenv==1.0.1 # via pydantic-settings -rich==13.9.4 +rich==14.1.0 # via # -r requirements/../../../packages/settings-library/requirements/_base.in # typer @@ -82,16 +82,19 @@ tenacity==9.0.0 # via osparc tqdm==4.67.1 # via osparc -typer==0.15.2 +typer==0.16.1 # via -r requirements/../../../packages/settings-library/requirements/_base.in -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # anyio # pydantic # pydantic-core # pydantic-extra-types # typer -urllib3==2.3.0 + # typing-inspection +typing-inspection==0.4.1 + # via pydantic +urllib3==2.5.0 # via # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt diff --git a/tests/public-api/requirements/_test.txt b/tests/public-api/requirements/_test.txt index 05b1737e38dd..8d5eae692bfd 100644 --- a/tests/public-api/requirements/_test.txt +++ b/tests/public-api/requirements/_test.txt @@ -2,7 +2,7 @@ aiodocker==0.24.0 # via -r requirements/_test.in aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../requirements/constraints.txt # -r requirements/_test.in @@ -32,9 +32,9 @@ frozenlist==1.5.0 # via # aiohttp # aiosignal -h11==0.14.0 +h11==0.16.0 # via httpcore -httpcore==1.0.7 +httpcore==1.0.9 # via httpx httpx==0.28.1 # via @@ -64,11 +64,13 @@ propcache==0.3.0 # via # aiohttp # yarl -pytest==8.3.5 +pygments==2.19.2 + # via pytest +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in python-dotenv==1.0.1 # via -r requirements/_test.in @@ -81,7 +83,7 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via docker rpds-py==0.23.1 # via @@ -91,11 +93,11 @@ sniffio==1.3.1 # via anyio tenacity==9.0.0 # via -r requirements/_test.in -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via anyio tzdata==2025.1 # via faker -urllib3==2.3.0 +urllib3==2.5.0 # via # -c requirements/../../../requirements/constraints.txt # docker diff --git a/tests/public-api/requirements/_tools.txt b/tests/public-api/requirements/_tools.txt index 0ce723bfa575..bf34ae2bd826 100644 --- a/tests/public-api/requirements/_tools.txt +++ b/tests/public-api/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c requirements/_base.txt # black @@ -27,9 +27,9 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # black # mypy @@ -42,7 +42,9 @@ packaging==24.2 # black # build pathspec==0.12.1 - # via black + # via + # black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -67,11 +69,11 @@ 
pyyaml==6.0.2 # pre-commit ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.2 +setuptools==80.9.0 # via pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_base.txt # -c requirements/_test.txt diff --git a/tests/swarm-deploy/conftest.py b/tests/swarm-deploy/conftest.py index debb3529a2d3..9825fb782a4d 100644 --- a/tests/swarm-deploy/conftest.py +++ b/tests/swarm-deploy/conftest.py @@ -111,7 +111,7 @@ def simcore_stack_deployed_services( # logs table like # ID NAME IMAGE NODE DESIRED STATE CURRENT STATE ERROR # xbrhmaygtb76 simcore_sidecar.1 itisfoundation/sidecar:latest crespo-wkstn Running Running 53 seconds ago - # zde7p8qdwk4j simcore_rabbit.1 itisfoundation/rabbitmq:3.13.7-management crespo-wkstn Running Running 59 seconds ago + # zde7p8qdwk4j simcore_rabbit.1 itisfoundation/rabbitmq:4.1.2-management crespo-wkstn Running Running 59 seconds ago # f2gxmhwq7hhk simcore_postgres.1 postgres:10.10 crespo-wkstn Running Running about a minute ago # 1lh2hulxmc4q simcore_director.1 itisfoundation/director:latest crespo-wkstn Running Running 34 seconds ago # ... diff --git a/tests/swarm-deploy/requirements/_test.txt b/tests/swarm-deploy/requirements/_test.txt index 210ad22a8f10..323da12b9465 100644 --- a/tests/swarm-deploy/requirements/_test.txt +++ b/tests/swarm-deploy/requirements/_test.txt @@ -24,7 +24,7 @@ aiofiles==24.1.0 # -r requirements/../../../packages/simcore-sdk/requirements/_base.in aiohappyeyeballs==2.6.1 # via aiohttp -aiohttp==3.11.18 +aiohttp==3.12.12 # via # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -68,6 +68,7 @@ anyio==4.8.0 # via # fast-depends # faststream + # httpx arrow==1.3.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in @@ -111,22 +112,18 @@ certifi==2025.1.31 # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/postgres-database/requirements/_migration.txt + # httpcore + # httpx # requests charset-normalizer==3.4.1 # via # -r requirements/../../../packages/postgres-database/requirements/_migration.txt # requests -click==8.1.8 +click==8.2.1 # via # -r requirements/../../../packages/postgres-database/requirements/_migration.txt # -r requirements/_test.in # typer -deprecated==1.2.18 - # via - # opentelemetry-api - # opentelemetry-exporter-otlp-proto-grpc - # opentelemetry-exporter-otlp-proto-http - # opentelemetry-semantic-conventions dnspython==2.7.0 # via email-validator docker==7.1.0 @@ -153,7 +150,7 @@ frozenlist==1.5.0 # via # aiohttp # aiosignal -googleapis-common-protos==1.68.0 +googleapis-common-protos==1.70.0 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http @@ -163,17 +160,56 @@ greenlet==3.1.1 # sqlalchemy grpcio==1.70.0 # via opentelemetry-exporter-otlp-proto-grpc +h11==0.16.0 + # via httpcore +httpcore==1.0.9 + # via httpx +httpx==0.28.1 + # via + # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt + # -c 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt + # -c requirements/../../../packages/simcore-sdk/requirements/../../../requirements/constraints.txt + # -c requirements/../../../requirements/constraints.txt + # -r requirements/../../../packages/simcore-sdk/requirements/_base.in idna==3.10 # via # -r requirements/../../../packages/postgres-database/requirements/_migration.txt # anyio # email-validator + # httpx # requests # yarl importlib-metadata==8.5.0 # via opentelemetry-api iniconfig==2.0.0 # via pytest +jsonref==1.1.0 + # via + # -r requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in jsonschema==4.23.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in @@ -183,7 +219,7 @@ jsonschema==4.23.0 # -r requirements/_test.in jsonschema-specifications==2024.10.1 # via jsonschema -mako==1.3.9 +mako==1.3.10 # via # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -224,7 +260,7 @@ multidict==6.1.0 # via # aiohttp # yarl -opentelemetry-api==1.30.0 +opentelemetry-api==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in @@ -238,62 +274,64 @@ opentelemetry-api==1.30.0 # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions -opentelemetry-exporter-otlp==1.30.0 +opentelemetry-exporter-otlp==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-exporter-otlp-proto-common==1.30.0 +opentelemetry-exporter-otlp-proto-common==1.34.1 # via # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-exporter-otlp-proto-grpc==1.30.0 +opentelemetry-exporter-otlp-proto-grpc==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-exporter-otlp-proto-http==1.30.0 +opentelemetry-exporter-otlp-proto-http==1.34.1 # via opentelemetry-exporter-otlp -opentelemetry-instrumentation==0.51b0 +opentelemetry-instrumentation==0.55b1 # via # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-logging # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests -opentelemetry-instrumentation-aio-pika==0.51b0 +opentelemetry-instrumentation-aio-pika==0.55b1 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in +opentelemetry-instrumentation-asyncpg==0.55b1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-asyncpg==0.51b0 - # via -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in -opentelemetry-instrumentation-logging==0.51b0 +opentelemetry-instrumentation-logging==0.55b1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-redis==0.51b0 +opentelemetry-instrumentation-redis==0.55b1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-instrumentation-requests==0.51b0 +opentelemetry-instrumentation-requests==0.55b1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -opentelemetry-proto==1.30.0 +opentelemetry-proto==1.34.1 # via # opentelemetry-exporter-otlp-proto-common # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-sdk==1.30.0 +opentelemetry-sdk==1.34.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http -opentelemetry-semantic-conventions==0.51b0 +opentelemetry-semantic-conventions==0.55b1 # via # opentelemetry-instrumentation # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk -opentelemetry-util-http==0.51b0 +opentelemetry-util-http==0.55b1 # via opentelemetry-instrumentation-requests orjson==3.10.15 # via @@ -356,7 +394,7 @@ propcache==0.3.0 # via # aiohttp # yarl -protobuf==5.29.3 +protobuf==5.29.5 # via # googleapis-common-protos # opentelemetry-proto @@ -368,7 +406,7 @@ psycopg2-binary==2.9.10 # via sqlalchemy pycryptodome==3.21.0 # via stream-zip -pydantic==2.10.6 +pydantic==2.11.7 # via # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -422,9 +460,9 @@ pydantic==2.10.6 # fast-depends # pydantic-extra-types # pydantic-settings -pydantic-core==2.27.2 +pydantic-core==2.33.2 # via pydantic -pydantic-extra-types==2.10.2 +pydantic-extra-types==2.10.5 # via # -r requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in @@ -478,23 +516,25 @@ pydantic-settings==2.7.0 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in pygments==2.19.1 - # via rich + # via + # pytest + # rich pyinstrument==5.0.1 # via # -r requirements/../../../packages/service-library/requirements/_base.in # -r 
requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in -pytest==8.3.5 +pytest==8.4.1 # via # -r requirements/_test.in # pytest-asyncio # pytest-instafail # pytest-mock # pytest-sugar -pytest-asyncio==0.26.0 +pytest-asyncio==1.0.0 # via -r requirements/_test.in pytest-instafail==0.5.0 # via -r requirements/_test.in -pytest-mock==3.14.0 +pytest-mock==3.14.1 # via -r requirements/_test.in pytest-runner==6.0.1 # via -r requirements/_test.in @@ -594,12 +634,12 @@ referencing==0.35.1 # -c requirements/../../../requirements/constraints.txt # jsonschema # jsonschema-specifications -requests==2.32.3 +requests==2.32.4 # via # -r requirements/../../../packages/postgres-database/requirements/_migration.txt # docker # opentelemetry-exporter-otlp-proto-http -rich==13.9.4 +rich==14.1.0 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -669,7 +709,7 @@ tqdm==4.67.1 # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/_base.in -typer==0.15.2 +typer==0.16.1 # via # -r requirements/../../../packages/service-library/requirements/../../../packages/settings-library/requirements/_base.in # -r requirements/../../../packages/settings-library/requirements/_base.in @@ -677,7 +717,7 @@ typer==0.15.2 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/settings-library/requirements/_base.in types-python-dateutil==2.9.0.20241206 # via arrow -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -r requirements/../../../packages/postgres-database/requirements/_migration.txt # aiodebug @@ -686,15 +726,22 @@ typing-extensions==4.12.2 # faststream # flexcache # flexparser + # opentelemetry-api + # opentelemetry-exporter-otlp-proto-grpc + # opentelemetry-exporter-otlp-proto-http # opentelemetry-sdk + # opentelemetry-semantic-conventions # pint # pydantic # pydantic-core # pydantic-extra-types # typer + # typing-inspection +typing-inspection==0.4.1 + # via pydantic tzdata==2025.1 # via faker -urllib3==2.3.0 +urllib3==2.5.0 # via # -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -726,7 +773,6 @@ urllib3==2.3.0 # requests wrapt==1.17.2 # via - # deprecated # opentelemetry-instrumentation # opentelemetry-instrumentation-aio-pika # opentelemetry-instrumentation-redis diff --git a/tests/swarm-deploy/requirements/_tools.txt b/tests/swarm-deploy/requirements/_tools.txt index 891fdf1892c7..60365312a10e 100644 --- a/tests/swarm-deploy/requirements/_tools.txt +++ b/tests/swarm-deploy/requirements/_tools.txt @@ -8,7 +8,7 @@ bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.2.1 # via # -c requirements/_test.txt # black @@ -27,9 +27,9 @@ isort==6.0.1 # pylint mccabe==0.7.0 # via pylint -mypy==1.15.0 +mypy==1.16.1 # via -r requirements/../../../requirements/devenv.txt -mypy-extensions==1.0.0 +mypy-extensions==1.1.0 # via # black # mypy @@ -41,7 +41,9 @@ packaging==24.2 # black # build pathspec==0.12.1 - # via black + # via + # 
black + # mypy pip==25.0.1 # via pip-tools pip-tools==7.4.1 @@ -68,11 +70,11 @@ pyyaml==6.0.2 # watchdog ruff==0.9.9 # via -r requirements/../../../requirements/devenv.txt -setuptools==75.8.2 +setuptools==80.9.0 # via pip-tools tomlkit==0.13.2 # via pylint -typing-extensions==4.12.2 +typing-extensions==4.14.1 # via # -c requirements/_test.txt # mypy